[gcc.git] / gcc / builtins.c
1 /* Expand builtin functions.
2 Copyright (C) 1988-2015 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* Legacy warning! Please add no further builtin simplifications here
21 (apart from pure constant folding) - builtin simplifications should go
22 to match.pd or gimple-fold.c instead. */
23
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "backend.h"
28 #include "target.h"
29 #include "rtl.h"
30 #include "tree.h"
31 #include "gimple.h"
32 #include "predict.h"
33 #include "tm_p.h"
34 #include "stringpool.h"
35 #include "tree-ssanames.h"
36 #include "expmed.h"
37 #include "optabs.h"
38 #include "emit-rtl.h"
39 #include "recog.h"
40 #include "diagnostic-core.h"
41 #include "alias.h"
42 #include "fold-const.h"
43 #include "stor-layout.h"
44 #include "calls.h"
45 #include "varasm.h"
46 #include "tree-object-size.h"
47 #include "realmpfr.h"
48 #include "cfgrtl.h"
49 #include "except.h"
50 #include "dojump.h"
51 #include "explow.h"
52 #include "stmt.h"
53 #include "expr.h"
54 #include "libfuncs.h"
55 #include "output.h"
56 #include "typeclass.h"
57 #include "langhooks.h"
58 #include "value-prof.h"
59 #include "builtins.h"
60 #include "asan.h"
61 #include "cilk.h"
62 #include "tree-chkp.h"
63 #include "rtl-chkp.h"
64
65
66 static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));
67
68 struct target_builtins default_target_builtins;
69 #if SWITCHABLE_TARGET
70 struct target_builtins *this_target_builtins = &default_target_builtins;
71 #endif
72
73 /* Define the names of the builtin function types and codes. */
74 const char *const built_in_class_names[BUILT_IN_LAST]
75 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
76
77 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
78 const char * built_in_names[(int) END_BUILTINS] =
79 {
80 #include "builtins.def"
81 };
82 #undef DEF_BUILTIN
83
 84 /* Set up an array of builtin_info_type; make sure each element's decl is
 85    initialized to NULL_TREE.  */
86 builtin_info_type builtin_info[(int)END_BUILTINS];
87
88 /* Non-zero if __builtin_constant_p should be folded right away. */
89 bool force_folding_builtin_constant_p;
90
91 static rtx c_readstr (const char *, machine_mode);
92 static int target_char_cast (tree, char *);
93 static rtx get_memory_rtx (tree, tree);
94 static int apply_args_size (void);
95 static int apply_result_size (void);
96 static rtx result_vector (int, rtx);
97 static void expand_builtin_prefetch (tree);
98 static rtx expand_builtin_apply_args (void);
99 static rtx expand_builtin_apply_args_1 (void);
100 static rtx expand_builtin_apply (rtx, rtx, rtx);
101 static void expand_builtin_return (rtx);
102 static enum type_class type_to_class (tree);
103 static rtx expand_builtin_classify_type (tree);
104 static void expand_errno_check (tree, rtx);
105 static rtx expand_builtin_mathfn (tree, rtx, rtx);
106 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
107 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
108 static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
109 static rtx expand_builtin_interclass_mathfn (tree, rtx);
110 static rtx expand_builtin_sincos (tree);
111 static rtx expand_builtin_cexpi (tree, rtx);
112 static rtx expand_builtin_int_roundingfn (tree, rtx);
113 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
114 static rtx expand_builtin_next_arg (void);
115 static rtx expand_builtin_va_start (tree);
116 static rtx expand_builtin_va_end (tree);
117 static rtx expand_builtin_va_copy (tree);
118 static rtx expand_builtin_strcmp (tree, rtx);
119 static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
120 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, machine_mode);
121 static rtx expand_builtin_memcpy (tree, rtx);
122 static rtx expand_builtin_memcpy_with_bounds (tree, rtx);
123 static rtx expand_builtin_memcpy_args (tree, tree, tree, rtx, tree);
124 static rtx expand_builtin_mempcpy (tree, rtx, machine_mode);
125 static rtx expand_builtin_mempcpy_with_bounds (tree, rtx, machine_mode);
126 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
127 machine_mode, int, tree);
128 static rtx expand_builtin_strcpy (tree, rtx);
129 static rtx expand_builtin_strcpy_args (tree, tree, rtx);
130 static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
131 static rtx expand_builtin_strncpy (tree, rtx);
132 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, machine_mode);
133 static rtx expand_builtin_memset (tree, rtx, machine_mode);
134 static rtx expand_builtin_memset_with_bounds (tree, rtx, machine_mode);
135 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
136 static rtx expand_builtin_bzero (tree);
137 static rtx expand_builtin_strlen (tree, rtx, machine_mode);
138 static rtx expand_builtin_alloca (tree, bool);
139 static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
140 static rtx expand_builtin_frame_address (tree, tree);
141 static tree stabilize_va_list_loc (location_t, tree, int);
142 static rtx expand_builtin_expect (tree, rtx);
143 static tree fold_builtin_constant_p (tree);
144 static tree fold_builtin_classify_type (tree);
145 static tree fold_builtin_strlen (location_t, tree, tree);
146 static tree fold_builtin_inf (location_t, tree, int);
147 static tree fold_builtin_nan (tree, tree, int);
148 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
149 static bool validate_arg (const_tree, enum tree_code code);
150 static rtx expand_builtin_fabs (tree, rtx, rtx);
151 static rtx expand_builtin_signbit (tree, rtx);
152 static tree fold_builtin_bitop (tree, tree);
153 static tree fold_builtin_strchr (location_t, tree, tree, tree);
154 static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
155 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
156 static tree fold_builtin_strcmp (location_t, tree, tree);
157 static tree fold_builtin_strncmp (location_t, tree, tree, tree);
158 static tree fold_builtin_isascii (location_t, tree);
159 static tree fold_builtin_toascii (location_t, tree);
160 static tree fold_builtin_isdigit (location_t, tree);
161 static tree fold_builtin_fabs (location_t, tree, tree);
162 static tree fold_builtin_abs (location_t, tree, tree);
163 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
164 enum tree_code);
165 static tree fold_builtin_0 (location_t, tree);
166 static tree fold_builtin_1 (location_t, tree, tree);
167 static tree fold_builtin_2 (location_t, tree, tree, tree);
168 static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
169 static tree fold_builtin_varargs (location_t, tree, tree*, int);
170
171 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
172 static tree fold_builtin_strstr (location_t, tree, tree, tree);
173 static tree fold_builtin_strrchr (location_t, tree, tree, tree);
174 static tree fold_builtin_strspn (location_t, tree, tree);
175 static tree fold_builtin_strcspn (location_t, tree, tree);
176
177 static rtx expand_builtin_object_size (tree);
178 static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
179 enum built_in_function);
180 static void maybe_emit_chk_warning (tree, enum built_in_function);
181 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
182 static void maybe_emit_free_warning (tree);
183 static tree fold_builtin_object_size (tree, tree);
184
185 unsigned HOST_WIDE_INT target_newline;
186 unsigned HOST_WIDE_INT target_percent;
187 static unsigned HOST_WIDE_INT target_c;
188 static unsigned HOST_WIDE_INT target_s;
189 char target_percent_c[3];
190 char target_percent_s[3];
191 char target_percent_s_newline[4];
192 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
193 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
194 static tree do_mpfr_arg2 (tree, tree, tree,
195 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
196 static tree do_mpfr_arg3 (tree, tree, tree, tree,
197 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
198 static tree do_mpfr_sincos (tree, tree, tree);
199 static tree do_mpfr_bessel_n (tree, tree, tree,
200 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
201 const REAL_VALUE_TYPE *, bool);
202 static tree do_mpfr_remquo (tree, tree, tree);
203 static tree do_mpfr_lgamma_r (tree, tree, tree);
204 static void expand_builtin_sync_synchronize (void);
205
 206 /* Return true if NAME starts with __builtin_, __sync_ or __atomic_,
     or names one of the Cilk runtime helpers when Cilk Plus is enabled.  */
207
208 static bool
209 is_builtin_name (const char *name)
210 {
211 if (strncmp (name, "__builtin_", 10) == 0)
212 return true;
213 if (strncmp (name, "__sync_", 7) == 0)
214 return true;
215 if (strncmp (name, "__atomic_", 9) == 0)
216 return true;
217 if (flag_cilkplus
218 && (!strcmp (name, "__cilkrts_detach")
219 || !strcmp (name, "__cilkrts_pop_frame")))
220 return true;
221 return false;
222 }
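
/* For illustration: under this predicate "__builtin_memcpy" and
   "__atomic_load_n" count as builtin names while plain "memcpy" does
   not, and the Cilk helpers "__cilkrts_detach" and
   "__cilkrts_pop_frame" only qualify when flag_cilkplus is set.  */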
223
224
225 /* Return true if DECL is a function symbol representing a built-in. */
226
227 bool
228 is_builtin_fn (tree decl)
229 {
230 return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
231 }
232
233 /* Return true if NODE should be considered for inline expansion regardless
234 of the optimization level. This means whenever a function is invoked with
235 its "internal" name, which normally contains the prefix "__builtin". */
236
237 static bool
238 called_as_built_in (tree node)
239 {
240 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
241 we want the name used to call the function, not the name it
242 will have. */
243 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
244 return is_builtin_name (name);
245 }
246
 247 /* Compute values M and N such that M divides (address of EXP - N) and such
 248    that N < M.  If these numbers can be determined, store M in *ALIGNP and N in
 249    *BITPOSP and return true.  Otherwise return false and store BITS_PER_UNIT in
 250    *ALIGNP and any bit-offset in *BITPOSP.
251
252 Note that the address (and thus the alignment) computed here is based
253 on the address to which a symbol resolves, whereas DECL_ALIGN is based
254 on the address at which an object is actually located. These two
255 addresses are not always the same. For example, on ARM targets,
256 the address &foo of a Thumb function foo() has the lowest bit set,
257 whereas foo() itself starts on an even address.
258
259 If ADDR_P is true we are taking the address of the memory reference EXP
260 and thus cannot rely on the access taking place. */
261
262 static bool
263 get_object_alignment_2 (tree exp, unsigned int *alignp,
264 unsigned HOST_WIDE_INT *bitposp, bool addr_p)
265 {
266 HOST_WIDE_INT bitsize, bitpos;
267 tree offset;
268 machine_mode mode;
269 int unsignedp, volatilep;
270 unsigned int align = BITS_PER_UNIT;
271 bool known_alignment = false;
272
273 /* Get the innermost object and the constant (bitpos) and possibly
274 variable (offset) offset of the access. */
275 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
276 &mode, &unsignedp, &volatilep, true);
277
278 /* Extract alignment information from the innermost object and
279 possibly adjust bitpos and offset. */
280 if (TREE_CODE (exp) == FUNCTION_DECL)
281 {
282 /* Function addresses can encode extra information besides their
283 alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
284 allows the low bit to be used as a virtual bit, we know
285 that the address itself must be at least 2-byte aligned. */
286 if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
287 align = 2 * BITS_PER_UNIT;
288 }
289 else if (TREE_CODE (exp) == LABEL_DECL)
290 ;
291 else if (TREE_CODE (exp) == CONST_DECL)
292 {
293 /* The alignment of a CONST_DECL is determined by its initializer. */
294 exp = DECL_INITIAL (exp);
295 align = TYPE_ALIGN (TREE_TYPE (exp));
296 if (CONSTANT_CLASS_P (exp))
297 align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
298
299 known_alignment = true;
300 }
301 else if (DECL_P (exp))
302 {
303 align = DECL_ALIGN (exp);
304 known_alignment = true;
305 }
306 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
307 {
308 align = TYPE_ALIGN (TREE_TYPE (exp));
309 }
310 else if (TREE_CODE (exp) == INDIRECT_REF
311 || TREE_CODE (exp) == MEM_REF
312 || TREE_CODE (exp) == TARGET_MEM_REF)
313 {
314 tree addr = TREE_OPERAND (exp, 0);
315 unsigned ptr_align;
316 unsigned HOST_WIDE_INT ptr_bitpos;
317 unsigned HOST_WIDE_INT ptr_bitmask = ~0;
318
 319       /* If the address is explicitly aligned, handle that.  */
320 if (TREE_CODE (addr) == BIT_AND_EXPR
321 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
322 {
323 ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
324 ptr_bitmask *= BITS_PER_UNIT;
325 align = ptr_bitmask & -ptr_bitmask;
326 addr = TREE_OPERAND (addr, 0);
327 }
328
329 known_alignment
330 = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
331 align = MAX (ptr_align, align);
332
333 /* Re-apply explicit alignment to the bitpos. */
334 ptr_bitpos &= ptr_bitmask;
335
336 /* The alignment of the pointer operand in a TARGET_MEM_REF
337 has to take the variable offset parts into account. */
338 if (TREE_CODE (exp) == TARGET_MEM_REF)
339 {
340 if (TMR_INDEX (exp))
341 {
342 unsigned HOST_WIDE_INT step = 1;
343 if (TMR_STEP (exp))
344 step = TREE_INT_CST_LOW (TMR_STEP (exp));
345 align = MIN (align, (step & -step) * BITS_PER_UNIT);
346 }
347 if (TMR_INDEX2 (exp))
348 align = BITS_PER_UNIT;
349 known_alignment = false;
350 }
351
352 /* When EXP is an actual memory reference then we can use
353 TYPE_ALIGN of a pointer indirection to derive alignment.
354 Do so only if get_pointer_alignment_1 did not reveal absolute
355 alignment knowledge and if using that alignment would
356 improve the situation. */
357 if (!addr_p && !known_alignment
358 && TYPE_ALIGN (TREE_TYPE (exp)) > align)
359 align = TYPE_ALIGN (TREE_TYPE (exp));
360 else
361 {
362 /* Else adjust bitpos accordingly. */
363 bitpos += ptr_bitpos;
364 if (TREE_CODE (exp) == MEM_REF
365 || TREE_CODE (exp) == TARGET_MEM_REF)
366 bitpos += mem_ref_offset (exp).to_short_addr () * BITS_PER_UNIT;
367 }
368 }
369 else if (TREE_CODE (exp) == STRING_CST)
370 {
371 /* STRING_CST are the only constant objects we allow to be not
372 wrapped inside a CONST_DECL. */
373 align = TYPE_ALIGN (TREE_TYPE (exp));
374 if (CONSTANT_CLASS_P (exp))
375 align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
376
377 known_alignment = true;
378 }
379
380 /* If there is a non-constant offset part extract the maximum
381 alignment that can prevail. */
382 if (offset)
383 {
384 unsigned int trailing_zeros = tree_ctz (offset);
385 if (trailing_zeros < HOST_BITS_PER_INT)
386 {
387 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
388 if (inner)
389 align = MIN (align, inner);
390 }
391 }
392
393 *alignp = align;
394 *bitposp = bitpos & (*alignp - 1);
395 return known_alignment;
396 }
397
 398 /* For a memory reference expression EXP compute values M and N such that M
 399    divides (&EXP - N) and such that N < M.  If these numbers can be determined,
 400    store M in *ALIGNP and N in *BITPOSP and return true.  Otherwise return false
 401    and store BITS_PER_UNIT in *ALIGNP and any bit-offset in *BITPOSP.  */
402
403 bool
404 get_object_alignment_1 (tree exp, unsigned int *alignp,
405 unsigned HOST_WIDE_INT *bitposp)
406 {
407 return get_object_alignment_2 (exp, alignp, bitposp, false);
408 }
409
410 /* Return the alignment in bits of EXP, an object. */
411
412 unsigned int
413 get_object_alignment (tree exp)
414 {
415 unsigned HOST_WIDE_INT bitpos = 0;
416 unsigned int align;
417
418 get_object_alignment_1 (exp, &align, &bitpos);
419
420 /* align and bitpos now specify known low bits of the pointer.
421 ptr & (align - 1) == bitpos. */
422
423 if (bitpos != 0)
424 align = (bitpos & -bitpos);
425 return align;
426 }
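
/* A worked instance of the adjustment above: if get_object_alignment_1
   reports align == 64 and bitpos == 16 (the object sits 2 bytes past an
   8-byte boundary), the usable alignment is the lowest set bit of the
   bit position, 16 & -16 == 16 bits, i.e. 2 bytes.  */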
427
 428 /* For a pointer valued expression EXP compute values M and N such that M
 429    divides (EXP - N) and such that N < M.  If these numbers can be determined,
 430    store M in *ALIGNP and N in *BITPOSP and return true.  Return false if
 431    the results are just a conservative approximation.
432
433 If EXP is not a pointer, false is returned too. */
434
435 bool
436 get_pointer_alignment_1 (tree exp, unsigned int *alignp,
437 unsigned HOST_WIDE_INT *bitposp)
438 {
439 STRIP_NOPS (exp);
440
441 if (TREE_CODE (exp) == ADDR_EXPR)
442 return get_object_alignment_2 (TREE_OPERAND (exp, 0),
443 alignp, bitposp, true);
444 else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
445 {
446 unsigned int align;
447 unsigned HOST_WIDE_INT bitpos;
448 bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
449 &align, &bitpos);
450 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
451 bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
452 else
453 {
454 unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
455 if (trailing_zeros < HOST_BITS_PER_INT)
456 {
457 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
458 if (inner)
459 align = MIN (align, inner);
460 }
461 }
462 *alignp = align;
463 *bitposp = bitpos & (align - 1);
464 return res;
465 }
466 else if (TREE_CODE (exp) == SSA_NAME
467 && POINTER_TYPE_P (TREE_TYPE (exp)))
468 {
469 unsigned int ptr_align, ptr_misalign;
470 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
471
472 if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
473 {
474 *bitposp = ptr_misalign * BITS_PER_UNIT;
475 *alignp = ptr_align * BITS_PER_UNIT;
476 /* We cannot really tell whether this result is an approximation. */
477 return true;
478 }
479 else
480 {
481 *bitposp = 0;
482 *alignp = BITS_PER_UNIT;
483 return false;
484 }
485 }
486 else if (TREE_CODE (exp) == INTEGER_CST)
487 {
488 *alignp = BIGGEST_ALIGNMENT;
489 *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
490 & (BIGGEST_ALIGNMENT - 1));
491 return true;
492 }
493
494 *bitposp = 0;
495 *alignp = BITS_PER_UNIT;
496 return false;
497 }
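
/* Example of the POINTER_PLUS_EXPR case above: for p + 6 where p is
   known to be 64-bit aligned with bit position 0, ALIGN stays 64 and
   BITPOS becomes 48; get_pointer_alignment below then reduces that to
   48 & -48 == 16 bits for the sum.  */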
498
499 /* Return the alignment in bits of EXP, a pointer valued expression.
500 The alignment returned is, by default, the alignment of the thing that
 501    EXP points to.  If it is not a POINTER_TYPE, BITS_PER_UNIT is returned.
502
503 Otherwise, look at the expression to see if we can do better, i.e., if the
504 expression is actually pointing at an object whose alignment is tighter. */
505
506 unsigned int
507 get_pointer_alignment (tree exp)
508 {
509 unsigned HOST_WIDE_INT bitpos = 0;
510 unsigned int align;
511
512 get_pointer_alignment_1 (exp, &align, &bitpos);
513
514 /* align and bitpos now specify known low bits of the pointer.
515 ptr & (align - 1) == bitpos. */
516
517 if (bitpos != 0)
518 align = (bitpos & -bitpos);
519
520 return align;
521 }
522
523 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
524 way, because it could contain a zero byte in the middle.
525 TREE_STRING_LENGTH is the size of the character array, not the string.
526
527 ONLY_VALUE should be nonzero if the result is not going to be emitted
528 into the instruction stream and zero if it is going to be expanded.
529 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
530 is returned, otherwise NULL, since
531 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
532 evaluate the side-effects.
533
534 If ONLY_VALUE is two then we do not emit warnings about out-of-bound
535 accesses. Note that this implies the result is not going to be emitted
536 into the instruction stream.
537
538 The value returned is of type `ssizetype'.
539
540 Unfortunately, string_constant can't access the values of const char
541 arrays with initializers, so neither can we do so here. */
542
543 tree
544 c_strlen (tree src, int only_value)
545 {
546 tree offset_node;
547 HOST_WIDE_INT offset;
548 int max;
549 const char *ptr;
550 location_t loc;
551
552 STRIP_NOPS (src);
553 if (TREE_CODE (src) == COND_EXPR
554 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
555 {
556 tree len1, len2;
557
558 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
559 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
560 if (tree_int_cst_equal (len1, len2))
561 return len1;
562 }
563
564 if (TREE_CODE (src) == COMPOUND_EXPR
565 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
566 return c_strlen (TREE_OPERAND (src, 1), only_value);
567
568 loc = EXPR_LOC_OR_LOC (src, input_location);
569
570 src = string_constant (src, &offset_node);
571 if (src == 0)
572 return NULL_TREE;
573
574 max = TREE_STRING_LENGTH (src) - 1;
575 ptr = TREE_STRING_POINTER (src);
576
577 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
578 {
579 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
580 compute the offset to the following null if we don't know where to
581 start searching for it. */
582 int i;
583
584 for (i = 0; i < max; i++)
585 if (ptr[i] == 0)
586 return NULL_TREE;
587
588 /* We don't know the starting offset, but we do know that the string
589 has no internal zero bytes. We can assume that the offset falls
590 within the bounds of the string; otherwise, the programmer deserves
591 what he gets. Subtract the offset from the length of the string,
592 and return that. This would perhaps not be valid if we were dealing
593 with named arrays in addition to literal string constants. */
594
595 return size_diffop_loc (loc, size_int (max), offset_node);
596 }
597
598 /* We have a known offset into the string. Start searching there for
599 a null character if we can represent it as a single HOST_WIDE_INT. */
600 if (offset_node == 0)
601 offset = 0;
602 else if (! tree_fits_shwi_p (offset_node))
603 offset = -1;
604 else
605 offset = tree_to_shwi (offset_node);
606
607 /* If the offset is known to be out of bounds, warn, and call strlen at
608 runtime. */
609 if (offset < 0 || offset > max)
610 {
611 /* Suppress multiple warnings for propagated constant strings. */
612 if (only_value != 2
613 && !TREE_NO_WARNING (src))
614 {
615 warning_at (loc, 0, "offset outside bounds of constant string");
616 TREE_NO_WARNING (src) = 1;
617 }
618 return NULL_TREE;
619 }
620
621 /* Use strlen to search for the first zero byte. Since any strings
622 constructed with build_string will have nulls appended, we win even
623 if we get handed something like (char[4])"abcd".
624
625 Since OFFSET is our starting index into the string, no further
626 calculation is needed. */
627 return ssize_int (strlen (ptr + offset));
628 }
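
/* For example, given the STRING_CST "foobar" and a known offset of 2
   this returns ssize_int (strlen ("obar")) == 4; with a variable
   offset it returns size_int (6) - offset instead, which is safe only
   because "foobar" contains no embedded zero byte.  */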
629
630 /* Return a char pointer for a C string if it is a string constant
631 or sum of string constant and integer constant. */
632
633 const char *
634 c_getstr (tree src)
635 {
636 tree offset_node;
637
638 src = string_constant (src, &offset_node);
639 if (src == 0)
640 return 0;
641
642 if (offset_node == 0)
643 return TREE_STRING_POINTER (src);
644 else if (!tree_fits_uhwi_p (offset_node)
645 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
646 return 0;
647
648 return TREE_STRING_POINTER (src) + tree_to_uhwi (offset_node);
649 }
650
651 /* Return a constant integer corresponding to target reading
652 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
653
654 static rtx
655 c_readstr (const char *str, machine_mode mode)
656 {
657 HOST_WIDE_INT ch;
658 unsigned int i, j;
659 HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];
660
661 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
662 unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
663 / HOST_BITS_PER_WIDE_INT;
664
665 gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
666 for (i = 0; i < len; i++)
667 tmp[i] = 0;
668
669 ch = 1;
670 for (i = 0; i < GET_MODE_SIZE (mode); i++)
671 {
672 j = i;
673 if (WORDS_BIG_ENDIAN)
674 j = GET_MODE_SIZE (mode) - i - 1;
675 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
676 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
677 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
678 j *= BITS_PER_UNIT;
679
680 if (ch)
681 ch = (unsigned char) str[i];
682 tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
683 }
684
685 wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
686 return immed_wide_int_const (c, mode);
687 }
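
/* A concrete instance, assuming 8-bit units and matching byte and word
   endianness: c_readstr ("abcd", SImode) yields the constant
   0x64636261 on a little-endian target and 0x61626364 on a big-endian
   one; bytes past a terminating zero in STR read as zero.  */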
688
 689 /* Cast target constant CST to target CHAR; if the resulting value fits into
 690    the host char type, return zero and store it in the variable pointed to by
 691    P.  Otherwise return 1.  */
692
693 static int
694 target_char_cast (tree cst, char *p)
695 {
696 unsigned HOST_WIDE_INT val, hostval;
697
698 if (TREE_CODE (cst) != INTEGER_CST
699 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
700 return 1;
701
 702   /* We do not care here whether the value fits.  */
703 val = TREE_INT_CST_LOW (cst);
704
705 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
706 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
707
708 hostval = val;
709 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
710 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
711
712 if (val != hostval)
713 return 1;
714
715 *p = hostval;
716 return 0;
717 }
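
/* E.g. for the INTEGER_CST 65 ('A') this stores 65 in *P and returns
   zero; it returns 1 (failure) for non-INTEGER_CST arguments and when
   the truncated target value cannot be represented in a host char.  */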
718
719 /* Similar to save_expr, but assumes that arbitrary code is not executed
720 in between the multiple evaluations. In particular, we assume that a
721 non-addressable local variable will not be modified. */
722
723 static tree
724 builtin_save_expr (tree exp)
725 {
726 if (TREE_CODE (exp) == SSA_NAME
727 || (TREE_ADDRESSABLE (exp) == 0
728 && (TREE_CODE (exp) == PARM_DECL
729 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
730 return exp;
731
732 return save_expr (exp);
733 }
734
735 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
736 times to get the address of either a higher stack frame, or a return
737 address located within it (depending on FNDECL_CODE). */
738
739 static rtx
740 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
741 {
742 int i;
743 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
744 if (tem == NULL_RTX)
745 {
746 /* For a zero count with __builtin_return_address, we don't care what
747 frame address we return, because target-specific definitions will
748 override us. Therefore frame pointer elimination is OK, and using
749 the soft frame pointer is OK.
750
751 For a nonzero count, or a zero count with __builtin_frame_address,
752 we require a stable offset from the current frame pointer to the
753 previous one, so we must use the hard frame pointer, and
754 we must disable frame pointer elimination. */
755 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
756 tem = frame_pointer_rtx;
757 else
758 {
759 tem = hard_frame_pointer_rtx;
760
761 /* Tell reload not to eliminate the frame pointer. */
762 crtl->accesses_prior_frames = 1;
763 }
764 }
765
766 if (count > 0)
767 SETUP_FRAME_ADDRESSES ();
768
769 /* On the SPARC, the return address is not in the frame, it is in a
770 register. There is no way to access it off of the current frame
771 pointer, but it can be accessed off the previous frame pointer by
772 reading the value from the register window save area. */
773 if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
774 count--;
775
776 /* Scan back COUNT frames to the specified frame. */
777 for (i = 0; i < count; i++)
778 {
779 /* Assume the dynamic chain pointer is in the word that the
780 frame address points to, unless otherwise specified. */
781 tem = DYNAMIC_CHAIN_ADDRESS (tem);
782 tem = memory_address (Pmode, tem);
783 tem = gen_frame_mem (Pmode, tem);
784 tem = copy_to_reg (tem);
785 }
786
787 /* For __builtin_frame_address, return what we've got. But, on
788 the SPARC for example, we may have to add a bias. */
789 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
790 return FRAME_ADDR_RTX (tem);
791
792 /* For __builtin_return_address, get the return address from that frame. */
793 #ifdef RETURN_ADDR_RTX
794 tem = RETURN_ADDR_RTX (count, tem);
795 #else
796 tem = memory_address (Pmode,
797 plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
798 tem = gen_frame_mem (Pmode, tem);
799 #endif
800 return tem;
801 }
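
/* In source terms this expansion backs calls such as
   __builtin_return_address (0) and __builtin_frame_address (1); any
   COUNT > 0 walks the dynamic chain through the gen_frame_mem loads
   in the loop above.  */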
802
803 /* Alias set used for setjmp buffer. */
804 static alias_set_type setjmp_alias_set = -1;
805
806 /* Construct the leading half of a __builtin_setjmp call. Control will
807 return to RECEIVER_LABEL. This is also called directly by the SJLJ
808 exception handling code. */
809
810 void
811 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
812 {
813 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
814 rtx stack_save;
815 rtx mem;
816
817 if (setjmp_alias_set == -1)
818 setjmp_alias_set = new_alias_set ();
819
820 buf_addr = convert_memory_address (Pmode, buf_addr);
821
822 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
823
824 /* We store the frame pointer and the address of receiver_label in
825 the buffer and use the rest of it for the stack save area, which
826 is machine-dependent. */
827
828 mem = gen_rtx_MEM (Pmode, buf_addr);
829 set_mem_alias_set (mem, setjmp_alias_set);
830 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
831
832 mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
 833 			   GET_MODE_SIZE (Pmode)));
834 set_mem_alias_set (mem, setjmp_alias_set);
835
836 emit_move_insn (validize_mem (mem),
837 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
838
839 stack_save = gen_rtx_MEM (sa_mode,
840 plus_constant (Pmode, buf_addr,
841 2 * GET_MODE_SIZE (Pmode)));
842 set_mem_alias_set (stack_save, setjmp_alias_set);
843 emit_stack_save (SAVE_NONLOCAL, &stack_save);
844
845 /* If there is further processing to do, do it. */
846 if (targetm.have_builtin_setjmp_setup ())
847 emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));
848
849 /* We have a nonlocal label. */
850 cfun->has_nonlocal_label = 1;
851 }
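
/* The resulting buffer layout, in Pmode words (a sketch; the size of
   the stack save area is machine-dependent):
     word 0:  frame pointer value (targetm.builtin_setjmp_frame_value)
     word 1:  address of RECEIVER_LABEL
     word 2+: stack save area, in sa_mode
   expand_builtin_longjmp and expand_builtin_update_setjmp_buf below
   read the words back at the same offsets.  */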
852
853 /* Construct the trailing part of a __builtin_setjmp call. This is
854 also called directly by the SJLJ exception handling code.
 855    If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */
856
857 void
858 expand_builtin_setjmp_receiver (rtx receiver_label)
859 {
860 rtx chain;
861
862 /* Mark the FP as used when we get here, so we have to make sure it's
863 marked as used by this function. */
864 emit_use (hard_frame_pointer_rtx);
865
866 /* Mark the static chain as clobbered here so life information
867 doesn't get messed up for it. */
868 chain = targetm.calls.static_chain (current_function_decl, true);
869 if (chain && REG_P (chain))
870 emit_clobber (chain);
871
872 /* Now put in the code to restore the frame pointer, and argument
873 pointer, if needed. */
874 if (! targetm.have_nonlocal_goto ())
875 {
876 /* First adjust our frame pointer to its actual value. It was
877 previously set to the start of the virtual area corresponding to
878 the stacked variables when we branched here and now needs to be
879 adjusted to the actual hardware fp value.
880
881 Assignments to virtual registers are converted by
882 instantiate_virtual_regs into the corresponding assignment
883 to the underlying register (fp in this case) that makes
884 the original assignment true.
885 So the following insn will actually be decrementing fp by
886 STARTING_FRAME_OFFSET. */
887 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
888
889 /* Restoring the frame pointer also modifies the hard frame pointer.
890 Mark it used (so that the previous assignment remains live once
891 the frame pointer is eliminated) and clobbered (to represent the
892 implicit update from the assignment). */
893 emit_use (hard_frame_pointer_rtx);
894 emit_clobber (hard_frame_pointer_rtx);
895 }
896
897 if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
898 {
899 #ifdef ELIMINABLE_REGS
900 /* If the argument pointer can be eliminated in favor of the
901 frame pointer, we don't need to restore it. We assume here
902 that if such an elimination is present, it can always be used.
903 This is the case on all known machines; if we don't make this
904 assumption, we do unnecessary saving on many machines. */
905 size_t i;
906 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
907
908 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
909 if (elim_regs[i].from == ARG_POINTER_REGNUM
910 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
911 break;
912
913 if (i == ARRAY_SIZE (elim_regs))
914 #endif
915 {
916 /* Now restore our arg pointer from the address at which it
917 was saved in our stack frame. */
918 emit_move_insn (crtl->args.internal_arg_pointer,
919 copy_to_reg (get_arg_pointer_save_area ()));
920 }
921 }
922
923 if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
924 emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
925 else if (targetm.have_nonlocal_goto_receiver ())
926 emit_insn (targetm.gen_nonlocal_goto_receiver ());
927 else
928 { /* Nothing */ }
929
930 /* We must not allow the code we just generated to be reordered by
931 scheduling. Specifically, the update of the frame pointer must
932 happen immediately, not later. */
933 emit_insn (gen_blockage ());
934 }
935
936 /* __builtin_longjmp is passed a pointer to an array of five words (not
937 all will be used on all machines). It operates similarly to the C
938 library function of the same name, but is more efficient. Much of
939 the code below is copied from the handling of non-local gotos. */
940
941 static void
942 expand_builtin_longjmp (rtx buf_addr, rtx value)
943 {
944 rtx fp, lab, stack;
945 rtx_insn *insn, *last;
946 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
947
948 /* DRAP is needed for stack realign if longjmp is expanded to current
949 function */
950 if (SUPPORTS_STACK_ALIGNMENT)
951 crtl->need_drap = true;
952
953 if (setjmp_alias_set == -1)
954 setjmp_alias_set = new_alias_set ();
955
956 buf_addr = convert_memory_address (Pmode, buf_addr);
957
958 buf_addr = force_reg (Pmode, buf_addr);
959
960 /* We require that the user must pass a second argument of 1, because
961 that is what builtin_setjmp will return. */
962 gcc_assert (value == const1_rtx);
963
964 last = get_last_insn ();
965 if (targetm.have_builtin_longjmp ())
966 emit_insn (targetm.gen_builtin_longjmp (buf_addr));
967 else
968 {
969 fp = gen_rtx_MEM (Pmode, buf_addr);
970 lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
971 GET_MODE_SIZE (Pmode)));
972
973 stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
974 2 * GET_MODE_SIZE (Pmode)));
975 set_mem_alias_set (fp, setjmp_alias_set);
976 set_mem_alias_set (lab, setjmp_alias_set);
977 set_mem_alias_set (stack, setjmp_alias_set);
978
979 /* Pick up FP, label, and SP from the block and jump. This code is
980 from expand_goto in stmt.c; see there for detailed comments. */
981 if (targetm.have_nonlocal_goto ())
982 /* We have to pass a value to the nonlocal_goto pattern that will
983 get copied into the static_chain pointer, but it does not matter
984 what that value is, because builtin_setjmp does not use it. */
985 emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
986 else
987 {
988 lab = copy_to_reg (lab);
989
990 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
991 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
992
993 emit_move_insn (hard_frame_pointer_rtx, fp);
994 emit_stack_restore (SAVE_NONLOCAL, stack);
995
996 emit_use (hard_frame_pointer_rtx);
997 emit_use (stack_pointer_rtx);
998 emit_indirect_jump (lab);
999 }
1000 }
1001
1002 /* Search backwards and mark the jump insn as a non-local goto.
1003 Note that this precludes the use of __builtin_longjmp to a
1004 __builtin_setjmp target in the same function. However, we've
1005 already cautioned the user that these functions are for
1006 internal exception handling use only. */
1007 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1008 {
1009 gcc_assert (insn != last);
1010
1011 if (JUMP_P (insn))
1012 {
1013 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1014 break;
1015 }
1016 else if (CALL_P (insn))
1017 break;
1018 }
1019 }
1020
1021 static inline bool
1022 more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
1023 {
1024 return (iter->i < iter->n);
1025 }
1026
1027 /* This function validates the types of a function call argument list
 1028    against a specified list of tree_codes.  If the last specifier is a 0,
 1029    that represents an ellipsis; otherwise the last specifier must be a
1030 VOID_TYPE. */
1031
1032 static bool
1033 validate_arglist (const_tree callexpr, ...)
1034 {
1035 enum tree_code code;
 1036   bool res = false;
1037 va_list ap;
1038 const_call_expr_arg_iterator iter;
1039 const_tree arg;
1040
1041 va_start (ap, callexpr);
1042 init_const_call_expr_arg_iterator (callexpr, &iter);
1043
1044 do
1045 {
1046 code = (enum tree_code) va_arg (ap, int);
1047 switch (code)
1048 {
1049 case 0:
 1050 	  /* This signifies an ellipsis; any further arguments are all ok.  */
1051 res = true;
1052 goto end;
1053 case VOID_TYPE:
1054 /* This signifies an endlink, if no arguments remain, return
1055 true, otherwise return false. */
1056 res = !more_const_call_expr_args_p (&iter);
1057 goto end;
1058 default:
1059 /* If no parameters remain or the parameter's code does not
1060 match the specified code, return false. Otherwise continue
1061 checking any remaining arguments. */
1062 arg = next_const_call_expr_arg (&iter);
1063 if (!validate_arg (arg, code))
1064 goto end;
1065 break;
1066 }
1067 }
1068 while (1);
1069
1070 /* We need gotos here since we can only have one VA_CLOSE in a
1071 function. */
1072 end: ;
1073 va_end (ap);
1074
1075 return res;
1076 }
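
/* Typical uses from this file: validate_arglist (exp, POINTER_TYPE,
   POINTER_TYPE, VOID_TYPE) accepts exactly two pointer arguments,
   while validate_arglist (exp, POINTER_TYPE, 0) accepts a pointer
   followed by any further arguments.  */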
1077
1078 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
1079 and the address of the save area. */
1080
1081 static rtx
1082 expand_builtin_nonlocal_goto (tree exp)
1083 {
1084 tree t_label, t_save_area;
1085 rtx r_label, r_save_area, r_fp, r_sp;
1086 rtx_insn *insn;
1087
1088 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
1089 return NULL_RTX;
1090
1091 t_label = CALL_EXPR_ARG (exp, 0);
1092 t_save_area = CALL_EXPR_ARG (exp, 1);
1093
1094 r_label = expand_normal (t_label);
1095 r_label = convert_memory_address (Pmode, r_label);
1096 r_save_area = expand_normal (t_save_area);
1097 r_save_area = convert_memory_address (Pmode, r_save_area);
1098 /* Copy the address of the save location to a register just in case it was
1099 based on the frame pointer. */
1100 r_save_area = copy_to_reg (r_save_area);
1101 r_fp = gen_rtx_MEM (Pmode, r_save_area);
1102 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
1103 plus_constant (Pmode, r_save_area,
1104 GET_MODE_SIZE (Pmode)));
1105
1106 crtl->has_nonlocal_goto = 1;
1107
1108 /* ??? We no longer need to pass the static chain value, afaik. */
1109 if (targetm.have_nonlocal_goto ())
1110 emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
1111 else
1112 {
1113 r_label = copy_to_reg (r_label);
1114
1115 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1116 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1117
1118 /* Restore frame pointer for containing function. */
1119 emit_move_insn (hard_frame_pointer_rtx, r_fp);
1120 emit_stack_restore (SAVE_NONLOCAL, r_sp);
1121
1122 /* USE of hard_frame_pointer_rtx added for consistency;
1123 not clear if really needed. */
1124 emit_use (hard_frame_pointer_rtx);
1125 emit_use (stack_pointer_rtx);
1126
1127 /* If the architecture is using a GP register, we must
1128 conservatively assume that the target function makes use of it.
1129 The prologue of functions with nonlocal gotos must therefore
1130 initialize the GP register to the appropriate value, and we
1131 must then make sure that this value is live at the point
1132 of the jump. (Note that this doesn't necessarily apply
1133 to targets with a nonlocal_goto pattern; they are free
1134 to implement it in their own way. Note also that this is
1135 a no-op if the GP register is a global invariant.) */
1136 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
1137 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
1138 emit_use (pic_offset_table_rtx);
1139
1140 emit_indirect_jump (r_label);
1141 }
1142
1143 /* Search backwards to the jump insn and mark it as a
1144 non-local goto. */
1145 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1146 {
1147 if (JUMP_P (insn))
1148 {
1149 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1150 break;
1151 }
1152 else if (CALL_P (insn))
1153 break;
1154 }
1155
1156 return const0_rtx;
1157 }
1158
1159 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1160 (not all will be used on all machines) that was passed to __builtin_setjmp.
1161 It updates the stack pointer in that block to the current value. This is
1162 also called directly by the SJLJ exception handling code. */
1163
1164 void
1165 expand_builtin_update_setjmp_buf (rtx buf_addr)
1166 {
1167 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1168 rtx stack_save
1169 = gen_rtx_MEM (sa_mode,
1170 memory_address
1171 (sa_mode,
1172 plus_constant (Pmode, buf_addr,
1173 2 * GET_MODE_SIZE (Pmode))));
1174
1175 emit_stack_save (SAVE_NONLOCAL, &stack_save);
1176 }
1177
1178 /* Expand a call to __builtin_prefetch. For a target that does not support
1179 data prefetch, evaluate the memory address argument in case it has side
1180 effects. */
1181
1182 static void
1183 expand_builtin_prefetch (tree exp)
1184 {
1185 tree arg0, arg1, arg2;
1186 int nargs;
1187 rtx op0, op1, op2;
1188
1189 if (!validate_arglist (exp, POINTER_TYPE, 0))
1190 return;
1191
1192 arg0 = CALL_EXPR_ARG (exp, 0);
1193
1194 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1195 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1196 locality). */
1197 nargs = call_expr_nargs (exp);
1198 if (nargs > 1)
1199 arg1 = CALL_EXPR_ARG (exp, 1);
1200 else
1201 arg1 = integer_zero_node;
1202 if (nargs > 2)
1203 arg2 = CALL_EXPR_ARG (exp, 2);
1204 else
1205 arg2 = integer_three_node;
1206
1207 /* Argument 0 is an address. */
1208 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1209
1210 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1211 if (TREE_CODE (arg1) != INTEGER_CST)
1212 {
1213 error ("second argument to %<__builtin_prefetch%> must be a constant");
1214 arg1 = integer_zero_node;
1215 }
1216 op1 = expand_normal (arg1);
1217 /* Argument 1 must be either zero or one. */
1218 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1219 {
1220 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1221 " using zero");
1222 op1 = const0_rtx;
1223 }
1224
1225 /* Argument 2 (locality) must be a compile-time constant int. */
1226 if (TREE_CODE (arg2) != INTEGER_CST)
1227 {
1228 error ("third argument to %<__builtin_prefetch%> must be a constant");
1229 arg2 = integer_zero_node;
1230 }
1231 op2 = expand_normal (arg2);
1232 /* Argument 2 must be 0, 1, 2, or 3. */
1233 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1234 {
1235 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1236 op2 = const0_rtx;
1237 }
1238
1239 if (targetm.have_prefetch ())
1240 {
1241 struct expand_operand ops[3];
1242
1243 create_address_operand (&ops[0], op0);
1244 create_integer_operand (&ops[1], INTVAL (op1));
1245 create_integer_operand (&ops[2], INTVAL (op2));
1246 if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
1247 return;
1248 }
1249
1250 /* Don't do anything with direct references to volatile memory, but
1251 generate code to handle other side effects. */
1252 if (!MEM_P (op0) && side_effects_p (op0))
1253 emit_insn (op0);
1254 }
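
/* A corresponding source-level use is __builtin_prefetch (&a[i], 0, 3):
   argument 1 selects read (0) or write (1) and argument 2 the degree of
   temporal locality, 0 through 3.  */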
1255
1256 /* Get a MEM rtx for expression EXP which is the address of an operand
1257 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1258 the maximum length of the block of memory that might be accessed or
1259 NULL if unknown. */
1260
1261 static rtx
1262 get_memory_rtx (tree exp, tree len)
1263 {
1264 tree orig_exp = exp;
1265 rtx addr, mem;
1266
 1267   /* When EXP is an unresolved SAVE_EXPR, MEM_ATTRS can still be derived
 1268      from its expression; for expr->a.b only <variable>.a.b is recorded.  */
1269 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1270 exp = TREE_OPERAND (exp, 0);
1271
1272 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1273 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1274
1275 /* Get an expression we can use to find the attributes to assign to MEM.
1276 First remove any nops. */
1277 while (CONVERT_EXPR_P (exp)
1278 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1279 exp = TREE_OPERAND (exp, 0);
1280
 1281   /* Build a MEM_REF representing the whole accessed area as a byte blob
 1282      (as builtin stringops may alias with anything).  */
1283 exp = fold_build2 (MEM_REF,
1284 build_array_type (char_type_node,
1285 build_range_type (sizetype,
1286 size_one_node, len)),
1287 exp, build_int_cst (ptr_type_node, 0));
1288
1289 /* If the MEM_REF has no acceptable address, try to get the base object
1290 from the original address we got, and build an all-aliasing
1291 unknown-sized access to that one. */
1292 if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
1293 set_mem_attributes (mem, exp, 0);
1294 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1295 && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
1296 0))))
1297 {
1298 exp = build_fold_addr_expr (exp);
1299 exp = fold_build2 (MEM_REF,
1300 build_array_type (char_type_node,
1301 build_range_type (sizetype,
1302 size_zero_node,
1303 NULL)),
1304 exp, build_int_cst (ptr_type_node, 0));
1305 set_mem_attributes (mem, exp, 0);
1306 }
1307 set_mem_alias_set (mem, 0);
1308 return mem;
1309 }
1310 \f
1311 /* Built-in functions to perform an untyped call and return. */
1312
1313 #define apply_args_mode \
1314 (this_target_builtins->x_apply_args_mode)
1315 #define apply_result_mode \
1316 (this_target_builtins->x_apply_result_mode)
1317
1318 /* Return the size required for the block returned by __builtin_apply_args,
1319 and initialize apply_args_mode. */
1320
1321 static int
1322 apply_args_size (void)
1323 {
1324 static int size = -1;
1325 int align;
1326 unsigned int regno;
1327 machine_mode mode;
1328
1329 /* The values computed by this function never change. */
1330 if (size < 0)
1331 {
1332 /* The first value is the incoming arg-pointer. */
1333 size = GET_MODE_SIZE (Pmode);
1334
1335 /* The second value is the structure value address unless this is
1336 passed as an "invisible" first argument. */
1337 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1338 size += GET_MODE_SIZE (Pmode);
1339
1340 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1341 if (FUNCTION_ARG_REGNO_P (regno))
1342 {
1343 mode = targetm.calls.get_raw_arg_mode (regno);
1344
1345 gcc_assert (mode != VOIDmode);
1346
1347 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1348 if (size % align != 0)
1349 size = CEIL (size, align) * align;
1350 size += GET_MODE_SIZE (mode);
1351 apply_args_mode[regno] = mode;
1352 }
1353 else
1354 {
1355 apply_args_mode[regno] = VOIDmode;
1356 }
1357 }
1358 return size;
1359 }
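
/* As a sketch for a hypothetical 32-bit target with two SImode argument
   registers and no struct-value register: 4 bytes for the incoming arg
   pointer plus 2 * 4 bytes for the registers gives a 12-byte block,
   with apply_args_mode set to SImode for those register numbers and
   VOIDmode for all others.  */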
1360
1361 /* Return the size required for the block returned by __builtin_apply,
1362 and initialize apply_result_mode. */
1363
1364 static int
1365 apply_result_size (void)
1366 {
1367 static int size = -1;
1368 int align, regno;
1369 machine_mode mode;
1370
1371 /* The values computed by this function never change. */
1372 if (size < 0)
1373 {
1374 size = 0;
1375
1376 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1377 if (targetm.calls.function_value_regno_p (regno))
1378 {
1379 mode = targetm.calls.get_raw_result_mode (regno);
1380
1381 gcc_assert (mode != VOIDmode);
1382
1383 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1384 if (size % align != 0)
1385 size = CEIL (size, align) * align;
1386 size += GET_MODE_SIZE (mode);
1387 apply_result_mode[regno] = mode;
1388 }
1389 else
1390 apply_result_mode[regno] = VOIDmode;
1391
1392 /* Allow targets that use untyped_call and untyped_return to override
1393 the size so that machine-specific information can be stored here. */
1394 #ifdef APPLY_RESULT_SIZE
1395 size = APPLY_RESULT_SIZE;
1396 #endif
1397 }
1398 return size;
1399 }
1400
1401 /* Create a vector describing the result block RESULT. If SAVEP is true,
1402 the result block is used to save the values; otherwise it is used to
1403 restore the values. */
1404
1405 static rtx
1406 result_vector (int savep, rtx result)
1407 {
1408 int regno, size, align, nelts;
1409 machine_mode mode;
1410 rtx reg, mem;
1411 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1412
1413 size = nelts = 0;
1414 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1415 if ((mode = apply_result_mode[regno]) != VOIDmode)
1416 {
1417 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1418 if (size % align != 0)
1419 size = CEIL (size, align) * align;
1420 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1421 mem = adjust_address (result, mode, size);
1422 savevec[nelts++] = (savep
1423 ? gen_rtx_SET (mem, reg)
1424 : gen_rtx_SET (reg, mem));
1425 size += GET_MODE_SIZE (mode);
1426 }
1427 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1428 }
1429
1430 /* Save the state required to perform an untyped call with the same
1431 arguments as were passed to the current function. */
1432
1433 static rtx
1434 expand_builtin_apply_args_1 (void)
1435 {
1436 rtx registers, tem;
1437 int size, align, regno;
1438 machine_mode mode;
1439 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1440
1441 /* Create a block where the arg-pointer, structure value address,
1442 and argument registers can be saved. */
1443 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1444
1445 /* Walk past the arg-pointer and structure value address. */
1446 size = GET_MODE_SIZE (Pmode);
1447 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1448 size += GET_MODE_SIZE (Pmode);
1449
1450 /* Save each register used in calling a function to the block. */
1451 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1452 if ((mode = apply_args_mode[regno]) != VOIDmode)
1453 {
1454 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1455 if (size % align != 0)
1456 size = CEIL (size, align) * align;
1457
1458 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1459
1460 emit_move_insn (adjust_address (registers, mode, size), tem);
1461 size += GET_MODE_SIZE (mode);
1462 }
1463
1464 /* Save the arg pointer to the block. */
1465 tem = copy_to_reg (crtl->args.internal_arg_pointer);
 1466   /* We need the pointer as the arguments were actually passed to us by
 1467      the caller, not as we might have pretended they were passed.  Make sure
 1468      it's a valid operand, as emit_move_insn isn't expected to handle a PLUS.  */
1469 if (STACK_GROWS_DOWNWARD)
1470 tem
1471 = force_operand (plus_constant (Pmode, tem,
1472 crtl->args.pretend_args_size),
1473 NULL_RTX);
1474 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1475
1476 size = GET_MODE_SIZE (Pmode);
1477
1478 /* Save the structure value address unless this is passed as an
1479 "invisible" first argument. */
1480 if (struct_incoming_value)
1481 {
1482 emit_move_insn (adjust_address (registers, Pmode, size),
1483 copy_to_reg (struct_incoming_value));
1484 size += GET_MODE_SIZE (Pmode);
1485 }
1486
1487 /* Return the address of the block. */
1488 return copy_addr_to_reg (XEXP (registers, 0));
1489 }
1490
1491 /* __builtin_apply_args returns block of memory allocated on
1492 the stack into which is stored the arg pointer, structure
1493 value address, static chain, and all the registers that might
1494 possibly be used in performing a function call. The code is
1495 moved to the start of the function so the incoming values are
1496 saved. */
1497
1498 static rtx
1499 expand_builtin_apply_args (void)
1500 {
1501 /* Don't do __builtin_apply_args more than once in a function.
1502 Save the result of the first call and reuse it. */
1503 if (apply_args_value != 0)
1504 return apply_args_value;
1505 {
1506 /* When this function is called, it means that registers must be
1507 saved on entry to this function. So we migrate the
1508 call to the first insn of this function. */
1509 rtx temp;
1510
1511 start_sequence ();
1512 temp = expand_builtin_apply_args_1 ();
1513 rtx_insn *seq = get_insns ();
1514 end_sequence ();
1515
1516 apply_args_value = temp;
1517
1518 /* Put the insns after the NOTE that starts the function.
1519 If this is inside a start_sequence, make the outer-level insn
1520 chain current, so the code is placed at the start of the
1521 function. If internal_arg_pointer is a non-virtual pseudo,
1522 it needs to be placed after the function that initializes
1523 that pseudo. */
1524 push_topmost_sequence ();
1525 if (REG_P (crtl->args.internal_arg_pointer)
1526 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1527 emit_insn_before (seq, parm_birth_insn);
1528 else
1529 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1530 pop_topmost_sequence ();
1531 return temp;
1532 }
1533 }
1534
1535 /* Perform an untyped call and save the state required to perform an
1536 untyped return of whatever value was returned by the given function. */
1537
1538 static rtx
1539 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1540 {
1541 int size, align, regno;
1542 machine_mode mode;
1543 rtx incoming_args, result, reg, dest, src;
1544 rtx_call_insn *call_insn;
1545 rtx old_stack_level = 0;
1546 rtx call_fusage = 0;
1547 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1548
1549 arguments = convert_memory_address (Pmode, arguments);
1550
1551 /* Create a block where the return registers can be saved. */
1552 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1553
1554 /* Fetch the arg pointer from the ARGUMENTS block. */
1555 incoming_args = gen_reg_rtx (Pmode);
1556 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1557 if (!STACK_GROWS_DOWNWARD)
1558 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1559 incoming_args, 0, OPTAB_LIB_WIDEN);
1560
1561 /* Push a new argument block and copy the arguments. Do not allow
1562 the (potential) memcpy call below to interfere with our stack
1563 manipulations. */
1564 do_pending_stack_adjust ();
1565 NO_DEFER_POP;
1566
1567 /* Save the stack with nonlocal if available. */
1568 if (targetm.have_save_stack_nonlocal ())
1569 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1570 else
1571 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1572
1573 /* Allocate a block of memory onto the stack and copy the memory
1574 arguments to the outgoing arguments address. We can pass TRUE
1575 as the 4th argument because we just saved the stack pointer
1576 and will restore it right after the call. */
1577 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);
1578
1579 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1580 may have already set current_function_calls_alloca to true.
1581 current_function_calls_alloca won't be set if argsize is zero,
1582 so we have to guarantee need_drap is true here. */
1583 if (SUPPORTS_STACK_ALIGNMENT)
1584 crtl->need_drap = true;
1585
1586 dest = virtual_outgoing_args_rtx;
1587 if (!STACK_GROWS_DOWNWARD)
1588 {
1589 if (CONST_INT_P (argsize))
1590 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1591 else
1592 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1593 }
1594 dest = gen_rtx_MEM (BLKmode, dest);
1595 set_mem_align (dest, PARM_BOUNDARY);
1596 src = gen_rtx_MEM (BLKmode, incoming_args);
1597 set_mem_align (src, PARM_BOUNDARY);
1598 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1599
1600 /* Refer to the argument block. */
1601 apply_args_size ();
1602 arguments = gen_rtx_MEM (BLKmode, arguments);
1603 set_mem_align (arguments, PARM_BOUNDARY);
1604
1605 /* Walk past the arg-pointer and structure value address. */
1606 size = GET_MODE_SIZE (Pmode);
1607 if (struct_value)
1608 size += GET_MODE_SIZE (Pmode);
1609
1610 /* Restore each of the registers previously saved. Make USE insns
1611 for each of these registers for use in making the call. */
1612 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1613 if ((mode = apply_args_mode[regno]) != VOIDmode)
1614 {
1615 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1616 if (size % align != 0)
1617 size = CEIL (size, align) * align;
1618 reg = gen_rtx_REG (mode, regno);
1619 emit_move_insn (reg, adjust_address (arguments, mode, size));
1620 use_reg (&call_fusage, reg);
1621 size += GET_MODE_SIZE (mode);
1622 }
1623
1624 /* Restore the structure value address unless this is passed as an
1625 "invisible" first argument. */
1626 size = GET_MODE_SIZE (Pmode);
1627 if (struct_value)
1628 {
1629 rtx value = gen_reg_rtx (Pmode);
1630 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1631 emit_move_insn (struct_value, value);
1632 if (REG_P (struct_value))
1633 use_reg (&call_fusage, struct_value);
1634 size += GET_MODE_SIZE (Pmode);
1635 }
1636
1637 /* All arguments and registers used for the call are set up by now! */
1638 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1639
1640 /* Ensure the address is valid. A SYMBOL_REF is already valid, so nothing
1641 needs to be done, and we don't want to load it into a register as an
1642 optimization because prepare_call_address already did so if appropriate. */
1643 if (GET_CODE (function) != SYMBOL_REF)
1644 function = memory_address (FUNCTION_MODE, function);
1645
1646 /* Generate the actual call instruction and save the return value. */
1647 if (targetm.have_untyped_call ())
1648 {
1649 rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
1650 emit_call_insn (targetm.gen_untyped_call (mem, result,
1651 result_vector (1, result)));
1652 }
1653 else if (targetm.have_call_value ())
1654 {
1655 rtx valreg = 0;
1656
1657 /* Locate the unique return register. It is not possible to
1658 express a call that sets more than one return register using
1659 call_value; use untyped_call for that. In fact, untyped_call
1660 only needs to save the return registers in the given block. */
1661 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1662 if ((mode = apply_result_mode[regno]) != VOIDmode)
1663 {
1664 gcc_assert (!valreg); /* have_untyped_call required. */
1665
1666 valreg = gen_rtx_REG (mode, regno);
1667 }
1668
1669 emit_insn (targetm.gen_call_value (valreg,
1670 gen_rtx_MEM (FUNCTION_MODE, function),
1671 const0_rtx, NULL_RTX, const0_rtx));
1672
1673 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1674 }
1675 else
1676 gcc_unreachable ();
1677
1678 /* Find the CALL insn we just emitted, and attach the register usage
1679 information. */
1680 call_insn = last_call_insn ();
1681 add_function_usage_to (call_insn, call_fusage);
1682
1683 /* Restore the stack. */
1684 if (targetm.have_save_stack_nonlocal ())
1685 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1686 else
1687 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1688 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1689
1690 OK_DEFER_POP;
1691
1692 /* Return the address of the result block. */
1693 result = copy_addr_to_reg (XEXP (result, 0));
1694 return convert_memory_address (ptr_mode, result);
1695 }
1696
1697 /* Perform an untyped return. */
1698
1699 static void
1700 expand_builtin_return (rtx result)
1701 {
1702 int size, align, regno;
1703 machine_mode mode;
1704 rtx reg;
1705 rtx_insn *call_fusage = 0;
1706
1707 result = convert_memory_address (Pmode, result);
1708
1709 apply_result_size ();
1710 result = gen_rtx_MEM (BLKmode, result);
1711
1712 if (targetm.have_untyped_return ())
1713 {
1714 rtx vector = result_vector (0, result);
1715 emit_jump_insn (targetm.gen_untyped_return (result, vector));
1716 emit_barrier ();
1717 return;
1718 }
1719
1720 /* Restore the return value and note that each value is used. */
1721 size = 0;
1722 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1723 if ((mode = apply_result_mode[regno]) != VOIDmode)
1724 {
1725 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1726 if (size % align != 0)
1727 size = CEIL (size, align) * align;
1728 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1729 emit_move_insn (reg, adjust_address (result, mode, size));
1730
1731 push_to_sequence (call_fusage);
1732 emit_use (reg);
1733 call_fusage = get_insns ();
1734 end_sequence ();
1735 size += GET_MODE_SIZE (mode);
1736 }
1737
1738 /* Put the USE insns before the return. */
1739 emit_insn (call_fusage);
1740
1741 /* Return whatever values were restored by jumping directly to the end
1742 of the function. */
1743 expand_naked_return ();
1744 }
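
/* The untyped call machinery above implements GCC's "constructing
   calls" extension (__builtin_apply_args, __builtin_apply and
   __builtin_return).  A minimal user-level sketch; the callee and the
   64-byte argument-block size are illustrative assumptions, as the
   real size is target- and prototype-dependent:

     double callee (int i, double d);

     double
     forwarder (int i, double d)
     {
       void *args = __builtin_apply_args ();
       void *ret = __builtin_apply ((void (*) ()) callee, args, 64);
       __builtin_return (ret);
     }
*/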
1745
1746 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1747
1748 static enum type_class
1749 type_to_class (tree type)
1750 {
1751 switch (TREE_CODE (type))
1752 {
1753 case VOID_TYPE: return void_type_class;
1754 case INTEGER_TYPE: return integer_type_class;
1755 case ENUMERAL_TYPE: return enumeral_type_class;
1756 case BOOLEAN_TYPE: return boolean_type_class;
1757 case POINTER_TYPE: return pointer_type_class;
1758 case REFERENCE_TYPE: return reference_type_class;
1759 case OFFSET_TYPE: return offset_type_class;
1760 case REAL_TYPE: return real_type_class;
1761 case COMPLEX_TYPE: return complex_type_class;
1762 case FUNCTION_TYPE: return function_type_class;
1763 case METHOD_TYPE: return method_type_class;
1764 case RECORD_TYPE: return record_type_class;
1765 case UNION_TYPE:
1766 case QUAL_UNION_TYPE: return union_type_class;
1767 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1768 ? string_type_class : array_type_class);
1769 case LANG_TYPE: return lang_type_class;
1770 default: return no_type_class;
1771 }
1772 }
1773
1774 /* Expand a call EXP to __builtin_classify_type. */
1775
1776 static rtx
1777 expand_builtin_classify_type (tree exp)
1778 {
1779 if (call_expr_nargs (exp))
1780 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1781 return GEN_INT (no_type_class);
1782 }
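
/* A sketch of what the user-level builtin evaluates to, using the
   type_class values from typeclass.h (the string case assumes C,
   where the array argument decays to a pointer):

     __builtin_classify_type (0)      == integer_type_class
     __builtin_classify_type (0.0)    == real_type_class
     __builtin_classify_type ("abc")  == pointer_type_class
*/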
1783
1784 /* This helper macro, meant to be used in mathfn_built_in below,
1785 determines which among a set of three builtin math functions is
1786 appropriate for a given type mode. The `F' and `L' cases are
1787 automatically generated from the `double' case. */
1788 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1789 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1790 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F; \
1791 fcodel = BUILT_IN_MATHFN##L; break;
1792 /* Similar to above, but appends _R after any F/L suffix. */
1793 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1794 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1795 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R; \
1796 fcodel = BUILT_IN_MATHFN##L_R; break;
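
/* For example, CASE_MATHFN (BUILT_IN_SIN) expands to

     case BUILT_IN_SIN: case BUILT_IN_SINF: case BUILT_IN_SINL:
       fcode = BUILT_IN_SIN; fcodef = BUILT_IN_SINF;
       fcodel = BUILT_IN_SINL; break;

   covering the double, float and long double variants in one case.  */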
1797
1798 /* Return the mathematical function equivalent to FN but operating directly on TYPE,
1799 if available. If IMPLICIT is true use the implicit builtin declaration,
1800 otherwise use the explicit declaration. If we can't do the conversion,
1801 return zero. */
1802
1803 static tree
1804 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit_p)
1805 {
1806 enum built_in_function fcode, fcodef, fcodel, fcode2;
1807
1808 switch (fn)
1809 {
1810 CASE_MATHFN (BUILT_IN_ACOS)
1811 CASE_MATHFN (BUILT_IN_ACOSH)
1812 CASE_MATHFN (BUILT_IN_ASIN)
1813 CASE_MATHFN (BUILT_IN_ASINH)
1814 CASE_MATHFN (BUILT_IN_ATAN)
1815 CASE_MATHFN (BUILT_IN_ATAN2)
1816 CASE_MATHFN (BUILT_IN_ATANH)
1817 CASE_MATHFN (BUILT_IN_CBRT)
1818 CASE_MATHFN (BUILT_IN_CEIL)
1819 CASE_MATHFN (BUILT_IN_CEXPI)
1820 CASE_MATHFN (BUILT_IN_COPYSIGN)
1821 CASE_MATHFN (BUILT_IN_COS)
1822 CASE_MATHFN (BUILT_IN_COSH)
1823 CASE_MATHFN (BUILT_IN_DREM)
1824 CASE_MATHFN (BUILT_IN_ERF)
1825 CASE_MATHFN (BUILT_IN_ERFC)
1826 CASE_MATHFN (BUILT_IN_EXP)
1827 CASE_MATHFN (BUILT_IN_EXP10)
1828 CASE_MATHFN (BUILT_IN_EXP2)
1829 CASE_MATHFN (BUILT_IN_EXPM1)
1830 CASE_MATHFN (BUILT_IN_FABS)
1831 CASE_MATHFN (BUILT_IN_FDIM)
1832 CASE_MATHFN (BUILT_IN_FLOOR)
1833 CASE_MATHFN (BUILT_IN_FMA)
1834 CASE_MATHFN (BUILT_IN_FMAX)
1835 CASE_MATHFN (BUILT_IN_FMIN)
1836 CASE_MATHFN (BUILT_IN_FMOD)
1837 CASE_MATHFN (BUILT_IN_FREXP)
1838 CASE_MATHFN (BUILT_IN_GAMMA)
1839 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1840 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1841 CASE_MATHFN (BUILT_IN_HYPOT)
1842 CASE_MATHFN (BUILT_IN_ILOGB)
1843 CASE_MATHFN (BUILT_IN_ICEIL)
1844 CASE_MATHFN (BUILT_IN_IFLOOR)
1845 CASE_MATHFN (BUILT_IN_INF)
1846 CASE_MATHFN (BUILT_IN_IRINT)
1847 CASE_MATHFN (BUILT_IN_IROUND)
1848 CASE_MATHFN (BUILT_IN_ISINF)
1849 CASE_MATHFN (BUILT_IN_J0)
1850 CASE_MATHFN (BUILT_IN_J1)
1851 CASE_MATHFN (BUILT_IN_JN)
1852 CASE_MATHFN (BUILT_IN_LCEIL)
1853 CASE_MATHFN (BUILT_IN_LDEXP)
1854 CASE_MATHFN (BUILT_IN_LFLOOR)
1855 CASE_MATHFN (BUILT_IN_LGAMMA)
1856 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1857 CASE_MATHFN (BUILT_IN_LLCEIL)
1858 CASE_MATHFN (BUILT_IN_LLFLOOR)
1859 CASE_MATHFN (BUILT_IN_LLRINT)
1860 CASE_MATHFN (BUILT_IN_LLROUND)
1861 CASE_MATHFN (BUILT_IN_LOG)
1862 CASE_MATHFN (BUILT_IN_LOG10)
1863 CASE_MATHFN (BUILT_IN_LOG1P)
1864 CASE_MATHFN (BUILT_IN_LOG2)
1865 CASE_MATHFN (BUILT_IN_LOGB)
1866 CASE_MATHFN (BUILT_IN_LRINT)
1867 CASE_MATHFN (BUILT_IN_LROUND)
1868 CASE_MATHFN (BUILT_IN_MODF)
1869 CASE_MATHFN (BUILT_IN_NAN)
1870 CASE_MATHFN (BUILT_IN_NANS)
1871 CASE_MATHFN (BUILT_IN_NEARBYINT)
1872 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1873 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1874 CASE_MATHFN (BUILT_IN_POW)
1875 CASE_MATHFN (BUILT_IN_POWI)
1876 CASE_MATHFN (BUILT_IN_POW10)
1877 CASE_MATHFN (BUILT_IN_REMAINDER)
1878 CASE_MATHFN (BUILT_IN_REMQUO)
1879 CASE_MATHFN (BUILT_IN_RINT)
1880 CASE_MATHFN (BUILT_IN_ROUND)
1881 CASE_MATHFN (BUILT_IN_SCALB)
1882 CASE_MATHFN (BUILT_IN_SCALBLN)
1883 CASE_MATHFN (BUILT_IN_SCALBN)
1884 CASE_MATHFN (BUILT_IN_SIGNBIT)
1885 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1886 CASE_MATHFN (BUILT_IN_SIN)
1887 CASE_MATHFN (BUILT_IN_SINCOS)
1888 CASE_MATHFN (BUILT_IN_SINH)
1889 CASE_MATHFN (BUILT_IN_SQRT)
1890 CASE_MATHFN (BUILT_IN_TAN)
1891 CASE_MATHFN (BUILT_IN_TANH)
1892 CASE_MATHFN (BUILT_IN_TGAMMA)
1893 CASE_MATHFN (BUILT_IN_TRUNC)
1894 CASE_MATHFN (BUILT_IN_Y0)
1895 CASE_MATHFN (BUILT_IN_Y1)
1896 CASE_MATHFN (BUILT_IN_YN)
1897
1898 default:
1899 return NULL_TREE;
1900 }
1901
1902 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1903 fcode2 = fcode;
1904 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1905 fcode2 = fcodef;
1906 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1907 fcode2 = fcodel;
1908 else
1909 return NULL_TREE;
1910
1911 if (implicit_p && !builtin_decl_implicit_p (fcode2))
1912 return NULL_TREE;
1913
1914 return builtin_decl_explicit (fcode2);
1915 }
1916
1917 /* Like mathfn_built_in_1(), but always use the implicit builtin declarations. */
1918
1919 tree
1920 mathfn_built_in (tree type, enum built_in_function fn)
1921 {
1922 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1923 }
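
/* A sketch of the lookup performed above, assuming the target
   implicitly provides sinf:

     tree fn = mathfn_built_in (float_type_node, BUILT_IN_SIN);

   FN is then the builtin decl for sinf, or NULL_TREE if no float
   variant is implicitly available.  */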
1924
1925 /* If errno must be maintained, expand the RTL to check if the result,
1926 TARGET, of a built-in function call, EXP, is NaN, and if so set
1927 errno to EDOM. */
1928
1929 static void
1930 expand_errno_check (tree exp, rtx target)
1931 {
1932 rtx_code_label *lab = gen_label_rtx ();
1933
1934 /* Test the result against itself; only a NaN compares unequal to itself.
1935 If the result is NaN, set errno=EDOM since the argument was outside the domain. */
1936 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
1937 NULL_RTX, NULL, lab,
1938 /* The jump is very likely. */
1939 REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));
1940
1941 #ifdef TARGET_EDOM
1942 /* If this built-in doesn't throw an exception, set errno directly. */
1943 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1944 {
1945 #ifdef GEN_ERRNO_RTX
1946 rtx errno_rtx = GEN_ERRNO_RTX;
1947 #else
1948 rtx errno_rtx
1949 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1950 #endif
1951 emit_move_insn (errno_rtx,
1952 gen_int_mode (TARGET_EDOM, GET_MODE (errno_rtx)));
1953 emit_label (lab);
1954 return;
1955 }
1956 #endif
1957
1958 /* Make sure the library call isn't expanded as a tail call. */
1959 CALL_EXPR_TAILCALL (exp) = 0;
1960
1961 /* We can't set errno=EDOM directly; let the library call do it.
1962 Pop the arguments right away in case the call gets deleted. */
1963 NO_DEFER_POP;
1964 expand_call (exp, target, 0);
1965 OK_DEFER_POP;
1966 emit_label (lab);
1967 }
1968
1969 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1970 Return NULL_RTX if a normal call should be emitted rather than expanding
1971 the function in-line. EXP is the expression that is a call to the builtin
1972 function; if convenient, the result should be placed in TARGET.
1973 SUBTARGET may be used as the target for computing one of EXP's operands. */
1974
1975 static rtx
1976 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1977 {
1978 optab builtin_optab;
1979 rtx op0;
1980 rtx_insn *insns;
1981 tree fndecl = get_callee_fndecl (exp);
1982 machine_mode mode;
1983 bool errno_set = false;
1984 bool try_widening = false;
1985 tree arg;
1986
1987 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
1988 return NULL_RTX;
1989
1990 arg = CALL_EXPR_ARG (exp, 0);
1991
1992 switch (DECL_FUNCTION_CODE (fndecl))
1993 {
1994 CASE_FLT_FN (BUILT_IN_SQRT):
1995 errno_set = ! tree_expr_nonnegative_p (arg);
1996 try_widening = true;
1997 builtin_optab = sqrt_optab;
1998 break;
1999 CASE_FLT_FN (BUILT_IN_EXP):
2000 errno_set = true; builtin_optab = exp_optab; break;
2001 CASE_FLT_FN (BUILT_IN_EXP10):
2002 CASE_FLT_FN (BUILT_IN_POW10):
2003 errno_set = true; builtin_optab = exp10_optab; break;
2004 CASE_FLT_FN (BUILT_IN_EXP2):
2005 errno_set = true; builtin_optab = exp2_optab; break;
2006 CASE_FLT_FN (BUILT_IN_EXPM1):
2007 errno_set = true; builtin_optab = expm1_optab; break;
2008 CASE_FLT_FN (BUILT_IN_LOGB):
2009 errno_set = true; builtin_optab = logb_optab; break;
2010 CASE_FLT_FN (BUILT_IN_LOG):
2011 errno_set = true; builtin_optab = log_optab; break;
2012 CASE_FLT_FN (BUILT_IN_LOG10):
2013 errno_set = true; builtin_optab = log10_optab; break;
2014 CASE_FLT_FN (BUILT_IN_LOG2):
2015 errno_set = true; builtin_optab = log2_optab; break;
2016 CASE_FLT_FN (BUILT_IN_LOG1P):
2017 errno_set = true; builtin_optab = log1p_optab; break;
2018 CASE_FLT_FN (BUILT_IN_ASIN):
2019 builtin_optab = asin_optab; break;
2020 CASE_FLT_FN (BUILT_IN_ACOS):
2021 builtin_optab = acos_optab; break;
2022 CASE_FLT_FN (BUILT_IN_TAN):
2023 builtin_optab = tan_optab; break;
2024 CASE_FLT_FN (BUILT_IN_ATAN):
2025 builtin_optab = atan_optab; break;
2026 CASE_FLT_FN (BUILT_IN_FLOOR):
2027 builtin_optab = floor_optab; break;
2028 CASE_FLT_FN (BUILT_IN_CEIL):
2029 builtin_optab = ceil_optab; break;
2030 CASE_FLT_FN (BUILT_IN_TRUNC):
2031 builtin_optab = btrunc_optab; break;
2032 CASE_FLT_FN (BUILT_IN_ROUND):
2033 builtin_optab = round_optab; break;
2034 CASE_FLT_FN (BUILT_IN_NEARBYINT):
2035 builtin_optab = nearbyint_optab;
2036 if (flag_trapping_math)
2037 break;
2038 /* Else fallthrough and expand as rint. */
2039 CASE_FLT_FN (BUILT_IN_RINT):
2040 builtin_optab = rint_optab; break;
2041 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
2042 builtin_optab = significand_optab; break;
2043 default:
2044 gcc_unreachable ();
2045 }
2046
2047 /* Make a suitable register to place result in. */
2048 mode = TYPE_MODE (TREE_TYPE (exp));
2049
2050 if (! flag_errno_math || ! HONOR_NANS (mode))
2051 errno_set = false;
2052
2053 /* Before working hard, check whether the instruction is available, but try
2054 to widen the mode for specific operations. */
2055 if ((optab_handler (builtin_optab, mode) != CODE_FOR_nothing
2056 || (try_widening && !excess_precision_type (TREE_TYPE (exp))))
2057 && (!errno_set || !optimize_insn_for_size_p ()))
2058 {
2059 rtx result = gen_reg_rtx (mode);
2060
2061 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2062 need to expand the argument again. This way, we will not perform
2063 side-effects more than once. */
2064 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2065
2066 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2067
2068 start_sequence ();
2069
2070 /* Compute into RESULT.
2071 Set RESULT to wherever the result comes back. */
2072 result = expand_unop (mode, builtin_optab, op0, result, 0);
2073
2074 if (result != 0)
2075 {
2076 if (errno_set)
2077 expand_errno_check (exp, result);
2078
2079 /* Output the entire sequence. */
2080 insns = get_insns ();
2081 end_sequence ();
2082 emit_insn (insns);
2083 return result;
2084 }
2085
2086 /* If we were unable to expand via the builtin, stop the sequence
2087 (without outputting the insns) and call to the library function
2088 with the stabilized argument list. */
2089 end_sequence ();
2090 }
2091
2092 return expand_call (exp, target, target == const0_rtx);
2093 }
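
/* A sketch of the expansion above: on a target whose sqrt_optab
   matches DFmode,

     double r = __builtin_sqrt (x);

   becomes a single sqrt instruction.  When errno matters (the
   argument may be negative and -fmath-errno is in effect), the NaN
   check from expand_errno_check follows the instruction; if no insn
   is available, a normal call to sqrt is emitted instead.  */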
2094
2095 /* Expand a call to the builtin binary math functions (pow and atan2).
2096 Return NULL_RTX if a normal call should be emitted rather than expanding the
2097 function in-line. EXP is the expression that is a call to the builtin
2098 function; if convenient, the result should be placed in TARGET.
2099 SUBTARGET may be used as the target for computing one of EXP's
2100 operands. */
2101
2102 static rtx
2103 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2104 {
2105 optab builtin_optab;
2106 rtx op0, op1, result;
2107 rtx_insn *insns;
2108 int op1_type = REAL_TYPE;
2109 tree fndecl = get_callee_fndecl (exp);
2110 tree arg0, arg1;
2111 machine_mode mode;
2112 bool errno_set = true;
2113
2114 switch (DECL_FUNCTION_CODE (fndecl))
2115 {
2116 CASE_FLT_FN (BUILT_IN_SCALBN):
2117 CASE_FLT_FN (BUILT_IN_SCALBLN):
2118 CASE_FLT_FN (BUILT_IN_LDEXP):
2119 op1_type = INTEGER_TYPE; /* FALLTHRU */
2120 default:
2121 break;
2122 }
2123
2124 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2125 return NULL_RTX;
2126
2127 arg0 = CALL_EXPR_ARG (exp, 0);
2128 arg1 = CALL_EXPR_ARG (exp, 1);
2129
2130 switch (DECL_FUNCTION_CODE (fndecl))
2131 {
2132 CASE_FLT_FN (BUILT_IN_POW):
2133 builtin_optab = pow_optab; break;
2134 CASE_FLT_FN (BUILT_IN_ATAN2):
2135 builtin_optab = atan2_optab; break;
2136 CASE_FLT_FN (BUILT_IN_SCALB):
2137 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2138 return 0;
2139 builtin_optab = scalb_optab; break;
2140 CASE_FLT_FN (BUILT_IN_SCALBN):
2141 CASE_FLT_FN (BUILT_IN_SCALBLN):
2142 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2143 return 0;
2144 /* Fall through... */
2145 CASE_FLT_FN (BUILT_IN_LDEXP):
2146 builtin_optab = ldexp_optab; break;
2147 CASE_FLT_FN (BUILT_IN_FMOD):
2148 builtin_optab = fmod_optab; break;
2149 CASE_FLT_FN (BUILT_IN_REMAINDER):
2150 CASE_FLT_FN (BUILT_IN_DREM):
2151 builtin_optab = remainder_optab; break;
2152 default:
2153 gcc_unreachable ();
2154 }
2155
2156 /* Make a suitable register to place result in. */
2157 mode = TYPE_MODE (TREE_TYPE (exp));
2158
2159 /* Before working hard, check whether the instruction is available. */
2160 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2161 return NULL_RTX;
2162
2163 result = gen_reg_rtx (mode);
2164
2165 if (! flag_errno_math || ! HONOR_NANS (mode))
2166 errno_set = false;
2167
2168 if (errno_set && optimize_insn_for_size_p ())
2169 return 0;
2170
2171 /* Always stabilize the argument list. */
2172 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2173 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2174
2175 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2176 op1 = expand_normal (arg1);
2177
2178 start_sequence ();
2179
2180 /* Compute into RESULT.
2181 Set RESULT to wherever the result comes back. */
2182 result = expand_binop (mode, builtin_optab, op0, op1,
2183 result, 0, OPTAB_DIRECT);
2184
2185 /* If we were unable to expand via the builtin, stop the sequence
2186 (without outputting the insns) and call to the library function
2187 with the stabilized argument list. */
2188 if (result == 0)
2189 {
2190 end_sequence ();
2191 return expand_call (exp, target, target == const0_rtx);
2192 }
2193
2194 if (errno_set)
2195 expand_errno_check (exp, result);
2196
2197 /* Output the entire sequence. */
2198 insns = get_insns ();
2199 end_sequence ();
2200 emit_insn (insns);
2201
2202 return result;
2203 }
2204
2205 /* Expand a call to the builtin trinary math functions (fma).
2206 Return NULL_RTX if a normal call should be emitted rather than expanding the
2207 function in-line. EXP is the expression that is a call to the builtin
2208 function; if convenient, the result should be placed in TARGET.
2209 SUBTARGET may be used as the target for computing one of EXP's
2210 operands. */
2211
2212 static rtx
2213 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2214 {
2215 optab builtin_optab;
2216 rtx op0, op1, op2, result;
2217 rtx_insn *insns;
2218 tree fndecl = get_callee_fndecl (exp);
2219 tree arg0, arg1, arg2;
2220 machine_mode mode;
2221
2222 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2223 return NULL_RTX;
2224
2225 arg0 = CALL_EXPR_ARG (exp, 0);
2226 arg1 = CALL_EXPR_ARG (exp, 1);
2227 arg2 = CALL_EXPR_ARG (exp, 2);
2228
2229 switch (DECL_FUNCTION_CODE (fndecl))
2230 {
2231 CASE_FLT_FN (BUILT_IN_FMA):
2232 builtin_optab = fma_optab; break;
2233 default:
2234 gcc_unreachable ();
2235 }
2236
2237 /* Make a suitable register to place result in. */
2238 mode = TYPE_MODE (TREE_TYPE (exp));
2239
2240 /* Before working hard, check whether the instruction is available. */
2241 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2242 return NULL_RTX;
2243
2244 result = gen_reg_rtx (mode);
2245
2246 /* Always stabilize the argument list. */
2247 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2248 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2249 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2250
2251 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2252 op1 = expand_normal (arg1);
2253 op2 = expand_normal (arg2);
2254
2255 start_sequence ();
2256
2257 /* Compute into RESULT.
2258 Set RESULT to wherever the result comes back. */
2259 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2260 result, 0);
2261
2262 /* If we were unable to expand via the builtin, stop the sequence
2263 (without outputting the insns) and call to the library function
2264 with the stabilized argument list. */
2265 if (result == 0)
2266 {
2267 end_sequence ();
2268 return expand_call (exp, target, target == const0_rtx);
2269 }
2270
2271 /* Output the entire sequence. */
2272 insns = get_insns ();
2273 end_sequence ();
2274 emit_insn (insns);
2275
2276 return result;
2277 }
2278
2279 /* Expand a call to the builtin sin and cos math functions.
2280 Return NULL_RTX if a normal call should be emitted rather than expanding the
2281 function in-line. EXP is the expression that is a call to the builtin
2282 function; if convenient, the result should be placed in TARGET.
2283 SUBTARGET may be used as the target for computing one of EXP's
2284 operands. */
2285
2286 static rtx
2287 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2288 {
2289 optab builtin_optab;
2290 rtx op0;
2291 rtx_insn *insns;
2292 tree fndecl = get_callee_fndecl (exp);
2293 machine_mode mode;
2294 tree arg;
2295
2296 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2297 return NULL_RTX;
2298
2299 arg = CALL_EXPR_ARG (exp, 0);
2300
2301 switch (DECL_FUNCTION_CODE (fndecl))
2302 {
2303 CASE_FLT_FN (BUILT_IN_SIN):
2304 CASE_FLT_FN (BUILT_IN_COS):
2305 builtin_optab = sincos_optab; break;
2306 default:
2307 gcc_unreachable ();
2308 }
2309
2310 /* Make a suitable register to place result in. */
2311 mode = TYPE_MODE (TREE_TYPE (exp));
2312
2313 /* Check if the sincos insn is available; otherwise fall back
2314 to the sin or cos insn. */
2315 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2316 switch (DECL_FUNCTION_CODE (fndecl))
2317 {
2318 CASE_FLT_FN (BUILT_IN_SIN):
2319 builtin_optab = sin_optab; break;
2320 CASE_FLT_FN (BUILT_IN_COS):
2321 builtin_optab = cos_optab; break;
2322 default:
2323 gcc_unreachable ();
2324 }
2325
2326 /* Before working hard, check whether the instruction is available. */
2327 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2328 {
2329 rtx result = gen_reg_rtx (mode);
2330
2331 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2332 need to expand the argument again. This way, we will not perform
2333 side-effects more than once. */
2334 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2335
2336 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2337
2338 start_sequence ();
2339
2340 /* Compute into RESULT.
2341 Set RESULT to wherever the result comes back. */
2342 if (builtin_optab == sincos_optab)
2343 {
2344 int ok;
2345
2346 switch (DECL_FUNCTION_CODE (fndecl))
2347 {
2348 CASE_FLT_FN (BUILT_IN_SIN):
2349 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2350 break;
2351 CASE_FLT_FN (BUILT_IN_COS):
2352 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2353 break;
2354 default:
2355 gcc_unreachable ();
2356 }
2357 gcc_assert (ok);
2358 }
2359 else
2360 result = expand_unop (mode, builtin_optab, op0, result, 0);
2361
2362 if (result != 0)
2363 {
2364 /* Output the entire sequence. */
2365 insns = get_insns ();
2366 end_sequence ();
2367 emit_insn (insns);
2368 return result;
2369 }
2370
2371 /* If we were unable to expand via the builtin, stop the sequence
2372 (without outputting the insns) and call to the library function
2373 with the stabilized argument list. */
2374 end_sequence ();
2375 }
2376
2377 return expand_call (exp, target, target == const0_rtx);
2378 }
2379
2380 /* Given an interclass math builtin decl FNDECL and its argument ARG,
2381 return an RTL instruction code that implements the functionality.
2382 If that isn't possible or available return CODE_FOR_nothing. */
2383
2384 static enum insn_code
2385 interclass_mathfn_icode (tree arg, tree fndecl)
2386 {
2387 bool errno_set = false;
2388 optab builtin_optab = unknown_optab;
2389 machine_mode mode;
2390
2391 switch (DECL_FUNCTION_CODE (fndecl))
2392 {
2393 CASE_FLT_FN (BUILT_IN_ILOGB):
2394 errno_set = true; builtin_optab = ilogb_optab; break;
2395 CASE_FLT_FN (BUILT_IN_ISINF):
2396 builtin_optab = isinf_optab; break;
2397 case BUILT_IN_ISNORMAL:
2398 case BUILT_IN_ISFINITE:
2399 CASE_FLT_FN (BUILT_IN_FINITE):
2400 case BUILT_IN_FINITED32:
2401 case BUILT_IN_FINITED64:
2402 case BUILT_IN_FINITED128:
2403 case BUILT_IN_ISINFD32:
2404 case BUILT_IN_ISINFD64:
2405 case BUILT_IN_ISINFD128:
2406 /* These builtins have no optabs (yet). */
2407 break;
2408 default:
2409 gcc_unreachable ();
2410 }
2411
2412 /* There's no easy way to detect the case we need to set EDOM. */
2413 if (flag_errno_math && errno_set)
2414 return CODE_FOR_nothing;
2415
2416 /* Optab mode depends on the mode of the input argument. */
2417 mode = TYPE_MODE (TREE_TYPE (arg));
2418
2419 if (builtin_optab)
2420 return optab_handler (builtin_optab, mode);
2421 return CODE_FOR_nothing;
2422 }
2423
2424 /* Expand a call to one of the builtin math functions that operate on
2425 floating point argument and output an integer result (ilogb, isinf,
2426 isnan, etc).
2427 Return 0 if a normal call should be emitted rather than expanding the
2428 function in-line. EXP is the expression that is a call to the builtin
2429 function; if convenient, the result should be placed in TARGET. */
2430
2431 static rtx
2432 expand_builtin_interclass_mathfn (tree exp, rtx target)
2433 {
2434 enum insn_code icode = CODE_FOR_nothing;
2435 rtx op0;
2436 tree fndecl = get_callee_fndecl (exp);
2437 machine_mode mode;
2438 tree arg;
2439
2440 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2441 return NULL_RTX;
2442
2443 arg = CALL_EXPR_ARG (exp, 0);
2444 icode = interclass_mathfn_icode (arg, fndecl);
2445 mode = TYPE_MODE (TREE_TYPE (arg));
2446
2447 if (icode != CODE_FOR_nothing)
2448 {
2449 struct expand_operand ops[1];
2450 rtx_insn *last = get_last_insn ();
2451 tree orig_arg = arg;
2452
2453 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2454 need to expand the argument again. This way, we will not perform
2455 side-effects more than once. */
2456 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2457
2458 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2459
2460 if (mode != GET_MODE (op0))
2461 op0 = convert_to_mode (mode, op0, 0);
2462
2463 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2464 if (maybe_legitimize_operands (icode, 0, 1, ops)
2465 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2466 return ops[0].value;
2467
2468 delete_insns_since (last);
2469 CALL_EXPR_ARG (exp, 0) = orig_arg;
2470 }
2471
2472 return NULL_RTX;
2473 }
2474
2475 /* Expand a call to the builtin sincos math function.
2476 Return NULL_RTX if a normal call should be emitted rather than expanding the
2477 function in-line. EXP is the expression that is a call to the builtin
2478 function. */
2479
2480 static rtx
2481 expand_builtin_sincos (tree exp)
2482 {
2483 rtx op0, op1, op2, target1, target2;
2484 machine_mode mode;
2485 tree arg, sinp, cosp;
2486 int result;
2487 location_t loc = EXPR_LOCATION (exp);
2488 tree alias_type, alias_off;
2489
2490 if (!validate_arglist (exp, REAL_TYPE,
2491 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2492 return NULL_RTX;
2493
2494 arg = CALL_EXPR_ARG (exp, 0);
2495 sinp = CALL_EXPR_ARG (exp, 1);
2496 cosp = CALL_EXPR_ARG (exp, 2);
2497
2498 /* Make a suitable register to place result in. */
2499 mode = TYPE_MODE (TREE_TYPE (arg));
2500
2501 /* Check if the sincos insn is available; otherwise emit the call. */
2502 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2503 return NULL_RTX;
2504
2505 target1 = gen_reg_rtx (mode);
2506 target2 = gen_reg_rtx (mode);
2507
2508 op0 = expand_normal (arg);
2509 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2510 alias_off = build_int_cst (alias_type, 0);
2511 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2512 sinp, alias_off));
2513 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2514 cosp, alias_off));
2515
2516 /* Compute into target1 and target2.
2517 Set TARGET to wherever the result comes back. */
2518 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2519 gcc_assert (result);
2520
2521 /* Move target1 and target2 to the memory locations indicated
2522 by op1 and op2. */
2523 emit_move_insn (op1, target1);
2524 emit_move_insn (op2, target2);
2525
2526 return const0_rtx;
2527 }
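
/* The user-level shape handled above is the GNU extension

     double s, c;
     sincos (x, &s, &c);

   With a sincos insn both results come from a single instruction;
   without one we returned NULL_RTX above and a libcall is emitted.  */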
2528
2529 /* Expand a call to the internal cexpi builtin to the sincos math function.
2530 EXP is the expression that is a call to the builtin function; if convenient,
2531 the result should be placed in TARGET. */
2532
2533 static rtx
2534 expand_builtin_cexpi (tree exp, rtx target)
2535 {
2536 tree fndecl = get_callee_fndecl (exp);
2537 tree arg, type;
2538 machine_mode mode;
2539 rtx op0, op1, op2;
2540 location_t loc = EXPR_LOCATION (exp);
2541
2542 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2543 return NULL_RTX;
2544
2545 arg = CALL_EXPR_ARG (exp, 0);
2546 type = TREE_TYPE (arg);
2547 mode = TYPE_MODE (TREE_TYPE (arg));
2548
2549 /* Try expanding via a sincos optab; fall back to emitting a libcall
2550 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2551 is only generated from sincos or cexp, or when one of them is available. */
2552 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2553 {
2554 op1 = gen_reg_rtx (mode);
2555 op2 = gen_reg_rtx (mode);
2556
2557 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2558
2559 /* Compute into op1 and op2. */
2560 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2561 }
2562 else if (targetm.libc_has_function (function_sincos))
2563 {
2564 tree call, fn = NULL_TREE;
2565 tree top1, top2;
2566 rtx op1a, op2a;
2567
2568 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2569 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2570 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2571 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2572 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2573 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2574 else
2575 gcc_unreachable ();
2576
2577 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2578 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2579 op1a = copy_addr_to_reg (XEXP (op1, 0));
2580 op2a = copy_addr_to_reg (XEXP (op2, 0));
2581 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2582 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2583
2584 /* Make sure not to fold the sincos call again. */
2585 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2586 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2587 call, 3, arg, top1, top2));
2588 }
2589 else
2590 {
2591 tree call, fn = NULL_TREE, narg;
2592 tree ctype = build_complex_type (type);
2593
2594 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2595 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2596 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2597 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2598 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2599 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2600 else
2601 gcc_unreachable ();
2602
2603 /* If we don't have a decl for cexp, create one. This is the
2604 friendliest fallback if the user calls __builtin_cexpi
2605 without full target C99 function support. */
2606 if (fn == NULL_TREE)
2607 {
2608 tree fntype;
2609 const char *name = NULL;
2610
2611 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2612 name = "cexpf";
2613 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2614 name = "cexp";
2615 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2616 name = "cexpl";
2617
2618 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2619 fn = build_fn_decl (name, fntype);
2620 }
2621
2622 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2623 build_real (type, dconst0), arg);
2624
2625 /* Make sure not to fold the cexp call again. */
2626 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2627 return expand_expr (build_call_nary (ctype, call, 1, narg),
2628 target, VOIDmode, EXPAND_NORMAL);
2629 }
2630
2631 /* Now build the proper return type. */
2632 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2633 make_tree (TREE_TYPE (arg), op2),
2634 make_tree (TREE_TYPE (arg), op1)),
2635 target, VOIDmode, EXPAND_NORMAL);
2636 }
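
/* All three strategies above compute the same identity, as a sketch:

     __builtin_cexpi (x) == cos (x) + I*sin (x) == cexp (I*x)

   either directly via the sincos optab, via a sincos libcall into two
   temporaries, or via a cexp libcall on the constructed complex value
   0.0 + x*I.  */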
2637
2638 /* Conveniently construct a function call expression. FNDECL names the
2639 function to be called, N is the number of arguments, and the "..."
2640 parameters are the argument expressions. Unlike build_call_expr,
2641 this doesn't fold the call, so it will always return a CALL_EXPR. */
2642
2643 static tree
2644 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2645 {
2646 va_list ap;
2647 tree fntype = TREE_TYPE (fndecl);
2648 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2649
2650 va_start (ap, n);
2651 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2652 va_end (ap);
2653 SET_EXPR_LOCATION (fn, loc);
2654 return fn;
2655 }
2656
2657 /* Expand a call to one of the builtin rounding functions gcc defines
2658 as an extension (lfloor and lceil). As these are gcc extensions we
2659 do not need to worry about setting errno to EDOM.
2660 If expanding via optab fails, lower expression to (int)(floor(x)).
2661 EXP is the expression that is a call to the builtin function;
2662 if convenient, the result should be placed in TARGET. */
2663
2664 static rtx
2665 expand_builtin_int_roundingfn (tree exp, rtx target)
2666 {
2667 convert_optab builtin_optab;
2668 rtx op0, tmp;
2669 rtx_insn *insns;
2670 tree fndecl = get_callee_fndecl (exp);
2671 enum built_in_function fallback_fn;
2672 tree fallback_fndecl;
2673 machine_mode mode;
2674 tree arg;
2675
2676 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2677 gcc_unreachable ();
2678
2679 arg = CALL_EXPR_ARG (exp, 0);
2680
2681 switch (DECL_FUNCTION_CODE (fndecl))
2682 {
2683 CASE_FLT_FN (BUILT_IN_ICEIL):
2684 CASE_FLT_FN (BUILT_IN_LCEIL):
2685 CASE_FLT_FN (BUILT_IN_LLCEIL):
2686 builtin_optab = lceil_optab;
2687 fallback_fn = BUILT_IN_CEIL;
2688 break;
2689
2690 CASE_FLT_FN (BUILT_IN_IFLOOR):
2691 CASE_FLT_FN (BUILT_IN_LFLOOR):
2692 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2693 builtin_optab = lfloor_optab;
2694 fallback_fn = BUILT_IN_FLOOR;
2695 break;
2696
2697 default:
2698 gcc_unreachable ();
2699 }
2700
2701 /* Make a suitable register to place result in. */
2702 mode = TYPE_MODE (TREE_TYPE (exp));
2703
2704 target = gen_reg_rtx (mode);
2705
2706 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2707 need to expand the argument again. This way, we will not perform
2708 side-effects more than once. */
2709 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2710
2711 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2712
2713 start_sequence ();
2714
2715 /* Compute into TARGET. */
2716 if (expand_sfix_optab (target, op0, builtin_optab))
2717 {
2718 /* Output the entire sequence. */
2719 insns = get_insns ();
2720 end_sequence ();
2721 emit_insn (insns);
2722 return target;
2723 }
2724
2725 /* If we were unable to expand via the builtin, stop the sequence
2726 (without outputting the insns). */
2727 end_sequence ();
2728
2729 /* Fall back to floating point rounding optab. */
2730 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2731
2732 /* For non-C99 targets we may end up without a fallback fndecl here
2733 if the user called __builtin_lfloor directly. In this case emit
2734 a call to the floor/ceil variants nevertheless. This should give
2735 the best user experience on targets without full C99 support. */
2736 if (fallback_fndecl == NULL_TREE)
2737 {
2738 tree fntype;
2739 const char *name = NULL;
2740
2741 switch (DECL_FUNCTION_CODE (fndecl))
2742 {
2743 case BUILT_IN_ICEIL:
2744 case BUILT_IN_LCEIL:
2745 case BUILT_IN_LLCEIL:
2746 name = "ceil";
2747 break;
2748 case BUILT_IN_ICEILF:
2749 case BUILT_IN_LCEILF:
2750 case BUILT_IN_LLCEILF:
2751 name = "ceilf";
2752 break;
2753 case BUILT_IN_ICEILL:
2754 case BUILT_IN_LCEILL:
2755 case BUILT_IN_LLCEILL:
2756 name = "ceill";
2757 break;
2758 case BUILT_IN_IFLOOR:
2759 case BUILT_IN_LFLOOR:
2760 case BUILT_IN_LLFLOOR:
2761 name = "floor";
2762 break;
2763 case BUILT_IN_IFLOORF:
2764 case BUILT_IN_LFLOORF:
2765 case BUILT_IN_LLFLOORF:
2766 name = "floorf";
2767 break;
2768 case BUILT_IN_IFLOORL:
2769 case BUILT_IN_LFLOORL:
2770 case BUILT_IN_LLFLOORL:
2771 name = "floorl";
2772 break;
2773 default:
2774 gcc_unreachable ();
2775 }
2776
2777 fntype = build_function_type_list (TREE_TYPE (arg),
2778 TREE_TYPE (arg), NULL_TREE);
2779 fallback_fndecl = build_fn_decl (name, fntype);
2780 }
2781
2782 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2783
2784 tmp = expand_normal (exp);
2785 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2786
2787 /* Truncate the result of floating point optab to integer
2788 via expand_fix (). */
2789 target = gen_reg_rtx (mode);
2790 expand_fix (target, tmp, 0);
2791
2792 return target;
2793 }
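
/* As a sketch, on a target without an lfloor insn

     long l = __builtin_lfloor (x);

   is lowered by the fallback path above to the equivalent of

     long l = (long) floor (x);

   with the final conversion done by expand_fix.  */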
2794
2795 /* Expand a call to one of the builtin math functions doing integer
2796 conversion (lrint).
2797 Return 0 if a normal call should be emitted rather than expanding the
2798 function in-line. EXP is the expression that is a call to the builtin
2799 function; if convenient, the result should be placed in TARGET. */
2800
2801 static rtx
2802 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2803 {
2804 convert_optab builtin_optab;
2805 rtx op0;
2806 rtx_insn *insns;
2807 tree fndecl = get_callee_fndecl (exp);
2808 tree arg;
2809 machine_mode mode;
2810 enum built_in_function fallback_fn = BUILT_IN_NONE;
2811
2812 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2813 gcc_unreachable ();
2814
2815 arg = CALL_EXPR_ARG (exp, 0);
2816
2817 switch (DECL_FUNCTION_CODE (fndecl))
2818 {
2819 CASE_FLT_FN (BUILT_IN_IRINT):
2820 fallback_fn = BUILT_IN_LRINT;
2821 /* FALLTHRU */
2822 CASE_FLT_FN (BUILT_IN_LRINT):
2823 CASE_FLT_FN (BUILT_IN_LLRINT):
2824 builtin_optab = lrint_optab;
2825 break;
2826
2827 CASE_FLT_FN (BUILT_IN_IROUND):
2828 fallback_fn = BUILT_IN_LROUND;
2829 /* FALLTHRU */
2830 CASE_FLT_FN (BUILT_IN_LROUND):
2831 CASE_FLT_FN (BUILT_IN_LLROUND):
2832 builtin_optab = lround_optab;
2833 break;
2834
2835 default:
2836 gcc_unreachable ();
2837 }
2838
2839 /* There's no easy way to detect the case we need to set EDOM. */
2840 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2841 return NULL_RTX;
2842
2843 /* Make a suitable register to place result in. */
2844 mode = TYPE_MODE (TREE_TYPE (exp));
2845
2846 /* We can't detect when EDOM must be set, so expand inline only if errno handling isn't required. */
2847 if (!flag_errno_math)
2848 {
2849 rtx result = gen_reg_rtx (mode);
2850
2851 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2852 need to expand the argument again. This way, we will not perform
2853 side-effects more than once. */
2854 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2855
2856 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2857
2858 start_sequence ();
2859
2860 if (expand_sfix_optab (result, op0, builtin_optab))
2861 {
2862 /* Output the entire sequence. */
2863 insns = get_insns ();
2864 end_sequence ();
2865 emit_insn (insns);
2866 return result;
2867 }
2868
2869 /* If we were unable to expand via the builtin, stop the sequence
2870 (without outputting the insns) and call to the library function
2871 with the stabilized argument list. */
2872 end_sequence ();
2873 }
2874
2875 if (fallback_fn != BUILT_IN_NONE)
2876 {
2877 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2878 targets, (int) round (x) should never be transformed into
2879 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2880 a call to lround in the hope that the target provides at least some
2881 C99 functions. This should give the best user experience on
2882 targets without full C99 support. */
2883 tree fallback_fndecl = mathfn_built_in_1 (TREE_TYPE (arg),
2884 fallback_fn, 0);
2885
2886 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2887 fallback_fndecl, 1, arg);
2888
2889 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2890 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2891 return convert_to_mode (mode, target, 0);
2892 }
2893
2894 return expand_call (exp, target, target == const0_rtx);
2895 }
2896
2897 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2898 a normal call should be emitted rather than expanding the function
2899 in-line. EXP is the expression that is a call to the builtin
2900 function; if convenient, the result should be placed in TARGET. */
2901
2902 static rtx
2903 expand_builtin_powi (tree exp, rtx target)
2904 {
2905 tree arg0, arg1;
2906 rtx op0, op1;
2907 machine_mode mode;
2908 machine_mode mode2;
2909
2910 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2911 return NULL_RTX;
2912
2913 arg0 = CALL_EXPR_ARG (exp, 0);
2914 arg1 = CALL_EXPR_ARG (exp, 1);
2915 mode = TYPE_MODE (TREE_TYPE (exp));
2916
2917 /* Emit a libcall to libgcc. */
2918
2919 /* Mode of the 2nd argument must match that of an int. */
2920 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2921
2922 if (target == NULL_RTX)
2923 target = gen_reg_rtx (mode);
2924
2925 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2926 if (GET_MODE (op0) != mode)
2927 op0 = convert_to_mode (mode, op0, 0);
2928 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2929 if (GET_MODE (op1) != mode2)
2930 op1 = convert_to_mode (mode2, op1, 0);
2931
2932 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2933 target, LCT_CONST, mode, 2,
2934 op0, mode, op1, mode2);
2935
2936 return target;
2937 }
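
/* The libcall emitted above resolves to libgcc's __powi helpers; for
   example, a DFmode call

     double r = __builtin_powi (x, n);

   becomes the equivalent of r = __powidf2 (x, n), with the exponent
   first converted to the mode of an int.  */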
2938
2939 /* Expand expression EXP which is a call to the strlen builtin. Return
2940 NULL_RTX if we failed (the caller should emit a normal call), otherwise
2941 try to get the result in TARGET, if convenient. */
2942
2943 static rtx
2944 expand_builtin_strlen (tree exp, rtx target,
2945 machine_mode target_mode)
2946 {
2947 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2948 return NULL_RTX;
2949 else
2950 {
2951 struct expand_operand ops[4];
2952 rtx pat;
2953 tree len;
2954 tree src = CALL_EXPR_ARG (exp, 0);
2955 rtx src_reg;
2956 rtx_insn *before_strlen;
2957 machine_mode insn_mode = target_mode;
2958 enum insn_code icode = CODE_FOR_nothing;
2959 unsigned int align;
2960
2961 /* If the length can be computed at compile-time, return it. */
2962 len = c_strlen (src, 0);
2963 if (len)
2964 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2965
2966 /* If the length can be computed at compile-time and is a constant
2967 integer, but there are side-effects in src, evaluate
2968 src for side-effects, then return len.
2969 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2970 can be optimized into: i++; x = 3; */
2971 len = c_strlen (src, 1);
2972 if (len && TREE_CODE (len) == INTEGER_CST)
2973 {
2974 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2975 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2976 }
2977
2978 align = get_pointer_alignment (src) / BITS_PER_UNIT;
2979
2980 /* If SRC is not a pointer type, don't do this operation inline. */
2981 if (align == 0)
2982 return NULL_RTX;
2983
2984 /* Bail out if we can't compute strlen in the right mode. */
2985 while (insn_mode != VOIDmode)
2986 {
2987 icode = optab_handler (strlen_optab, insn_mode);
2988 if (icode != CODE_FOR_nothing)
2989 break;
2990
2991 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
2992 }
2993 if (insn_mode == VOIDmode)
2994 return NULL_RTX;
2995
2996 /* Make a place to hold the source address. We will not expand
2997 the actual source until we are sure that the expansion will
2998 not fail -- there are trees that cannot be expanded twice. */
2999 src_reg = gen_reg_rtx (Pmode);
3000
3001 /* Mark the beginning of the strlen sequence so we can emit the
3002 source operand later. */
3003 before_strlen = get_last_insn ();
3004
3005 create_output_operand (&ops[0], target, insn_mode);
3006 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3007 create_integer_operand (&ops[2], 0);
3008 create_integer_operand (&ops[3], align);
3009 if (!maybe_expand_insn (icode, 4, ops))
3010 return NULL_RTX;
3011
3012 /* Now that we are assured of success, expand the source. */
3013 start_sequence ();
3014 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3015 if (pat != src_reg)
3016 {
3017 #ifdef POINTERS_EXTEND_UNSIGNED
3018 if (GET_MODE (pat) != Pmode)
3019 pat = convert_to_mode (Pmode, pat,
3020 POINTERS_EXTEND_UNSIGNED);
3021 #endif
3022 emit_move_insn (src_reg, pat);
3023 }
3024 pat = get_insns ();
3025 end_sequence ();
3026
3027 if (before_strlen)
3028 emit_insn_after (pat, before_strlen);
3029 else
3030 emit_insn_before (pat, get_insns ());
3031
3032 /* Return the value in the proper mode for this function. */
3033 if (GET_MODE (ops[0].value) == target_mode)
3034 target = ops[0].value;
3035 else if (target != 0)
3036 convert_move (target, ops[0].value, 0);
3037 else
3038 target = convert_to_mode (target_mode, ops[0].value, 0);
3039
3040 return target;
3041 }
3042 }
3043
3044 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3045 bytes from constant string DATA + OFFSET and return it as target
3046 constant. */
3047
3048 static rtx
3049 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3050 machine_mode mode)
3051 {
3052 const char *str = (const char *) data;
3053
3054 gcc_assert (offset >= 0
3055 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3056 <= strlen (str) + 1));
3057
3058 return c_readstr (str + offset, mode);
3059 }
3060
3061 /* LEN specifies the length of the block for the memcpy/memset operation.
3062 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
3063 In some cases we can make a very likely guess at the maximum size,
3064 which we then store in PROBABLE_MAX_SIZE. */
3065
3066 static void
3067 determine_block_size (tree len, rtx len_rtx,
3068 unsigned HOST_WIDE_INT *min_size,
3069 unsigned HOST_WIDE_INT *max_size,
3070 unsigned HOST_WIDE_INT *probable_max_size)
3071 {
3072 if (CONST_INT_P (len_rtx))
3073 {
3074 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3075 return;
3076 }
3077 else
3078 {
3079 wide_int min, max;
3080 enum value_range_type range_type = VR_UNDEFINED;
3081
3082 /* Determine bounds from the type. */
3083 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3084 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3085 else
3086 *min_size = 0;
3087 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3088 *probable_max_size = *max_size
3089 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3090 else
3091 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3092
3093 if (TREE_CODE (len) == SSA_NAME)
3094 range_type = get_range_info (len, &min, &max);
3095 if (range_type == VR_RANGE)
3096 {
3097 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3098 *min_size = min.to_uhwi ();
3099 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3100 *probable_max_size = *max_size = max.to_uhwi ();
3101 }
3102 else if (range_type == VR_ANTI_RANGE)
3103 {
3104 /* An anti-range 0...N lets us determine that the minimal size is N+1. */
3105 if (min == 0)
3106 {
3107 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
3108 *min_size = max.to_uhwi () + 1;
3109 }
3110 /* Code like
3111
3112 int n;
3113 if (n < 100)
3114 memcpy (a, b, n)
3115
3116 produces an anti-range allowing negative values of N. We can
3117 still use that information to guess that N is not negative.
3118 */
3119 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3120 *probable_max_size = min.to_uhwi () - 1;
3121 }
3122 }
3123 gcc_checking_assert (*max_size <=
3124 (unsigned HOST_WIDE_INT)
3125 GET_MODE_MASK (GET_MODE (len_rtx)));
3126 }
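
/* Two worked cases of the range logic above, as a sketch:

     - VRP recorded the anti-range ~[0, 0] for LEN (i.e. LEN != 0):
       *MIN_SIZE becomes 1.

     - LEN came from "int n; if (n < 100) memcpy (a, b, n);": the
       conversion to a wider unsigned type yields an anti-range
       ~[100, <huge>], so *PROBABLE_MAX_SIZE is capped at 99 while
       *MAX_SIZE stays conservative.  */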
3127
3128 /* Helper function to do the actual work for expand_builtin_memcpy. */
3129
3130 static rtx
3131 expand_builtin_memcpy_args (tree dest, tree src, tree len, rtx target, tree exp)
3132 {
3133 const char *src_str;
3134 unsigned int src_align = get_pointer_alignment (src);
3135 unsigned int dest_align = get_pointer_alignment (dest);
3136 rtx dest_mem, src_mem, dest_addr, len_rtx;
3137 HOST_WIDE_INT expected_size = -1;
3138 unsigned int expected_align = 0;
3139 unsigned HOST_WIDE_INT min_size;
3140 unsigned HOST_WIDE_INT max_size;
3141 unsigned HOST_WIDE_INT probable_max_size;
3142
3143 /* If DEST is not a pointer type, call the normal function. */
3144 if (dest_align == 0)
3145 return NULL_RTX;
3146
3147 /* If either SRC is not a pointer type, don't do this
3148 operation in-line. */
3149 if (src_align == 0)
3150 return NULL_RTX;
3151
3152 if (currently_expanding_gimple_stmt)
3153 stringop_block_profile (currently_expanding_gimple_stmt,
3154 &expected_align, &expected_size);
3155
3156 if (expected_align < dest_align)
3157 expected_align = dest_align;
3158 dest_mem = get_memory_rtx (dest, len);
3159 set_mem_align (dest_mem, dest_align);
3160 len_rtx = expand_normal (len);
3161 determine_block_size (len, len_rtx, &min_size, &max_size,
3162 &probable_max_size);
3163 src_str = c_getstr (src);
3164
3165 /* If SRC is a string constant and block move would be done
3166 by pieces, we can avoid loading the string from memory
3167 and store only the computed constants. */
3168 if (src_str
3169 && CONST_INT_P (len_rtx)
3170 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3171 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3172 CONST_CAST (char *, src_str),
3173 dest_align, false))
3174 {
3175 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3176 builtin_memcpy_read_str,
3177 CONST_CAST (char *, src_str),
3178 dest_align, false, 0);
3179 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3180 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3181 return dest_mem;
3182 }
3183
3184 src_mem = get_memory_rtx (src, len);
3185 set_mem_align (src_mem, src_align);
3186
3187 /* Copy word part most expediently. */
3188 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3189 CALL_EXPR_TAILCALL (exp)
3190 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3191 expected_align, expected_size,
3192 min_size, max_size, probable_max_size);
3193
3194 if (dest_addr == 0)
3195 {
3196 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3197 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3198 }
3199
3200 return dest_addr;
3201 }
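
/* A sketch of the by-pieces shortcut above: for

     char buf[8];
     memcpy (buf, "abc", 4);

   the string need not be loaded at run time; store_by_pieces emits
   the constant directly, e.g. as one 4-byte store of "abc\0" on a
   typical 32-bit target.  */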
3202
3203 /* Expand a call EXP to the memcpy builtin.
3204 Return NULL_RTX if we failed; the caller should emit a normal call,
3205 otherwise try to get the result in TARGET, if convenient (and in
3206 mode MODE if that's convenient). */
3207
3208 static rtx
3209 expand_builtin_memcpy (tree exp, rtx target)
3210 {
3211 if (!validate_arglist (exp,
3212 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3213 return NULL_RTX;
3214 else
3215 {
3216 tree dest = CALL_EXPR_ARG (exp, 0);
3217 tree src = CALL_EXPR_ARG (exp, 1);
3218 tree len = CALL_EXPR_ARG (exp, 2);
3219 return expand_builtin_memcpy_args (dest, src, len, target, exp);
3220 }
3221 }
3222
3223 /* Expand an instrumented call EXP to the memcpy builtin.
3224 Return NULL_RTX if we failed; the caller should emit a normal call,
3225 otherwise try to get the result in TARGET, if convenient (and in
3226 mode MODE if that's convenient). */
3227
3228 static rtx
3229 expand_builtin_memcpy_with_bounds (tree exp, rtx target)
3230 {
3231 if (!validate_arglist (exp,
3232 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3233 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3234 INTEGER_TYPE, VOID_TYPE))
3235 return NULL_RTX;
3236 else
3237 {
3238 tree dest = CALL_EXPR_ARG (exp, 0);
3239 tree src = CALL_EXPR_ARG (exp, 2);
3240 tree len = CALL_EXPR_ARG (exp, 4);
3241 rtx res = expand_builtin_memcpy_args (dest, src, len, target, exp);
3242
3243 /* Return src bounds with the result. */
3244 if (res)
3245 {
3246 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3247 expand_normal (CALL_EXPR_ARG (exp, 1)));
3248 res = chkp_join_splitted_slot (res, bnd);
3249 }
3250 return res;
3251 }
3252 }
3253
3254 /* Expand a call EXP to the mempcpy builtin.
3255 Return NULL_RTX if we failed; the caller should emit a normal call,
3256 otherwise try to get the result in TARGET, if convenient (and in
3257 mode MODE if that's convenient). If ENDP is 0 return the
3258 destination pointer, if ENDP is 1 return the end pointer ala
3259 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3260 stpcpy. */
3261
3262 static rtx
3263 expand_builtin_mempcpy (tree exp, rtx target, machine_mode mode)
3264 {
3265 if (!validate_arglist (exp,
3266 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3267 return NULL_RTX;
3268 else
3269 {
3270 tree dest = CALL_EXPR_ARG (exp, 0);
3271 tree src = CALL_EXPR_ARG (exp, 1);
3272 tree len = CALL_EXPR_ARG (exp, 2);
3273 return expand_builtin_mempcpy_args (dest, src, len,
3274 target, mode, /*endp=*/ 1,
3275 exp);
3276 }
3277 }
3278
3279 /* Expand an instrumented call EXP to the mempcpy builtin.
3280 Return NULL_RTX if we failed, the caller should emit a normal call,
3281 otherwise try to get the result in TARGET, if convenient (and in
3282 mode MODE if that's convenient). */
3283
3284 static rtx
3285 expand_builtin_mempcpy_with_bounds (tree exp, rtx target, machine_mode mode)
3286 {
3287 if (!validate_arglist (exp,
3288 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3289 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3290 INTEGER_TYPE, VOID_TYPE))
3291 return NULL_RTX;
3292 else
3293 {
3294 tree dest = CALL_EXPR_ARG (exp, 0);
3295 tree src = CALL_EXPR_ARG (exp, 2);
3296 tree len = CALL_EXPR_ARG (exp, 4);
3297 rtx res = expand_builtin_mempcpy_args (dest, src, len, target,
3298 mode, 1, exp);
3299
3300 /* Return src bounds with the result. */
3301 if (res)
3302 {
3303 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3304 expand_normal (CALL_EXPR_ARG (exp, 1)));
3305 res = chkp_join_splitted_slot (res, bnd);
3306 }
3307 return res;
3308 }
3309 }
3310
3311 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3312 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3313 so that this can also be called without constructing an actual CALL_EXPR.
3314 The other arguments and return value are the same as for
3315 expand_builtin_mempcpy. */
3316
3317 static rtx
3318 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3319 rtx target, machine_mode mode, int endp,
3320 tree orig_exp)
3321 {
3322 tree fndecl = get_callee_fndecl (orig_exp);
3323
3324 /* If return value is ignored, transform mempcpy into memcpy. */
3325 if (target == const0_rtx
3326 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP
3327 && builtin_decl_implicit_p (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP))
3328 {
3329 tree fn = builtin_decl_implicit (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP);
3330 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3331 dest, src, len);
3332 return expand_expr (result, target, mode, EXPAND_NORMAL);
3333 }
3334 else if (target == const0_rtx
3335 && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
3336 {
3337 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3338 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3339 dest, src, len);
3340 return expand_expr (result, target, mode, EXPAND_NORMAL);
3341 }
3342 else
3343 {
3344 const char *src_str;
3345 unsigned int src_align = get_pointer_alignment (src);
3346 unsigned int dest_align = get_pointer_alignment (dest);
3347 rtx dest_mem, src_mem, len_rtx;
3348
3349 /* If either SRC or DEST is not a pointer type, don't do this
3350 operation in-line. */
3351 if (dest_align == 0 || src_align == 0)
3352 return NULL_RTX;
3353
3354 /* If LEN is not constant, call the normal function. */
3355 if (! tree_fits_uhwi_p (len))
3356 return NULL_RTX;
3357
3358 len_rtx = expand_normal (len);
3359 src_str = c_getstr (src);
3360
3361 /* If SRC is a string constant and block move would be done
3362 by pieces, we can avoid loading the string from memory
3363 and store only the computed constants. */
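/* E.g. mempcpy (buf, "hi", 3) can be emitted as immediate stores of
   'h', 'i' and '\0' instead of a copy from the string literal.  */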
3364 if (src_str
3365 && CONST_INT_P (len_rtx)
3366 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3367 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3368 CONST_CAST (char *, src_str),
3369 dest_align, false))
3370 {
3371 dest_mem = get_memory_rtx (dest, len);
3372 set_mem_align (dest_mem, dest_align);
3373 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3374 builtin_memcpy_read_str,
3375 CONST_CAST (char *, src_str),
3376 dest_align, false, endp);
3377 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3378 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3379 return dest_mem;
3380 }
3381
3382 if (CONST_INT_P (len_rtx)
3383 && can_move_by_pieces (INTVAL (len_rtx),
3384 MIN (dest_align, src_align)))
3385 {
3386 dest_mem = get_memory_rtx (dest, len);
3387 set_mem_align (dest_mem, dest_align);
3388 src_mem = get_memory_rtx (src, len);
3389 set_mem_align (src_mem, src_align);
3390 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3391 MIN (dest_align, src_align), endp);
3392 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3393 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3394 return dest_mem;
3395 }
3396
3397 return NULL_RTX;
3398 }
3399 }
3400
3401 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3402 we failed; the caller should emit a normal call, otherwise try to
3403 get the result in TARGET, if convenient. If ENDP is 0 return the
3404 destination pointer, if ENDP is 1 return the end pointer ala
3405 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3406 stpcpy. */
3407
3408 static rtx
3409 expand_movstr (tree dest, tree src, rtx target, int endp)
3410 {
3411 struct expand_operand ops[3];
3412 rtx dest_mem;
3413 rtx src_mem;
3414
3415 if (!targetm.have_movstr ())
3416 return NULL_RTX;
3417
3418 dest_mem = get_memory_rtx (dest, NULL);
3419 src_mem = get_memory_rtx (src, NULL);
3420 if (!endp)
3421 {
3422 target = force_reg (Pmode, XEXP (dest_mem, 0));
3423 dest_mem = replace_equiv_address (dest_mem, target);
3424 }
3425
3426 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3427 create_fixed_operand (&ops[1], dest_mem);
3428 create_fixed_operand (&ops[2], src_mem);
3429 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
3430 return NULL_RTX;
3431
3432 if (endp && target != const0_rtx)
3433 {
3434 target = ops[0].value;
3435 /* movstr is supposed to set end to the address of the NUL
3436 terminator. If the caller requested a mempcpy-like return value,
3437 adjust it. */
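/* I.e. movstr leaves DST + strlen (SRC); a mempcpy of the
   strlen (SRC) + 1 copied bytes must return one past that.  */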
3438 if (endp == 1)
3439 {
3440 rtx tem = plus_constant (GET_MODE (target),
3441 gen_lowpart (GET_MODE (target), target), 1);
3442 emit_move_insn (target, force_operand (tem, NULL_RTX));
3443 }
3444 }
3445 return target;
3446 }
3447
3448 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3449 NULL_RTX if we failed; the caller should emit a normal
3450 call, otherwise try to get the result in TARGET, if
3451 convenient. */
3452
3453 static rtx
3454 expand_builtin_strcpy (tree exp, rtx target)
3455 {
3456 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3457 {
3458 tree dest = CALL_EXPR_ARG (exp, 0);
3459 tree src = CALL_EXPR_ARG (exp, 1);
3460 return expand_builtin_strcpy_args (dest, src, target);
3461 }
3462 return NULL_RTX;
3463 }
3464
3465 /* Helper function to do the actual work for expand_builtin_strcpy. The
3466 arguments to the builtin_strcpy call DEST and SRC are broken out
3467 so that this can also be called without constructing an actual CALL_EXPR.
3468 The other arguments and return value are the same as for
3469 expand_builtin_strcpy. */
3470
3471 static rtx
3472 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3473 {
3474 return expand_movstr (dest, src, target, /*endp=*/0);
3475 }
3476
3477 /* Expand a call EXP to the stpcpy builtin.
3478 Return NULL_RTX if we failed; the caller should emit a normal call,
3479 otherwise try to get the result in TARGET, if convenient (and in
3480 mode MODE if that's convenient). */
3481
3482 static rtx
3483 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3484 {
3485 tree dst, src;
3486 location_t loc = EXPR_LOCATION (exp);
3487
3488 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3489 return NULL_RTX;
3490
3491 dst = CALL_EXPR_ARG (exp, 0);
3492 src = CALL_EXPR_ARG (exp, 1);
3493
3494 /* If return value is ignored, transform stpcpy into strcpy. */
3495 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3496 {
3497 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3498 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3499 return expand_expr (result, target, mode, EXPAND_NORMAL);
3500 }
3501 else
3502 {
3503 tree len, lenp1;
3504 rtx ret;
3505
3506 /* Ensure we get an actual string whose length can be evaluated at
3507 compile-time, not an expression containing a string. This is
3508 because the latter will potentially produce pessimized code
3509 when used to produce the return value. */
3510 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3511 return expand_movstr (dst, src, target, /*endp=*/2);
3512
3513 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3514 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3515 target, mode, /*endp=*/2,
3516 exp);
3517
3518 if (ret)
3519 return ret;
3520
3521 if (TREE_CODE (len) == INTEGER_CST)
3522 {
3523 rtx len_rtx = expand_normal (len);
3524
3525 if (CONST_INT_P (len_rtx))
3526 {
3527 ret = expand_builtin_strcpy_args (dst, src, target);
3528
3529 if (ret)
3530 {
3531 if (! target)
3532 {
3533 if (mode != VOIDmode)
3534 target = gen_reg_rtx (mode);
3535 else
3536 target = gen_reg_rtx (GET_MODE (ret));
3537 }
3538 if (GET_MODE (target) != GET_MODE (ret))
3539 ret = gen_lowpart (GET_MODE (target), ret);
3540
3541 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3542 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3543 gcc_assert (ret);
3544
3545 return target;
3546 }
3547 }
3548 }
3549
3550 return expand_movstr (dst, src, target, /*endp=*/2);
3551 }
3552 }
3553
3554 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3555 bytes from constant string DATA + OFFSET and return it as target
3556 constant. */
3557
3558 rtx
3559 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3560 machine_mode mode)
3561 {
3562 const char *str = (const char *) data;
3563
3564 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3565 return const0_rtx;
3566
3567 return c_readstr (str + offset, mode);
3568 }
3569
3570 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3571 NULL_RTX if we failed; the caller should emit a normal call. */
3572
3573 static rtx
3574 expand_builtin_strncpy (tree exp, rtx target)
3575 {
3576 location_t loc = EXPR_LOCATION (exp);
3577
3578 if (validate_arglist (exp,
3579 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3580 {
3581 tree dest = CALL_EXPR_ARG (exp, 0);
3582 tree src = CALL_EXPR_ARG (exp, 1);
3583 tree len = CALL_EXPR_ARG (exp, 2);
3584 tree slen = c_strlen (src, 1);
3585
3586 /* We must be passed a constant len and src parameter. */
3587 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
3588 return NULL_RTX;
3589
3590 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3591
3592 /* We're required to pad with trailing zeros if the requested
3593 len is greater than strlen(s2)+1. In that case try to
3594 use store_by_pieces; if that fails, punt. */
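/* E.g. strncpy (buf, "ab", 5) must store 'a', 'b' and then three NUL
   bytes, so the whole five-byte store can be done by pieces from the
   (zero-padded) source string.  */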
3595 if (tree_int_cst_lt (slen, len))
3596 {
3597 unsigned int dest_align = get_pointer_alignment (dest);
3598 const char *p = c_getstr (src);
3599 rtx dest_mem;
3600
3601 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
3602 || !can_store_by_pieces (tree_to_uhwi (len),
3603 builtin_strncpy_read_str,
3604 CONST_CAST (char *, p),
3605 dest_align, false))
3606 return NULL_RTX;
3607
3608 dest_mem = get_memory_rtx (dest, len);
3609 store_by_pieces (dest_mem, tree_to_uhwi (len),
3610 builtin_strncpy_read_str,
3611 CONST_CAST (char *, p), dest_align, false, 0);
3612 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3613 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3614 return dest_mem;
3615 }
3616 }
3617 return NULL_RTX;
3618 }
3619
3620 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3621 bytes from constant string DATA + OFFSET and return it as target
3622 constant. */
3623
3624 rtx
3625 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3626 machine_mode mode)
3627 {
3628 const char *c = (const char *) data;
3629 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3630
3631 memset (p, *c, GET_MODE_SIZE (mode));
3632
3633 return c_readstr (p, mode);
3634 }
3635
3636 /* Callback routine for store_by_pieces. Return the RTL of a register
3637 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3638 char value given in the RTL register data. For example, if mode is
3639 4 bytes wide, return the RTL for 0x01010101*data. */
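/* E.g. replicating the byte 0xab into a 4-byte mode via the
   multiplication below gives 0xab * 0x01010101 == 0xabababab.  */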
3640
3641 static rtx
3642 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3643 machine_mode mode)
3644 {
3645 rtx target, coeff;
3646 size_t size;
3647 char *p;
3648
3649 size = GET_MODE_SIZE (mode);
3650 if (size == 1)
3651 return (rtx) data;
3652
3653 p = XALLOCAVEC (char, size);
3654 memset (p, 1, size);
3655 coeff = c_readstr (p, mode);
3656
3657 target = convert_to_mode (mode, (rtx) data, 1);
3658 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3659 return force_reg (mode, target);
3660 }
3661
3662 /* Expand expression EXP, which is a call to the memset builtin. Return
3663 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3664 try to get the result in TARGET, if convenient (and in mode MODE if that's
3665 convenient). */
3666
3667 static rtx
3668 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
3669 {
3670 if (!validate_arglist (exp,
3671 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3672 return NULL_RTX;
3673 else
3674 {
3675 tree dest = CALL_EXPR_ARG (exp, 0);
3676 tree val = CALL_EXPR_ARG (exp, 1);
3677 tree len = CALL_EXPR_ARG (exp, 2);
3678 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3679 }
3680 }
3681
3682 /* Expand expression EXP, which is an instrumented call to the memset builtin.
3683 Return NULL_RTX if we failed; the caller should emit a normal call, otherwise
3684 try to get the result in TARGET, if convenient (and in mode MODE if that's
3685 convenient). */
3686
3687 static rtx
3688 expand_builtin_memset_with_bounds (tree exp, rtx target, machine_mode mode)
3689 {
3690 if (!validate_arglist (exp,
3691 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3692 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3693 return NULL_RTX;
3694 else
3695 {
3696 tree dest = CALL_EXPR_ARG (exp, 0);
3697 tree val = CALL_EXPR_ARG (exp, 2);
3698 tree len = CALL_EXPR_ARG (exp, 3);
3699 rtx res = expand_builtin_memset_args (dest, val, len, target, mode, exp);
3700
3701 /* Return src bounds with the result. */
3702 if (res)
3703 {
3704 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3705 expand_normal (CALL_EXPR_ARG (exp, 1)));
3706 res = chkp_join_splitted_slot (res, bnd);
3707 }
3708 return res;
3709 }
3710 }
3711
3712 /* Helper function to do the actual work for expand_builtin_memset. The
3713 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3714 so that this can also be called without constructing an actual CALL_EXPR.
3715 The other arguments and return value are the same as for
3716 expand_builtin_memset. */
3717
3718 static rtx
3719 expand_builtin_memset_args (tree dest, tree val, tree len,
3720 rtx target, machine_mode mode, tree orig_exp)
3721 {
3722 tree fndecl, fn;
3723 enum built_in_function fcode;
3724 machine_mode val_mode;
3725 char c;
3726 unsigned int dest_align;
3727 rtx dest_mem, dest_addr, len_rtx;
3728 HOST_WIDE_INT expected_size = -1;
3729 unsigned int expected_align = 0;
3730 unsigned HOST_WIDE_INT min_size;
3731 unsigned HOST_WIDE_INT max_size;
3732 unsigned HOST_WIDE_INT probable_max_size;
3733
3734 dest_align = get_pointer_alignment (dest);
3735
3736 /* If DEST is not a pointer type, don't do this operation in-line. */
3737 if (dest_align == 0)
3738 return NULL_RTX;
3739
3740 if (currently_expanding_gimple_stmt)
3741 stringop_block_profile (currently_expanding_gimple_stmt,
3742 &expected_align, &expected_size);
3743
3744 if (expected_align < dest_align)
3745 expected_align = dest_align;
3746
3747 /* If the LEN parameter is zero, return DEST. */
3748 if (integer_zerop (len))
3749 {
3750 /* Evaluate and ignore VAL in case it has side-effects. */
3751 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3752 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3753 }
3754
3755 /* Stabilize the arguments in case we fail. */
3756 dest = builtin_save_expr (dest);
3757 val = builtin_save_expr (val);
3758 len = builtin_save_expr (len);
3759
3760 len_rtx = expand_normal (len);
3761 determine_block_size (len, len_rtx, &min_size, &max_size,
3762 &probable_max_size);
3763 dest_mem = get_memory_rtx (dest, len);
3764 val_mode = TYPE_MODE (unsigned_char_type_node);
3765
3766 if (TREE_CODE (val) != INTEGER_CST)
3767 {
3768 rtx val_rtx;
3769
3770 val_rtx = expand_normal (val);
3771 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
3772
3773 /* Assume that we can memset by pieces if we can store
3774 the coefficients by pieces (in the required modes).
3775 We can't pass builtin_memset_gen_str as that emits RTL. */
3776 c = 1;
3777 if (tree_fits_uhwi_p (len)
3778 && can_store_by_pieces (tree_to_uhwi (len),
3779 builtin_memset_read_str, &c, dest_align,
3780 true))
3781 {
3782 val_rtx = force_reg (val_mode, val_rtx);
3783 store_by_pieces (dest_mem, tree_to_uhwi (len),
3784 builtin_memset_gen_str, val_rtx, dest_align,
3785 true, 0);
3786 }
3787 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3788 dest_align, expected_align,
3789 expected_size, min_size, max_size,
3790 probable_max_size))
3791 goto do_libcall;
3792
3793 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3794 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3795 return dest_mem;
3796 }
3797
3798 if (target_char_cast (val, &c))
3799 goto do_libcall;
3800
3801 if (c)
3802 {
3803 if (tree_fits_uhwi_p (len)
3804 && can_store_by_pieces (tree_to_uhwi (len),
3805 builtin_memset_read_str, &c, dest_align,
3806 true))
3807 store_by_pieces (dest_mem, tree_to_uhwi (len),
3808 builtin_memset_read_str, &c, dest_align, true, 0);
3809 else if (!set_storage_via_setmem (dest_mem, len_rtx,
3810 gen_int_mode (c, val_mode),
3811 dest_align, expected_align,
3812 expected_size, min_size, max_size,
3813 probable_max_size))
3814 goto do_libcall;
3815
3816 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3817 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3818 return dest_mem;
3819 }
3820
3821 set_mem_align (dest_mem, dest_align);
3822 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3823 CALL_EXPR_TAILCALL (orig_exp)
3824 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3825 expected_align, expected_size,
3826 min_size, max_size,
3827 probable_max_size);
3828
3829 if (dest_addr == 0)
3830 {
3831 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3832 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3833 }
3834
3835 return dest_addr;
3836
3837 do_libcall:
3838 fndecl = get_callee_fndecl (orig_exp);
3839 fcode = DECL_FUNCTION_CODE (fndecl);
3840 if (fcode == BUILT_IN_MEMSET
3841 || fcode == BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP)
3842 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
3843 dest, val, len);
3844 else if (fcode == BUILT_IN_BZERO)
3845 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
3846 dest, len);
3847 else
3848 gcc_unreachable ();
3849 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3850 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3851 return expand_call (fn, target, target == const0_rtx);
3852 }
3853
3854 /* Expand expression EXP, which is a call to the bzero builtin. Return
3855 NULL_RTX if we failed; the caller should emit a normal call. */
3856
3857 static rtx
3858 expand_builtin_bzero (tree exp)
3859 {
3860 tree dest, size;
3861 location_t loc = EXPR_LOCATION (exp);
3862
3863 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3864 return NULL_RTX;
3865
3866 dest = CALL_EXPR_ARG (exp, 0);
3867 size = CALL_EXPR_ARG (exp, 1);
3868
3869 /* New argument list transforming bzero(ptr x, int y) to
3870 memset(ptr x, int 0, size_t y). This is done this way
3871 so that if it isn't expanded inline, we fall back to
3872 calling bzero instead of memset. */
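/* E.g. "bzero (p, n)" is expanded as "memset (p, 0, (size_t) n)";
   passing EXP as ORIG_EXP lets the library fallback re-emit bzero.  */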
3873
3874 return expand_builtin_memset_args (dest, integer_zero_node,
3875 fold_convert_loc (loc,
3876 size_type_node, size),
3877 const0_rtx, VOIDmode, exp);
3878 }
3879
3880 /* Try to expand cmpstr operation ICODE with the given operands.
3881 Return the result rtx on success, otherwise return null. */
3882
3883 static rtx
3884 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
3885 HOST_WIDE_INT align)
3886 {
3887 machine_mode insn_mode = insn_data[icode].operand[0].mode;
3888
3889 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
3890 target = NULL_RTX;
3891
3892 struct expand_operand ops[4];
3893 create_output_operand (&ops[0], target, insn_mode);
3894 create_fixed_operand (&ops[1], arg1_rtx);
3895 create_fixed_operand (&ops[2], arg2_rtx);
3896 create_integer_operand (&ops[3], align);
3897 if (maybe_expand_insn (icode, 4, ops))
3898 return ops[0].value;
3899 return NULL_RTX;
3900 }
3901
3902 /* Try to expand cmpstrn or cmpmem operation ICODE with the given operands.
3903 ARG3_TYPE is the type of ARG3_RTX. Return the result rtx on success,
3904 otherwise return null. */
3905
3906 static rtx
3907 expand_cmpstrn_or_cmpmem (insn_code icode, rtx target, rtx arg1_rtx,
3908 rtx arg2_rtx, tree arg3_type, rtx arg3_rtx,
3909 HOST_WIDE_INT align)
3910 {
3911 machine_mode insn_mode = insn_data[icode].operand[0].mode;
3912
3913 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
3914 target = NULL_RTX;
3915
3916 struct expand_operand ops[5];
3917 create_output_operand (&ops[0], target, insn_mode);
3918 create_fixed_operand (&ops[1], arg1_rtx);
3919 create_fixed_operand (&ops[2], arg2_rtx);
3920 create_convert_operand_from (&ops[3], arg3_rtx, TYPE_MODE (arg3_type),
3921 TYPE_UNSIGNED (arg3_type));
3922 create_integer_operand (&ops[4], align);
3923 if (maybe_expand_insn (icode, 5, ops))
3924 return ops[0].value;
3925 return NULL_RTX;
3926 }
3927
3928 /* Expand expression EXP, which is a call to the memcmp built-in function.
3929 Return NULL_RTX if we failed and the caller should emit a normal call,
3930 otherwise try to get the result in TARGET, if convenient. */
3931
3932 static rtx
3933 expand_builtin_memcmp (tree exp, rtx target)
3934 {
3935 if (!validate_arglist (exp,
3936 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3937 return NULL_RTX;
3938
3939 /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
3940 implementing memcmp because it will stop if it encounters two
3941 zero bytes. */
3942 insn_code icode = direct_optab_handler (cmpmem_optab, SImode);
3943 if (icode == CODE_FOR_nothing)
3944 return NULL_RTX;
3945
3946 tree arg1 = CALL_EXPR_ARG (exp, 0);
3947 tree arg2 = CALL_EXPR_ARG (exp, 1);
3948 tree len = CALL_EXPR_ARG (exp, 2);
3949
3950 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3951 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3952
3953 /* If we cannot determine the alignment of either argument, call the function. */
3954 if (arg1_align == 0 || arg2_align == 0)
3955 return NULL_RTX;
3956
3957 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
3958 location_t loc = EXPR_LOCATION (exp);
3959 rtx arg1_rtx = get_memory_rtx (arg1, len);
3960 rtx arg2_rtx = get_memory_rtx (arg2, len);
3961 rtx arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
3962
3963 /* Set MEM_SIZE as appropriate. */
3964 if (CONST_INT_P (arg3_rtx))
3965 {
3966 set_mem_size (arg1_rtx, INTVAL (arg3_rtx));
3967 set_mem_size (arg2_rtx, INTVAL (arg3_rtx));
3968 }
3969
3970 rtx result = expand_cmpstrn_or_cmpmem (icode, target, arg1_rtx, arg2_rtx,
3971 TREE_TYPE (len), arg3_rtx,
3972 MIN (arg1_align, arg2_align));
3973 if (result)
3974 {
3975 /* Return the value in the proper mode for this function. */
3976 if (GET_MODE (result) == mode)
3977 return result;
3978
3979 if (target != 0)
3980 {
3981 convert_move (target, result, 0);
3982 return target;
3983 }
3984
3985 return convert_to_mode (mode, result, 0);
3986 }
3987
3988 result = target;
3989 if (! (result != 0
3990 && REG_P (result) && GET_MODE (result) == mode
3991 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3992 result = gen_reg_rtx (mode);
3993
3994 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
3995 TYPE_MODE (integer_type_node), 3,
3996 XEXP (arg1_rtx, 0), Pmode,
3997 XEXP (arg2_rtx, 0), Pmode,
3998 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
3999 TYPE_UNSIGNED (sizetype)),
4000 TYPE_MODE (sizetype));
4001 return result;
4002 }
4003
4004 /* Expand expression EXP, which is a call to the strcmp builtin.
4005 Return NULL_RTX if we failed; the caller should emit a normal call,
4006 otherwise try to get the result in TARGET, if convenient. */
4007
4008 static rtx
4009 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4010 {
4011 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4012 return NULL_RTX;
4013
4014 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
4015 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4016 if (cmpstr_icode != CODE_FOR_nothing || cmpstrn_icode != CODE_FOR_nothing)
4017 {
4018 rtx arg1_rtx, arg2_rtx;
4019 tree fndecl, fn;
4020 tree arg1 = CALL_EXPR_ARG (exp, 0);
4021 tree arg2 = CALL_EXPR_ARG (exp, 1);
4022 rtx result = NULL_RTX;
4023
4024 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4025 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4026
4027 /* If we cannot determine the alignment of either argument, call the function. */
4028 if (arg1_align == 0 || arg2_align == 0)
4029 return NULL_RTX;
4030
4031 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4032 arg1 = builtin_save_expr (arg1);
4033 arg2 = builtin_save_expr (arg2);
4034
4035 arg1_rtx = get_memory_rtx (arg1, NULL);
4036 arg2_rtx = get_memory_rtx (arg2, NULL);
4037
4038 /* Try to call cmpstrsi. */
4039 if (cmpstr_icode != CODE_FOR_nothing)
4040 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
4041 MIN (arg1_align, arg2_align));
4042
4043 /* Try to determine at least one length and call cmpstrnsi. */
4044 if (!result && cmpstrn_icode != CODE_FOR_nothing)
4045 {
4046 tree len;
4047 rtx arg3_rtx;
4048
4049 tree len1 = c_strlen (arg1, 1);
4050 tree len2 = c_strlen (arg2, 1);
4051
4052 if (len1)
4053 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4054 if (len2)
4055 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4056
4057 /* If we don't have a constant length for the first, use the length
4058 of the second, if we know it. We don't require a constant for
4059 this case; some cost analysis could be done if both are available
4060 but neither is constant. For now, assume they're equally cheap,
4061 unless one has side effects. If both strings have constant lengths,
4062 use the smaller. */
4063
4064 if (!len1)
4065 len = len2;
4066 else if (!len2)
4067 len = len1;
4068 else if (TREE_SIDE_EFFECTS (len1))
4069 len = len2;
4070 else if (TREE_SIDE_EFFECTS (len2))
4071 len = len1;
4072 else if (TREE_CODE (len1) != INTEGER_CST)
4073 len = len2;
4074 else if (TREE_CODE (len2) != INTEGER_CST)
4075 len = len1;
4076 else if (tree_int_cst_lt (len1, len2))
4077 len = len1;
4078 else
4079 len = len2;
4080
4081 /* If both arguments have side effects, we cannot optimize. */
4082 if (len && !TREE_SIDE_EFFECTS (len))
4083 {
4084 arg3_rtx = expand_normal (len);
4085 result = expand_cmpstrn_or_cmpmem
4086 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
4087 arg3_rtx, MIN (arg1_align, arg2_align));
4088 }
4089 }
4090
4091 if (result)
4092 {
4093 /* Return the value in the proper mode for this function. */
4094 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4095 if (GET_MODE (result) == mode)
4096 return result;
4097 if (target == 0)
4098 return convert_to_mode (mode, result, 0);
4099 convert_move (target, result, 0);
4100 return target;
4101 }
4102
4103 /* Expand the library call ourselves using a stabilized argument
4104 list to avoid re-evaluating the function's arguments twice. */
4105 fndecl = get_callee_fndecl (exp);
4106 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4107 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4108 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4109 return expand_call (fn, target, target == const0_rtx);
4110 }
4111 return NULL_RTX;
4112 }
4113
4114 /* Expand expression EXP, which is a call to the strncmp builtin.
4115 Return NULL_RTX if we failed; the caller should emit a normal call,
4116 otherwise try to get the result in TARGET, if convenient. */
4117
4118 static rtx
4119 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4120 ATTRIBUTE_UNUSED machine_mode mode)
4121 {
4122 location_t loc = EXPR_LOCATION (exp);
4123
4124 if (!validate_arglist (exp,
4125 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4126 return NULL_RTX;
4127
4128 /* If c_strlen can determine an expression for one of the string
4129 lengths, and it doesn't have side effects, then emit cmpstrnsi
4130 using length MIN(strlen(string)+1, arg3). */
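/* E.g. for strncmp (s, "abc", n) the comparison length becomes
   MIN (4, n), since no more than strlen ("abc") + 1 bytes of a
   NUL-terminated operand can influence the result.  */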
4131 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4132 if (cmpstrn_icode != CODE_FOR_nothing)
4133 {
4134 tree len, len1, len2;
4135 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4136 rtx result;
4137 tree fndecl, fn;
4138 tree arg1 = CALL_EXPR_ARG (exp, 0);
4139 tree arg2 = CALL_EXPR_ARG (exp, 1);
4140 tree arg3 = CALL_EXPR_ARG (exp, 2);
4141
4142 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4143 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4144
4145 len1 = c_strlen (arg1, 1);
4146 len2 = c_strlen (arg2, 1);
4147
4148 if (len1)
4149 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4150 if (len2)
4151 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4152
4153 /* If we don't have a constant length for the first, use the length
4154 of the second, if we know it. We don't require a constant for
4155 this case; some cost analysis could be done if both are available
4156 but neither is constant. For now, assume they're equally cheap,
4157 unless one has side effects. If both strings have constant lengths,
4158 use the smaller. */
4159
4160 if (!len1)
4161 len = len2;
4162 else if (!len2)
4163 len = len1;
4164 else if (TREE_SIDE_EFFECTS (len1))
4165 len = len2;
4166 else if (TREE_SIDE_EFFECTS (len2))
4167 len = len1;
4168 else if (TREE_CODE (len1) != INTEGER_CST)
4169 len = len2;
4170 else if (TREE_CODE (len2) != INTEGER_CST)
4171 len = len1;
4172 else if (tree_int_cst_lt (len1, len2))
4173 len = len1;
4174 else
4175 len = len2;
4176
4177 /* If both arguments have side effects, we cannot optimize. */
4178 if (!len || TREE_SIDE_EFFECTS (len))
4179 return NULL_RTX;
4180
4181 /* The actual new length parameter is MIN(len,arg3). */
4182 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
4183 fold_convert_loc (loc, TREE_TYPE (len), arg3));
4184
4185 /* If we cannot determine the alignment of either argument, call the function. */
4186 if (arg1_align == 0 || arg2_align == 0)
4187 return NULL_RTX;
4188
4189 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4190 arg1 = builtin_save_expr (arg1);
4191 arg2 = builtin_save_expr (arg2);
4192 len = builtin_save_expr (len);
4193
4194 arg1_rtx = get_memory_rtx (arg1, len);
4195 arg2_rtx = get_memory_rtx (arg2, len);
4196 arg3_rtx = expand_normal (len);
4197 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
4198 arg2_rtx, TREE_TYPE (len), arg3_rtx,
4199 MIN (arg1_align, arg2_align));
4200 if (result)
4201 {
4202 /* Return the value in the proper mode for this function. */
4203 mode = TYPE_MODE (TREE_TYPE (exp));
4204 if (GET_MODE (result) == mode)
4205 return result;
4206 if (target == 0)
4207 return convert_to_mode (mode, result, 0);
4208 convert_move (target, result, 0);
4209 return target;
4210 }
4211
4212 /* Expand the library call ourselves using a stabilized argument
4213 list to avoid re-evaluating the function's arguments twice. */
4214 fndecl = get_callee_fndecl (exp);
4215 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
4216 arg1, arg2, len);
4217 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4218 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4219 return expand_call (fn, target, target == const0_rtx);
4220 }
4221 return NULL_RTX;
4222 }
4223
4224 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4225 if that's convenient. */
4226
4227 rtx
4228 expand_builtin_saveregs (void)
4229 {
4230 rtx val;
4231 rtx_insn *seq;
4232
4233 /* Don't do __builtin_saveregs more than once in a function.
4234 Save the result of the first call and reuse it. */
4235 if (saveregs_value != 0)
4236 return saveregs_value;
4237
4238 /* When this function is called, it means that registers must be
4239 saved on entry to this function. So we migrate the call to the
4240 first insn of this function. */
4241
4242 start_sequence ();
4243
4244 /* Do whatever the machine needs done in this case. */
4245 val = targetm.calls.expand_builtin_saveregs ();
4246
4247 seq = get_insns ();
4248 end_sequence ();
4249
4250 saveregs_value = val;
4251
4252 /* Put the insns after the NOTE that starts the function. If this
4253 is inside a start_sequence, make the outer-level insn chain current, so
4254 the code is placed at the start of the function. */
4255 push_topmost_sequence ();
4256 emit_insn_after (seq, entry_of_function ());
4257 pop_topmost_sequence ();
4258
4259 return val;
4260 }
4261
4262 /* Expand a call to __builtin_next_arg. */
4263
4264 static rtx
4265 expand_builtin_next_arg (void)
4266 {
4267 /* Checking arguments is already done in fold_builtin_next_arg
4268 that must be called before this function. */
4269 return expand_binop (ptr_mode, add_optab,
4270 crtl->args.internal_arg_pointer,
4271 crtl->args.arg_offset_rtx,
4272 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4273 }
4274
4275 /* Make it easier for the backends by protecting the valist argument
4276 from multiple evaluations. */
4277
4278 static tree
4279 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4280 {
4281 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4282
4283 /* The current way of determining the type of valist is completely
4284 bogus. We should have the information on the va builtin instead. */
4285 if (!vatype)
4286 vatype = targetm.fn_abi_va_list (cfun->decl);
4287
4288 if (TREE_CODE (vatype) == ARRAY_TYPE)
4289 {
4290 if (TREE_SIDE_EFFECTS (valist))
4291 valist = save_expr (valist);
4292
4293 /* For this case, the backends will be expecting a pointer to
4294 vatype, but it's possible we've actually been given an array
4295 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4296 So fix it. */
4297 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4298 {
4299 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4300 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4301 }
4302 }
4303 else
4304 {
4305 tree pt = build_pointer_type (vatype);
4306
4307 if (! needs_lvalue)
4308 {
4309 if (! TREE_SIDE_EFFECTS (valist))
4310 return valist;
4311
4312 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4313 TREE_SIDE_EFFECTS (valist) = 1;
4314 }
4315
4316 if (TREE_SIDE_EFFECTS (valist))
4317 valist = save_expr (valist);
4318 valist = fold_build2_loc (loc, MEM_REF,
4319 vatype, valist, build_int_cst (pt, 0));
4320 }
4321
4322 return valist;
4323 }
4324
4325 /* The "standard" definition of va_list is void*. */
4326
4327 tree
4328 std_build_builtin_va_list (void)
4329 {
4330 return ptr_type_node;
4331 }
4332
4333 /* The "standard" abi va_list is va_list_type_node. */
4334
4335 tree
4336 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4337 {
4338 return va_list_type_node;
4339 }
4340
4341 /* The "standard" type of va_list is va_list_type_node. */
4342
4343 tree
4344 std_canonical_va_list_type (tree type)
4345 {
4346 tree wtype, htype;
4347
4348 if (INDIRECT_REF_P (type))
4349 type = TREE_TYPE (type);
4350 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE (type)))
4351 type = TREE_TYPE (type);
4352 wtype = va_list_type_node;
4353 htype = type;
4354 /* If va_list is a record type, look through a pointer to it. */
4355 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4356 htype = TREE_TYPE (htype);
4357 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4358 {
4359 /* If va_list is an array type, the argument may have decayed
4360 to a pointer type, e.g. by being passed to another function.
4361 In that case, unwrap both types so that we can compare the
4362 underlying records. */
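/* E.g. on targets where va_list is "struct tag[1]", a va_list argument
   passed to another function decays to "struct tag *", so compare the
   element type with the pointed-to type.  */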
4363 if (TREE_CODE (htype) == ARRAY_TYPE
4364 || POINTER_TYPE_P (htype))
4365 {
4366 wtype = TREE_TYPE (wtype);
4367 htype = TREE_TYPE (htype);
4368 }
4369 }
4370 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4371 return va_list_type_node;
4372
4373 return NULL_TREE;
4374 }
4375
4376 /* The "standard" implementation of va_start: just assign `nextarg' to
4377 the variable. */
4378
4379 void
4380 std_expand_builtin_va_start (tree valist, rtx nextarg)
4381 {
4382 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4383 convert_move (va_r, nextarg, 0);
4384
4385 /* We do not have any valid bounds for the pointer, so
4386 just store zero bounds for it. */
4387 if (chkp_function_instrumented_p (current_function_decl))
4388 chkp_expand_bounds_reset_for_mem (valist,
4389 make_tree (TREE_TYPE (valist),
4390 nextarg));
4391 }
4392
4393 /* Expand EXP, a call to __builtin_va_start. */
4394
4395 static rtx
4396 expand_builtin_va_start (tree exp)
4397 {
4398 rtx nextarg;
4399 tree valist;
4400 location_t loc = EXPR_LOCATION (exp);
4401
4402 if (call_expr_nargs (exp) < 2)
4403 {
4404 error_at (loc, "too few arguments to function %<va_start%>");
4405 return const0_rtx;
4406 }
4407
4408 if (fold_builtin_next_arg (exp, true))
4409 return const0_rtx;
4410
4411 nextarg = expand_builtin_next_arg ();
4412 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4413
4414 if (targetm.expand_builtin_va_start)
4415 targetm.expand_builtin_va_start (valist, nextarg);
4416 else
4417 std_expand_builtin_va_start (valist, nextarg);
4418
4419 return const0_rtx;
4420 }
4421
4422 /* Expand EXP, a call to __builtin_va_end. */
4423
4424 static rtx
4425 expand_builtin_va_end (tree exp)
4426 {
4427 tree valist = CALL_EXPR_ARG (exp, 0);
4428
4429 /* Evaluate for side effects, if needed. I hate macros that don't
4430 do that. */
4431 if (TREE_SIDE_EFFECTS (valist))
4432 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4433
4434 return const0_rtx;
4435 }
4436
4437 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4438 builtin rather than just as an assignment in stdarg.h because of the
4439 nastiness of array-type va_list types. */
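/* When va_list is an array type, "dst = src" would merely copy the
   decayed pointer value, so the expansion below block-copies the
   underlying va_list object instead.  */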
4440
4441 static rtx
4442 expand_builtin_va_copy (tree exp)
4443 {
4444 tree dst, src, t;
4445 location_t loc = EXPR_LOCATION (exp);
4446
4447 dst = CALL_EXPR_ARG (exp, 0);
4448 src = CALL_EXPR_ARG (exp, 1);
4449
4450 dst = stabilize_va_list_loc (loc, dst, 1);
4451 src = stabilize_va_list_loc (loc, src, 0);
4452
4453 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4454
4455 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4456 {
4457 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4458 TREE_SIDE_EFFECTS (t) = 1;
4459 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4460 }
4461 else
4462 {
4463 rtx dstb, srcb, size;
4464
4465 /* Evaluate to pointers. */
4466 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4467 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4468 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4469 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4470
4471 dstb = convert_memory_address (Pmode, dstb);
4472 srcb = convert_memory_address (Pmode, srcb);
4473
4474 /* "Dereference" to BLKmode memories. */
4475 dstb = gen_rtx_MEM (BLKmode, dstb);
4476 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4477 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4478 srcb = gen_rtx_MEM (BLKmode, srcb);
4479 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4480 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4481
4482 /* Copy. */
4483 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4484 }
4485
4486 return const0_rtx;
4487 }
4488
4489 /* Expand a call to one of the builtin functions __builtin_frame_address or
4490 __builtin_return_address. */
4491
4492 static rtx
4493 expand_builtin_frame_address (tree fndecl, tree exp)
4494 {
4495 /* The argument must be a nonnegative integer constant.
4496 It counts the number of frames to scan up the stack.
4497 The value is either the frame pointer value or the return
4498 address saved in that frame. */
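/* E.g. __builtin_frame_address (0) yields the current frame pointer
   and __builtin_return_address (0) the current function's return
   address; a count of 1 refers to the caller's frame, and so on.  */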
4499 if (call_expr_nargs (exp) == 0)
4500 /* Warning about missing arg was already issued. */
4501 return const0_rtx;
4502 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
4503 {
4504 error ("invalid argument to %qD", fndecl);
4505 return const0_rtx;
4506 }
4507 else
4508 {
4509 /* Number of frames to scan up the stack. */
4510 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
4511
4512 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
4513
4514 /* Some ports cannot access arbitrary stack frames. */
4515 if (tem == NULL)
4516 {
4517 warning (0, "unsupported argument to %qD", fndecl);
4518 return const0_rtx;
4519 }
4520
4521 if (count)
4522 {
4523 /* Warn since no effort is made to ensure that any frame
4524 beyond the current one exists or can be safely reached. */
4525 warning (OPT_Wframe_address, "calling %qD with "
4526 "a nonzero argument is unsafe", fndecl);
4527 }
4528
4529 /* For __builtin_frame_address, return what we've got. */
4530 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4531 return tem;
4532
4533 if (!REG_P (tem)
4534 && ! CONSTANT_P (tem))
4535 tem = copy_addr_to_reg (tem);
4536 return tem;
4537 }
4538 }
4539
4540 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4541 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4542 is the same as for allocate_dynamic_stack_space. */
4543
4544 static rtx
4545 expand_builtin_alloca (tree exp, bool cannot_accumulate)
4546 {
4547 rtx op0;
4548 rtx result;
4549 bool valid_arglist;
4550 unsigned int align;
4551 bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
4552 == BUILT_IN_ALLOCA_WITH_ALIGN);
4553
4554 valid_arglist
4555 = (alloca_with_align
4556 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4557 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4558
4559 if (!valid_arglist)
4560 return NULL_RTX;
4561
4562 /* Compute the argument. */
4563 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4564
4565 /* Compute the alignment. */
4566 align = (alloca_with_align
4567 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
4568 : BIGGEST_ALIGNMENT);
4569
4570 /* Allocate the desired space. */
4571 result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
4572 result = convert_memory_address (ptr_mode, result);
4573
4574 return result;
4575 }
4576
4577 /* Expand a call to the bswap builtin in EXP.
4578 Return NULL_RTX if a normal call should be emitted rather than expanding the
4579 function in-line. If convenient, the result should be placed in TARGET.
4580 SUBTARGET may be used as the target for computing one of EXP's operands. */
4581
4582 static rtx
4583 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
4584 rtx subtarget)
4585 {
4586 tree arg;
4587 rtx op0;
4588
4589 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4590 return NULL_RTX;
4591
4592 arg = CALL_EXPR_ARG (exp, 0);
4593 op0 = expand_expr (arg,
4594 subtarget && GET_MODE (subtarget) == target_mode
4595 ? subtarget : NULL_RTX,
4596 target_mode, EXPAND_NORMAL);
4597 if (GET_MODE (op0) != target_mode)
4598 op0 = convert_to_mode (target_mode, op0, 1);
4599
4600 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
4601
4602 gcc_assert (target);
4603
4604 return convert_to_mode (target_mode, target, 1);
4605 }
4606
4607 /* Expand a call to a unary builtin in EXP.
4608 Return NULL_RTX if a normal call should be emitted rather than expanding the
4609 function in-line. If convenient, the result should be placed in TARGET.
4610 SUBTARGET may be used as the target for computing one of EXP's operands. */
4611
4612 static rtx
4613 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
4614 rtx subtarget, optab op_optab)
4615 {
4616 rtx op0;
4617
4618 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4619 return NULL_RTX;
4620
4621 /* Compute the argument. */
4622 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4623 (subtarget
4624 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4625 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
4626 VOIDmode, EXPAND_NORMAL);
4627 /* Compute op, into TARGET if possible.
4628 Set TARGET to wherever the result comes back. */
4629 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4630 op_optab, op0, target, op_optab != clrsb_optab);
4631 gcc_assert (target);
4632
4633 return convert_to_mode (target_mode, target, 0);
4634 }
4635
4636 /* Expand a call to __builtin_expect. We just return our argument
4637 as the builtin_expect semantics should already have been handled by
4638 the tree branch prediction pass. */
4639
4640 static rtx
4641 expand_builtin_expect (tree exp, rtx target)
4642 {
4643 tree arg;
4644
4645 if (call_expr_nargs (exp) < 2)
4646 return const0_rtx;
4647 arg = CALL_EXPR_ARG (exp, 0);
4648
4649 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4650 /* When guessing was done, the hints should be already stripped away. */
4651 gcc_assert (!flag_guess_branch_prob
4652 || optimize == 0 || seen_error ());
4653 return target;
4654 }
4655
4656 /* Expand a call to __builtin_assume_aligned. We just return our first
4657 argument as the builtin_assume_aligned semantics should already have
4658 been handled by CCP. */
4659
4660 static rtx
4661 expand_builtin_assume_aligned (tree exp, rtx target)
4662 {
4663 if (call_expr_nargs (exp) < 2)
4664 return const0_rtx;
4665 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4666 EXPAND_NORMAL);
4667 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4668 && (call_expr_nargs (exp) < 3
4669 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4670 return target;
4671 }
4672
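/* Expand a call to __builtin_trap: emit the target's trap instruction
   if it has one, otherwise call abort.  Either way, end with a barrier
   since control cannot continue past the trap.  */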
4673 void
4674 expand_builtin_trap (void)
4675 {
4676 if (targetm.have_trap ())
4677 {
4678 rtx_insn *insn = emit_insn (targetm.gen_trap ());
4679 /* For trap insns when not accumulating outgoing args force
4680 REG_ARGS_SIZE note to prevent crossjumping of calls with
4681 different args sizes. */
4682 if (!ACCUMULATE_OUTGOING_ARGS)
4683 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4684 }
4685 else
4686 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
4687 emit_barrier ();
4688 }
4689
4690 /* Expand a call to __builtin_unreachable. We do nothing except emit
4691 a barrier saying that control flow will not pass here.
4692
4693 It is the responsibility of the program being compiled to ensure
4694 that control flow never reaches __builtin_unreachable. */
4695 static void
4696 expand_builtin_unreachable (void)
4697 {
4698 emit_barrier ();
4699 }
4700
4701 /* Expand EXP, a call to fabs, fabsf or fabsl.
4702 Return NULL_RTX if a normal call should be emitted rather than expanding
4703 the function inline. If convenient, the result should be placed
4704 in TARGET. SUBTARGET may be used as the target for computing
4705 the operand. */
4706
4707 static rtx
4708 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4709 {
4710 machine_mode mode;
4711 tree arg;
4712 rtx op0;
4713
4714 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4715 return NULL_RTX;
4716
4717 arg = CALL_EXPR_ARG (exp, 0);
4718 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4719 mode = TYPE_MODE (TREE_TYPE (arg));
4720 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4721 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4722 }
4723
4724 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4725 Return NULL_RTX if a normal call should be emitted rather than expanding the
4726 function inline. If convenient, the result should be placed in TARGET.
4727 SUBTARGET may be used as the target for computing the operand. */
4728
4729 static rtx
4730 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4731 {
4732 rtx op0, op1;
4733 tree arg;
4734
4735 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4736 return NULL_RTX;
4737
4738 arg = CALL_EXPR_ARG (exp, 0);
4739 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4740
4741 arg = CALL_EXPR_ARG (exp, 1);
4742 op1 = expand_normal (arg);
4743
4744 return expand_copysign (op0, op1, target);
4745 }
4746
4747 /* Expand a call to __builtin___clear_cache. */
4748
4749 static rtx
4750 expand_builtin___clear_cache (tree exp)
4751 {
4752 if (!targetm.code_for_clear_cache)
4753 {
4754 #ifdef CLEAR_INSN_CACHE
4755 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4756 does something. Just do the default expansion to a call to
4757 __clear_cache(). */
4758 return NULL_RTX;
4759 #else
4760 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4761 does nothing. There is no need to call it. Do nothing. */
4762 return const0_rtx;
4763 #endif /* CLEAR_INSN_CACHE */
4764 }
4765
4766 /* We have a "clear_cache" insn, and it will handle everything. */
4767 tree begin, end;
4768 rtx begin_rtx, end_rtx;
4769
4770 /* We must not expand to a library call. If we did, any
4771 fallback library function in libgcc that might contain a call to
4772 __builtin___clear_cache() would recurse infinitely. */
4773 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4774 {
4775 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
4776 return const0_rtx;
4777 }
4778
4779 if (targetm.have_clear_cache ())
4780 {
4781 struct expand_operand ops[2];
4782
4783 begin = CALL_EXPR_ARG (exp, 0);
4784 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
4785
4786 end = CALL_EXPR_ARG (exp, 1);
4787 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
4788
4789 create_address_operand (&ops[0], begin_rtx);
4790 create_address_operand (&ops[1], end_rtx);
4791 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
4792 return const0_rtx;
4793 }
4794 return const0_rtx;
4795 }
4796
4797 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
4798
4799 static rtx
4800 round_trampoline_addr (rtx tramp)
4801 {
4802 rtx temp, addend, mask;
4803
4804 /* If we don't need too much alignment, we'll have been guaranteed
4805 proper alignment by get_trampoline_type. */
4806 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
4807 return tramp;
4808
4809 /* Round address up to desired boundary. */
4810 temp = gen_reg_rtx (Pmode);
4811 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
4812 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
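/* I.e. compute (TRAMP + (align - 1)) & -align; e.g. for a 16-byte
   boundary, ADDEND is 15 and MASK is -16.  */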
4813
4814 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
4815 temp, 0, OPTAB_LIB_WIDEN);
4816 tramp = expand_simple_binop (Pmode, AND, temp, mask,
4817 temp, 0, OPTAB_LIB_WIDEN);
4818
4819 return tramp;
4820 }
4821
4822 static rtx
4823 expand_builtin_init_trampoline (tree exp, bool onstack)
4824 {
4825 tree t_tramp, t_func, t_chain;
4826 rtx m_tramp, r_tramp, r_chain, tmp;
4827
4828 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
4829 POINTER_TYPE, VOID_TYPE))
4830 return NULL_RTX;
4831
4832 t_tramp = CALL_EXPR_ARG (exp, 0);
4833 t_func = CALL_EXPR_ARG (exp, 1);
4834 t_chain = CALL_EXPR_ARG (exp, 2);
4835
4836 r_tramp = expand_normal (t_tramp);
4837 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
4838 MEM_NOTRAP_P (m_tramp) = 1;
4839
4840 /* If ONSTACK, the TRAMP argument should be the address of a field
4841 within the local function's FRAME decl. Either way, let's see if
4842 we can fill in the MEM_ATTRs for this memory. */
4843 if (TREE_CODE (t_tramp) == ADDR_EXPR)
4844 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
4845
4846 /* Creator of a heap trampoline is responsible for making sure the
4847 address is aligned to at least STACK_BOUNDARY. Normally malloc
4848 will ensure this anyhow. */
4849 tmp = round_trampoline_addr (r_tramp);
4850 if (tmp != r_tramp)
4851 {
4852 m_tramp = change_address (m_tramp, BLKmode, tmp);
4853 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
4854 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
4855 }
4856
4857 /* The FUNC argument should be the address of the nested function.
4858 Extract the actual function decl to pass to the hook. */
4859 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
4860 t_func = TREE_OPERAND (t_func, 0);
4861 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
4862
4863 r_chain = expand_normal (t_chain);
4864
4865 /* Generate insns to initialize the trampoline. */
4866 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
4867
4868 if (onstack)
4869 {
4870 trampolines_created = 1;
4871
4872 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
4873 "trampoline generated for nested function %qD", t_func);
4874 }
4875
4876 return const0_rtx;
4877 }
4878
4879 static rtx
4880 expand_builtin_adjust_trampoline (tree exp)
4881 {
4882 rtx tramp;
4883
4884 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4885 return NULL_RTX;
4886
4887 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4888 tramp = round_trampoline_addr (tramp);
4889 if (targetm.calls.trampoline_adjust_address)
4890 tramp = targetm.calls.trampoline_adjust_address (tramp);
4891
4892 return tramp;
4893 }
4894
4895 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
4896 function. The function first checks whether the back end provides
4897 an insn to implement signbit for the respective mode. If not, it
4898 checks whether the floating point format of the value is such that
4899 the sign bit can be extracted. If that is not the case, error out.
4900 EXP is the expression that is a call to the builtin function; if
4901 convenient, the result should be placed in TARGET. */
4902 static rtx
4903 expand_builtin_signbit (tree exp, rtx target)
4904 {
4905 const struct real_format *fmt;
4906 machine_mode fmode, imode, rmode;
4907 tree arg;
4908 int word, bitpos;
4909 enum insn_code icode;
4910 rtx temp;
4911 location_t loc = EXPR_LOCATION (exp);
4912
4913 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4914 return NULL_RTX;
4915
4916 arg = CALL_EXPR_ARG (exp, 0);
4917 fmode = TYPE_MODE (TREE_TYPE (arg));
4918 rmode = TYPE_MODE (TREE_TYPE (exp));
4919 fmt = REAL_MODE_FORMAT (fmode);
4920
4921 arg = builtin_save_expr (arg);
4922
4923 /* Expand the argument yielding a RTX expression. */
4924 temp = expand_normal (arg);
4925
4926 /* Check if the back end provides an insn that handles signbit for the
4927 argument's mode. */
4928 icode = optab_handler (signbit_optab, fmode);
4929 if (icode != CODE_FOR_nothing)
4930 {
4931 rtx_insn *last = get_last_insn ();
4932 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
4933 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
4934 return target;
4935 delete_insns_since (last);
4936 }
4937
4938 /* For floating point formats without a sign bit, implement signbit
4939 as "ARG < 0.0". */
4940 bitpos = fmt->signbit_ro;
4941 if (bitpos < 0)
4942 {
4943 /* But we can't do this if the format supports signed zero. */
4944 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
4945
4946 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
4947 build_real (TREE_TYPE (arg), dconst0));
4948 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4949 }
4950
4951 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
4952 {
4953 imode = int_mode_for_mode (fmode);
4954 gcc_assert (imode != BLKmode);
4955 temp = gen_lowpart (imode, temp);
4956 }
4957 else
4958 {
4959 imode = word_mode;
4960 /* Handle targets with different FP word orders. */
4961 if (FLOAT_WORDS_BIG_ENDIAN)
4962 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
4963 else
4964 word = bitpos / BITS_PER_WORD;
4965 temp = operand_subword_force (temp, word, fmode);
4966 bitpos = bitpos % BITS_PER_WORD;
4967 }
4968
4969 /* Force the intermediate word_mode (or narrower) result into a
4970 register. This avoids attempting to create paradoxical SUBREGs
4971 of floating point modes below. */
4972 temp = force_reg (imode, temp);
4973
4974 /* If the bitpos is within the "result mode" lowpart, the operation
4975 can be implemented with a single bitwise AND. Otherwise, we need
4976 a right shift and an AND. */
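/* E.g. for IEEE single (SFmode) with RMODE == SImode, BITPOS is 31,
   so the mask computed below is 0x80000000.  */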
4977
4978 if (bitpos < GET_MODE_BITSIZE (rmode))
4979 {
4980 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
4981
4982 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
4983 temp = gen_lowpart (rmode, temp);
4984 temp = expand_binop (rmode, and_optab, temp,
4985 immed_wide_int_const (mask, rmode),
4986 NULL_RTX, 1, OPTAB_LIB_WIDEN);
4987 }
4988 else
4989 {
4990 /* Perform a logical right shift to place the signbit in the least
4991 significant bit, then truncate the result to the desired mode
4992 and mask just this bit. */
4993 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
4994 temp = gen_lowpart (rmode, temp);
4995 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
4996 NULL_RTX, 1, OPTAB_LIB_WIDEN);
4997 }
4998
4999 return temp;
5000 }
5001
5002 /* Expand fork or exec calls. TARGET is the desired target of the
5003 call. EXP is the call. FN is the declaration of the
5004 actual function being called. IGNORE is nonzero if the
5005 value is to be ignored. */
5006
5007 static rtx
5008 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5009 {
5010 tree id, decl;
5011 tree call;
5012
5013 /* If we are not profiling, just call the function. */
5014 if (!profile_arc_flag)
5015 return NULL_RTX;
5016
5017 /* Otherwise call the wrapper. This should be equivalent for the rest of
5018 the compiler, so the code does not diverge, and the wrapper may run the
5019 code necessary for keeping the profiling sane. */
5020
5021 switch (DECL_FUNCTION_CODE (fn))
5022 {
5023 case BUILT_IN_FORK:
5024 id = get_identifier ("__gcov_fork");
5025 break;
5026
5027 case BUILT_IN_EXECL:
5028 id = get_identifier ("__gcov_execl");
5029 break;
5030
5031 case BUILT_IN_EXECV:
5032 id = get_identifier ("__gcov_execv");
5033 break;
5034
5035 case BUILT_IN_EXECLP:
5036 id = get_identifier ("__gcov_execlp");
5037 break;
5038
5039 case BUILT_IN_EXECLE:
5040 id = get_identifier ("__gcov_execle");
5041 break;
5042
5043 case BUILT_IN_EXECVP:
5044 id = get_identifier ("__gcov_execvp");
5045 break;
5046
5047 case BUILT_IN_EXECVE:
5048 id = get_identifier ("__gcov_execve");
5049 break;
5050
5051 default:
5052 gcc_unreachable ();
5053 }
5054
5055 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5056 FUNCTION_DECL, id, TREE_TYPE (fn));
5057 DECL_EXTERNAL (decl) = 1;
5058 TREE_PUBLIC (decl) = 1;
5059 DECL_ARTIFICIAL (decl) = 1;
5060 TREE_NOTHROW (decl) = 1;
5061 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5062 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5063 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5064 return expand_call (call, target, ignore);
5065 }
5066
5067
5068 \f
5069 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5070 the pointer in these functions is void*, the tree optimizers may remove
5071 casts. The mode computed in expand_builtin isn't reliable either, due
5072 to __sync_bool_compare_and_swap.
5073
5074 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5075 group of builtins. This gives us log2 of the mode size. */
5076
5077 static inline machine_mode
5078 get_builtin_sync_mode (int fcode_diff)
5079 {
5080 /* The size is not negotiable, so ask not to get BLKmode in return
5081 if the target indicates that a smaller size would be better. */
5082 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5083 }
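/* For example, BUILT_IN_SYNC_FETCH_AND_ADD_4 gives FCODE_DIFF == 2, so on
   the usual 8-bit-unit targets this requests an 8 << 2 == 32-bit integer
   mode, i.e. SImode.  */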
5084
5085 /* Expand the memory expression LOC and return the appropriate memory operand
5086 for the builtin_sync operations. */
5087
5088 static rtx
5089 get_builtin_sync_mem (tree loc, machine_mode mode)
5090 {
5091 rtx addr, mem;
5092
5093 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5094 addr = convert_memory_address (Pmode, addr);
5095
5096 /* Note that we explicitly do not want any alias information for this
5097 memory, so that we kill all other live memories. Otherwise we don't
5098 satisfy the full barrier semantics of the intrinsic. */
5099 mem = validize_mem (gen_rtx_MEM (mode, addr));
5100
5101 /* The alignment needs to be at least that of the mode. */
5102 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5103 get_pointer_alignment (loc)));
5104 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5105 MEM_VOLATILE_P (mem) = 1;
5106
5107 return mem;
5108 }
5109
5110 /* Make sure an argument is in the right mode.
5111 EXP is the tree argument.
5112 MODE is the mode it should be in. */
5113
5114 static rtx
5115 expand_expr_force_mode (tree exp, machine_mode mode)
5116 {
5117 rtx val;
5118 machine_mode old_mode;
5119
5120 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5121 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5122 of CONST_INTs, where we know the old_mode only from the call argument. */
5123
5124 old_mode = GET_MODE (val);
5125 if (old_mode == VOIDmode)
5126 old_mode = TYPE_MODE (TREE_TYPE (exp));
5127 val = convert_modes (mode, old_mode, val, 1);
5128 return val;
5129 }
5130
5131
5132 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5133 EXP is the CALL_EXPR. CODE is the rtx code
5134 that corresponds to the arithmetic or logical operation from the name;
5135 an exception here is that NOT actually means NAND. TARGET is an optional
5136 place for us to store the results; AFTER is true if this is the
5137 xxx_and_fetch form. */
5138
5139 static rtx
5140 expand_builtin_sync_operation (machine_mode mode, tree exp,
5141 enum rtx_code code, bool after,
5142 rtx target)
5143 {
5144 rtx val, mem;
5145 location_t loc = EXPR_LOCATION (exp);
5146
5147 if (code == NOT && warn_sync_nand)
5148 {
5149 tree fndecl = get_callee_fndecl (exp);
5150 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5151
5152 static bool warned_f_a_n, warned_n_a_f;
5153
5154 switch (fcode)
5155 {
5156 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5157 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5158 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5159 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5160 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5161 if (warned_f_a_n)
5162 break;
5163
5164 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5165 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5166 warned_f_a_n = true;
5167 break;
5168
5169 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5170 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5171 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5172 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5173 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5174 if (warned_n_a_f)
5175 break;
5176
5177 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5178 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5179 warned_n_a_f = true;
5180 break;
5181
5182 default:
5183 gcc_unreachable ();
5184 }
5185 }
5186
5187 /* Expand the operands. */
5188 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5189 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5190
5191 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
5192 after);
5193 }
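/* For reference, the GCC 4.4 semantic change warned about above: the NAND
   builtins formerly updated *ptr to ~*ptr & val, whereas they now compute
   ~(*ptr & val), the usual definition of NAND.  */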
5194
5195 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5196 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5197 true if this is the boolean form. TARGET is a place for us to store the
5198 results; this is NOT optional if IS_BOOL is true. */
5199
5200 static rtx
5201 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
5202 bool is_bool, rtx target)
5203 {
5204 rtx old_val, new_val, mem;
5205 rtx *pbool, *poval;
5206
5207 /* Expand the operands. */
5208 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5209 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5210 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5211
5212 pbool = poval = NULL;
5213 if (target != const0_rtx)
5214 {
5215 if (is_bool)
5216 pbool = &target;
5217 else
5218 poval = &target;
5219 }
5220 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5221 false, MEMMODEL_SYNC_SEQ_CST,
5222 MEMMODEL_SYNC_SEQ_CST))
5223 return NULL_RTX;
5224
5225 return target;
5226 }
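/* The user-level forms handled here are, schematically:

     old = __sync_val_compare_and_swap (ptr, oldval, newval);
     ok = __sync_bool_compare_and_swap (ptr, oldval, newval);

   Both store NEWVAL into *PTR iff *PTR equals OLDVAL; the val form returns
   the prior contents, the bool form whether the swap was performed.  */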
5227
5228 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5229 general form is actually an atomic exchange, and some targets only
5230 support a reduced form with the second argument being a constant 1.
5231 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5232 the results. */
5233
5234 static rtx
5235 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
5236 rtx target)
5237 {
5238 rtx val, mem;
5239
5240 /* Expand the operands. */
5241 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5242 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5243
5244 return expand_sync_lock_test_and_set (target, mem, val);
5245 }
5246
5247 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5248
5249 static void
5250 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
5251 {
5252 rtx mem;
5253
5254 /* Expand the operands. */
5255 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5256
5257 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
5258 }
5259
5260 /* Given an integer representing an ``enum memmodel'', verify its
5261 correctness and return the memory model enum. */
5262
5263 static enum memmodel
5264 get_memmodel (tree exp)
5265 {
5266 rtx op;
5267 unsigned HOST_WIDE_INT val;
5268
5269 /* If the parameter is not a constant, it's a run time value so we'll just
5270 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5271 if (TREE_CODE (exp) != INTEGER_CST)
5272 return MEMMODEL_SEQ_CST;
5273
5274 op = expand_normal (exp);
5275
5276 val = INTVAL (op);
5277 if (targetm.memmodel_check)
5278 val = targetm.memmodel_check (val);
5279 else if (val & ~MEMMODEL_MASK)
5280 {
5281 warning (OPT_Winvalid_memory_model,
5282 "Unknown architecture specifier in memory model to builtin.");
5283 return MEMMODEL_SEQ_CST;
5284 }
5285
5286 /* We should never see an explicit user SYNC memory model, so >= LAST works. */
5287 if (memmodel_base (val) >= MEMMODEL_LAST)
5288 {
5289 warning (OPT_Winvalid_memory_model,
5290 "invalid memory model argument to builtin");
5291 return MEMMODEL_SEQ_CST;
5292 }
5293
5294 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
5295 be conservative and promote consume to acquire. */
5296 if (val == MEMMODEL_CONSUME)
5297 val = MEMMODEL_ACQUIRE;
5298
5299 return (enum memmodel) val;
5300 }
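/* E.g. a user call __atomic_load_n (p, __ATOMIC_CONSUME) therefore reaches
   the expanders below with MEMMODEL_ACQUIRE.  */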
5301
5302 /* Expand the __atomic_exchange intrinsic:
5303 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5304 EXP is the CALL_EXPR.
5305 TARGET is an optional place for us to store the results. */
5306
5307 static rtx
5308 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
5309 {
5310 rtx val, mem;
5311 enum memmodel model;
5312
5313 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5314
5315 if (!flag_inline_atomics)
5316 return NULL_RTX;
5317
5318 /* Expand the operands. */
5319 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5320 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5321
5322 return expand_atomic_exchange (target, mem, val, model);
5323 }
5324
5325 /* Expand the __atomic_compare_exchange intrinsic:
5326 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5327 TYPE desired, BOOL weak,
5328 enum memmodel success,
5329 enum memmodel failure)
5330 EXP is the CALL_EXPR.
5331 TARGET is an optional place for us to store the results. */
5332
5333 static rtx
5334 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
5335 rtx target)
5336 {
5337 rtx expect, desired, mem, oldval;
5338 rtx_code_label *label;
5339 enum memmodel success, failure;
5340 tree weak;
5341 bool is_weak;
5342
5343 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5344 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5345
5346 if (failure > success)
5347 {
5348 warning (OPT_Winvalid_memory_model,
5349 "failure memory model cannot be stronger than success memory "
5350 "model for %<__atomic_compare_exchange%>");
5351 success = MEMMODEL_SEQ_CST;
5352 }
5353
5354 if (is_mm_release (failure) || is_mm_acq_rel (failure))
5355 {
5356 warning (OPT_Winvalid_memory_model,
5357 "invalid failure memory model for "
5358 "%<__atomic_compare_exchange%>");
5359 failure = MEMMODEL_SEQ_CST;
5360 success = MEMMODEL_SEQ_CST;
5361 }
5362
5363
5364 if (!flag_inline_atomics)
5365 return NULL_RTX;
5366
5367 /* Expand the operands. */
5368 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5369
5370 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5371 expect = convert_memory_address (Pmode, expect);
5372 expect = gen_rtx_MEM (mode, expect);
5373 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5374
5375 weak = CALL_EXPR_ARG (exp, 3);
5376 is_weak = false;
5377 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
5378 is_weak = true;
5379
5380 if (target == const0_rtx)
5381 target = NULL;
5382
5383 /* Lest the rtl backend create a race condition with an improper store
5384 to memory, always create a new pseudo for OLDVAL. */
5385 oldval = NULL;
5386
5387 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
5388 is_weak, success, failure))
5389 return NULL_RTX;
5390
5391 /* Conditionally store back to EXPECT, lest we create a race condition
5392 with an improper store to memory. */
5393 /* ??? With a rearrangement of atomics at the gimple level, we can handle
5394 the normal case where EXPECT is totally private, i.e. a register. At
5395 which point the store can be unconditional. */
5396 label = gen_label_rtx ();
5397 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
5398 GET_MODE (target), 1, label);
5399 emit_move_insn (expect, oldval);
5400 emit_label (label);
5401
5402 return target;
5403 }
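/* Schematically, the sequence emitted above is:

     target = CAS (mem, *expect, desired);  oldval = previous *mem
     if (target != 0) goto done;
     *expect = oldval;
   done:

   so EXPECT is updated only when the compare-and-swap failed.  */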
5404
5405 /* Expand the __atomic_load intrinsic:
5406 TYPE __atomic_load (TYPE *object, enum memmodel)
5407 EXP is the CALL_EXPR.
5408 TARGET is an optional place for us to store the results. */
5409
5410 static rtx
5411 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
5412 {
5413 rtx mem;
5414 enum memmodel model;
5415
5416 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5417 if (is_mm_release (model) || is_mm_acq_rel (model))
5418 {
5419 warning (OPT_Winvalid_memory_model,
5420 "invalid memory model for %<__atomic_load%>");
5421 model = MEMMODEL_SEQ_CST;
5422 }
5423
5424 if (!flag_inline_atomics)
5425 return NULL_RTX;
5426
5427 /* Expand the operand. */
5428 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5429
5430 return expand_atomic_load (target, mem, model);
5431 }
5432
5433
5434 /* Expand the __atomic_store intrinsic:
5435 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5436 EXP is the CALL_EXPR.
5437 The return value only indicates whether the store was expanded. */
5438
5439 static rtx
5440 expand_builtin_atomic_store (machine_mode mode, tree exp)
5441 {
5442 rtx mem, val;
5443 enum memmodel model;
5444
5445 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5446 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
5447 || is_mm_release (model)))
5448 {
5449 warning (OPT_Winvalid_memory_model,
5450 "invalid memory model for %<__atomic_store%>");
5451 model = MEMMODEL_SEQ_CST;
5452 }
5453
5454 if (!flag_inline_atomics)
5455 return NULL_RTX;
5456
5457 /* Expand the operands. */
5458 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5459 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5460
5461 return expand_atomic_store (mem, val, model, false);
5462 }
5463
5464 /* Expand the __atomic_fetch_XXX intrinsic:
5465 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5466 EXP is the CALL_EXPR.
5467 TARGET is an optional place for us to store the results.
5468 CODE is the operation: PLUS, MINUS, AND, XOR, or IOR (NOT means NAND).
5469 FETCH_AFTER is true if returning the result of the operation.
5470 FETCH_AFTER is false if returning the value before the operation.
5471 IGNORE is true if the result is not used.
5472 EXT_CALL is the correct builtin for an external call if this cannot be
5473 resolved to an instruction sequence. */
5474
5475 static rtx
5476 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
5477 enum rtx_code code, bool fetch_after,
5478 bool ignore, enum built_in_function ext_call)
5479 {
5480 rtx val, mem, ret;
5481 enum memmodel model;
5482 tree fndecl;
5483 tree addr;
5484
5485 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5486
5487 /* Expand the operands. */
5488 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5489 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5490
5491 /* Only try generating instructions if inlining is turned on. */
5492 if (flag_inline_atomics)
5493 {
5494 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
5495 if (ret)
5496 return ret;
5497 }
5498
5499 /* If no fallback library routine was specified, give up. */
5500 if (ext_call == BUILT_IN_NONE)
5501 return NULL_RTX;
5502
5503 /* Change the call to the specified function. */
5504 fndecl = get_callee_fndecl (exp);
5505 addr = CALL_EXPR_FN (exp);
5506 STRIP_NOPS (addr);
5507
5508 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
5509 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
5510
5511 /* Expand the call here so we can emit trailing code. */
5512 ret = expand_call (exp, target, ignore);
5513
5514 /* Replace the original function just in case it matters. */
5515 TREE_OPERAND (addr, 0) = fndecl;
5516
5517 /* Then issue the arithmetic correction to return the right result. */
5518 if (!ignore)
5519 {
5520 if (code == NOT)
5521 {
5522 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
5523 OPTAB_LIB_WIDEN);
5524 ret = expand_simple_unop (mode, NOT, ret, target, true);
5525 }
5526 else
5527 ret = expand_simple_binop (mode, code, ret, val, target, true,
5528 OPTAB_LIB_WIDEN);
5529 }
5530 return ret;
5531 }
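/* An example of the correction above: if __atomic_add_fetch_4 cannot be
   inlined, the call becomes the library __atomic_fetch_add_4, which returns
   the pre-operation value, and RET + VAL is emitted to recover the
   add_fetch result.  For NAND the fixup is ~(RET & VAL), matching the
   two-step expansion above.  */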
5532
5533 /* Expand an atomic clear operation.
5534 void __atomic_clear (BOOL *obj, enum memmodel)
5535 EXP is the call expression. */
5536
5537 static rtx
5538 expand_builtin_atomic_clear (tree exp)
5539 {
5540 machine_mode mode;
5541 rtx mem, ret;
5542 enum memmodel model;
5543
5544 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5545 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5546 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5547
5548 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
5549 {
5550 warning (OPT_Winvalid_memory_model,
5551 "invalid memory model for %<__atomic_store%>");
5552 model = MEMMODEL_SEQ_CST;
5553 }
5554
5555 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
5556 Failing that, a plain store is emitted below. The only way this can
5557 fail is if the bool type is larger than a word size. Unlikely, but
5558 handle it anyway for completeness. Assume a single threaded model since
5559 there is no atomic support in this case, and no barriers are required. */
5560 ret = expand_atomic_store (mem, const0_rtx, model, true);
5561 if (!ret)
5562 emit_move_insn (mem, const0_rtx);
5563 return const0_rtx;
5564 }
5565
5566 /* Expand an atomic test_and_set operation.
5567 bool __atomic_test_and_set (BOOL *obj, enum memmodel)
5568 EXP is the call expression. */
5569
5570 static rtx
5571 expand_builtin_atomic_test_and_set (tree exp, rtx target)
5572 {
5573 rtx mem;
5574 enum memmodel model;
5575 machine_mode mode;
5576
5577 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5578 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5579 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5580
5581 return expand_atomic_test_and_set (target, mem, model);
5582 }
5583
5584
5585 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
5586 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
5587
5588 static tree
5589 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
5590 {
5591 int size;
5592 machine_mode mode;
5593 unsigned int mode_align, type_align;
5594
5595 if (TREE_CODE (arg0) != INTEGER_CST)
5596 return NULL_TREE;
5597
5598 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
5599 mode = mode_for_size (size, MODE_INT, 0);
5600 mode_align = GET_MODE_ALIGNMENT (mode);
5601
5602 if (TREE_CODE (arg1) == INTEGER_CST)
5603 {
5604 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
5605
5606 /* Either this argument is null, or it's a fake pointer encoding
5607 the alignment of the object. */
5608 val = val & -val;
5609 val *= BITS_PER_UNIT;
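	  /* E.g. a fake pointer value of 24 (binary 11000): 24 & -24 == 8,
	     so the object is taken to be 8-byte, i.e. 64-bit, aligned.  */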
5610
5611 if (val == 0 || mode_align < val)
5612 type_align = mode_align;
5613 else
5614 type_align = val;
5615 }
5616 else
5617 {
5618 tree ttype = TREE_TYPE (arg1);
5619
5620 /* This function is usually invoked and folded immediately by the front
5621 end before anything else has a chance to look at it. The pointer
5622 parameter at this point is usually cast to a void *, so check for that
5623 and look past the cast. */
5624 if (CONVERT_EXPR_P (arg1) && POINTER_TYPE_P (ttype)
5625 && VOID_TYPE_P (TREE_TYPE (ttype)))
5626 arg1 = TREE_OPERAND (arg1, 0);
5627
5628 ttype = TREE_TYPE (arg1);
5629 gcc_assert (POINTER_TYPE_P (ttype));
5630
5631 /* Get the underlying type of the object. */
5632 ttype = TREE_TYPE (ttype);
5633 type_align = TYPE_ALIGN (ttype);
5634 }
5635
5636 /* If the object has smaller alignment, the lock free routines cannot
5637 be used. */
5638 if (type_align < mode_align)
5639 return boolean_false_node;
5640
5641 /* Check if a compare_and_swap pattern exists for the mode which represents
5642 the required size. The pattern is not allowed to fail, so the existence
5643 of the pattern indicates support is present. */
5644 if (can_compare_and_swap_p (mode, true))
5645 return boolean_true_node;
5646 else
5647 return boolean_false_node;
5648 }
5649
5650 /* Return true if the parameters to call EXP represent an object which will
5651 always generate lock free instructions. The first argument represents the
5652 size of the object, and the second parameter is a pointer to the object
5653 itself. If NULL is passed for the object, then the result is based on
5654 typical alignment for an object of the specified size. Otherwise return
5655 false. */
5656
5657 static rtx
5658 expand_builtin_atomic_always_lock_free (tree exp)
5659 {
5660 tree size;
5661 tree arg0 = CALL_EXPR_ARG (exp, 0);
5662 tree arg1 = CALL_EXPR_ARG (exp, 1);
5663
5664 if (TREE_CODE (arg0) != INTEGER_CST)
5665 {
5666 error ("non-constant argument 1 to __atomic_always_lock_free");
5667 return const0_rtx;
5668 }
5669
5670 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
5671 if (size == boolean_true_node)
5672 return const1_rtx;
5673 return const0_rtx;
5674 }
5675
5676 /* Return boolean_true_node if it can be determined that object ARG1 of size
5677 ARG0 is lock free on this architecture, or NULL_TREE if not. */
5678
5679 static tree
5680 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
5681 {
5682 if (!flag_inline_atomics)
5683 return NULL_TREE;
5684
5685 /* If it isn't always lock free, don't generate a result. */
5686 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
5687 return boolean_true_node;
5688
5689 return NULL_TREE;
5690 }
5691
5692 /* Return true if the parameters to call EXP represent an object which will
5693 always generate lock free instructions. The first argument represents the
5694 size of the object, and the second parameter is a pointer to the object
5695 itself. If NULL is passed for the object, then the result is based on
5696 typical alignment for an object of the specified size. Otherwise return
5697 NULL_RTX. */
5698
5699 static rtx
5700 expand_builtin_atomic_is_lock_free (tree exp)
5701 {
5702 tree size;
5703 tree arg0 = CALL_EXPR_ARG (exp, 0);
5704 tree arg1 = CALL_EXPR_ARG (exp, 1);
5705
5706 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5707 {
5708 error ("non-integer argument 1 to __atomic_is_lock_free");
5709 return NULL_RTX;
5710 }
5711
5712 if (!flag_inline_atomics)
5713 return NULL_RTX;
5714
5715 /* If the value is known at compile time, return the RTX for it. */
5716 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
5717 if (size == boolean_true_node)
5718 return const1_rtx;
5719
5720 return NULL_RTX;
5721 }
5722
5723 /* Expand the __atomic_thread_fence intrinsic:
5724 void __atomic_thread_fence (enum memmodel)
5725 EXP is the CALL_EXPR. */
5726
5727 static void
5728 expand_builtin_atomic_thread_fence (tree exp)
5729 {
5730 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5731 expand_mem_thread_fence (model);
5732 }
5733
5734 /* Expand the __atomic_signal_fence intrinsic:
5735 void __atomic_signal_fence (enum memmodel)
5736 EXP is the CALL_EXPR. */
5737
5738 static void
5739 expand_builtin_atomic_signal_fence (tree exp)
5740 {
5741 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5742 expand_mem_signal_fence (model);
5743 }
5744
5745 /* Expand the __sync_synchronize intrinsic. */
5746
5747 static void
5748 expand_builtin_sync_synchronize (void)
5749 {
5750 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
5751 }
5752
5753 static rtx
5754 expand_builtin_thread_pointer (tree exp, rtx target)
5755 {
5756 enum insn_code icode;
5757 if (!validate_arglist (exp, VOID_TYPE))
5758 return const0_rtx;
5759 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
5760 if (icode != CODE_FOR_nothing)
5761 {
5762 struct expand_operand op;
5763 /* If the target is not suitable then create a new target. */
5764 if (target == NULL_RTX
5765 || !REG_P (target)
5766 || GET_MODE (target) != Pmode)
5767 target = gen_reg_rtx (Pmode);
5768 create_output_operand (&op, target, Pmode);
5769 expand_insn (icode, 1, &op);
5770 return target;
5771 }
5772 error ("__builtin_thread_pointer is not supported on this target");
5773 return const0_rtx;
5774 }
5775
5776 static void
5777 expand_builtin_set_thread_pointer (tree exp)
5778 {
5779 enum insn_code icode;
5780 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5781 return;
5782 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
5783 if (icode != CODE_FOR_nothing)
5784 {
5785 struct expand_operand op;
5786 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
5787 Pmode, EXPAND_NORMAL);
5788 create_input_operand (&op, val, Pmode);
5789 expand_insn (icode, 1, &op);
5790 return;
5791 }
5792 error ("__builtin_set_thread_pointer is not supported on this target");
5793 }
5794
5795 \f
5796 /* Emit code to restore the stack pointer to the value saved in VAR. */
5797
5798 static void
5799 expand_stack_restore (tree var)
5800 {
5801 rtx_insn *prev;
5802 rtx sa = expand_normal (var);
5803
5804 sa = convert_memory_address (Pmode, sa);
5805
5806 prev = get_last_insn ();
5807 emit_stack_restore (SAVE_BLOCK, sa);
5808
5809 record_new_stack_level ();
5810
5811 fixup_args_size_notes (prev, get_last_insn (), 0);
5812 }
5813
5814 /* Emit code to save the current value of stack. */
5815
5816 static rtx
5817 expand_stack_save (void)
5818 {
5819 rtx ret = NULL_RTX;
5820
5821 emit_stack_save (SAVE_BLOCK, &ret);
5822 return ret;
5823 }
5824
5825
5826 /* Expand an expression EXP that calls a built-in function,
5827 with result going to TARGET if that's convenient
5828 (and in mode MODE if that's convenient).
5829 SUBTARGET may be used as the target for computing one of EXP's operands.
5830 IGNORE is nonzero if the value is to be ignored. */
5831
5832 rtx
5833 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
5834 int ignore)
5835 {
5836 tree fndecl = get_callee_fndecl (exp);
5837 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5838 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5839 int flags;
5840
5841 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5842 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5843
5844 /* When ASan is enabled, we don't want to expand some memory/string
5845 builtins and rely on libsanitizer's hooks. This allows us to avoid
5846 redundant checks and be sure that possible overflows will be detected
5847 by ASan. */
5848
5849 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
5850 return expand_call (exp, target, ignore);
5851
5852 /* When not optimizing, generate calls to library functions for a certain
5853 set of builtins. */
5854 if (!optimize
5855 && !called_as_built_in (fndecl)
5856 && fcode != BUILT_IN_FORK
5857 && fcode != BUILT_IN_EXECL
5858 && fcode != BUILT_IN_EXECV
5859 && fcode != BUILT_IN_EXECLP
5860 && fcode != BUILT_IN_EXECLE
5861 && fcode != BUILT_IN_EXECVP
5862 && fcode != BUILT_IN_EXECVE
5863 && fcode != BUILT_IN_ALLOCA
5864 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
5865 && fcode != BUILT_IN_FREE
5866 && fcode != BUILT_IN_CHKP_SET_PTR_BOUNDS
5867 && fcode != BUILT_IN_CHKP_INIT_PTR_BOUNDS
5868 && fcode != BUILT_IN_CHKP_NULL_PTR_BOUNDS
5869 && fcode != BUILT_IN_CHKP_COPY_PTR_BOUNDS
5870 && fcode != BUILT_IN_CHKP_NARROW_PTR_BOUNDS
5871 && fcode != BUILT_IN_CHKP_STORE_PTR_BOUNDS
5872 && fcode != BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
5873 && fcode != BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
5874 && fcode != BUILT_IN_CHKP_CHECK_PTR_BOUNDS
5875 && fcode != BUILT_IN_CHKP_GET_PTR_LBOUND
5876 && fcode != BUILT_IN_CHKP_GET_PTR_UBOUND
5877 && fcode != BUILT_IN_CHKP_BNDRET)
5878 return expand_call (exp, target, ignore);
5879
5880 /* The built-in function expanders test for target == const0_rtx
5881 to determine whether the function's result will be ignored. */
5882 if (ignore)
5883 target = const0_rtx;
5884
5885 /* If the result of a pure or const built-in function is ignored, and
5886 none of its arguments are volatile, we can avoid expanding the
5887 built-in call and just evaluate the arguments for side-effects. */
5888 if (target == const0_rtx
5889 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
5890 && !(flags & ECF_LOOPING_CONST_OR_PURE))
5891 {
5892 bool volatilep = false;
5893 tree arg;
5894 call_expr_arg_iterator iter;
5895
5896 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5897 if (TREE_THIS_VOLATILE (arg))
5898 {
5899 volatilep = true;
5900 break;
5901 }
5902
5903 if (! volatilep)
5904 {
5905 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5906 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5907 return const0_rtx;
5908 }
5909 }
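  /* For example, a lone statement strlen (s); evaluates S for side-effects
     but emits no call, since strlen is pure and its result is unused.  */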
5910
5911 /* expand_builtin_with_bounds is supposed to be used for
5912 instrumented builtin calls. */
5913 gcc_assert (!CALL_WITH_BOUNDS_P (exp));
5914
5915 switch (fcode)
5916 {
5917 CASE_FLT_FN (BUILT_IN_FABS):
5918 case BUILT_IN_FABSD32:
5919 case BUILT_IN_FABSD64:
5920 case BUILT_IN_FABSD128:
5921 target = expand_builtin_fabs (exp, target, subtarget);
5922 if (target)
5923 return target;
5924 break;
5925
5926 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5927 target = expand_builtin_copysign (exp, target, subtarget);
5928 if (target)
5929 return target;
5930 break;
5931
5932 /* Just do a normal library call if we were unable to fold
5933 the values. */
5934 CASE_FLT_FN (BUILT_IN_CABS):
5935 break;
5936
5937 CASE_FLT_FN (BUILT_IN_EXP):
5938 CASE_FLT_FN (BUILT_IN_EXP10):
5939 CASE_FLT_FN (BUILT_IN_POW10):
5940 CASE_FLT_FN (BUILT_IN_EXP2):
5941 CASE_FLT_FN (BUILT_IN_EXPM1):
5942 CASE_FLT_FN (BUILT_IN_LOGB):
5943 CASE_FLT_FN (BUILT_IN_LOG):
5944 CASE_FLT_FN (BUILT_IN_LOG10):
5945 CASE_FLT_FN (BUILT_IN_LOG2):
5946 CASE_FLT_FN (BUILT_IN_LOG1P):
5947 CASE_FLT_FN (BUILT_IN_TAN):
5948 CASE_FLT_FN (BUILT_IN_ASIN):
5949 CASE_FLT_FN (BUILT_IN_ACOS):
5950 CASE_FLT_FN (BUILT_IN_ATAN):
5951 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
5952 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5953 because of possible accuracy problems. */
5954 if (! flag_unsafe_math_optimizations)
5955 break;
5956 CASE_FLT_FN (BUILT_IN_SQRT):
5957 CASE_FLT_FN (BUILT_IN_FLOOR):
5958 CASE_FLT_FN (BUILT_IN_CEIL):
5959 CASE_FLT_FN (BUILT_IN_TRUNC):
5960 CASE_FLT_FN (BUILT_IN_ROUND):
5961 CASE_FLT_FN (BUILT_IN_NEARBYINT):
5962 CASE_FLT_FN (BUILT_IN_RINT):
5963 target = expand_builtin_mathfn (exp, target, subtarget);
5964 if (target)
5965 return target;
5966 break;
5967
5968 CASE_FLT_FN (BUILT_IN_FMA):
5969 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
5970 if (target)
5971 return target;
5972 break;
5973
5974 CASE_FLT_FN (BUILT_IN_ILOGB):
5975 if (! flag_unsafe_math_optimizations)
5976 break;
5977 CASE_FLT_FN (BUILT_IN_ISINF):
5978 CASE_FLT_FN (BUILT_IN_FINITE):
5979 case BUILT_IN_ISFINITE:
5980 case BUILT_IN_ISNORMAL:
5981 target = expand_builtin_interclass_mathfn (exp, target);
5982 if (target)
5983 return target;
5984 break;
5985
5986 CASE_FLT_FN (BUILT_IN_ICEIL):
5987 CASE_FLT_FN (BUILT_IN_LCEIL):
5988 CASE_FLT_FN (BUILT_IN_LLCEIL):
5989 CASE_FLT_FN (BUILT_IN_LFLOOR):
5990 CASE_FLT_FN (BUILT_IN_IFLOOR):
5991 CASE_FLT_FN (BUILT_IN_LLFLOOR):
5992 target = expand_builtin_int_roundingfn (exp, target);
5993 if (target)
5994 return target;
5995 break;
5996
5997 CASE_FLT_FN (BUILT_IN_IRINT):
5998 CASE_FLT_FN (BUILT_IN_LRINT):
5999 CASE_FLT_FN (BUILT_IN_LLRINT):
6000 CASE_FLT_FN (BUILT_IN_IROUND):
6001 CASE_FLT_FN (BUILT_IN_LROUND):
6002 CASE_FLT_FN (BUILT_IN_LLROUND):
6003 target = expand_builtin_int_roundingfn_2 (exp, target);
6004 if (target)
6005 return target;
6006 break;
6007
6008 CASE_FLT_FN (BUILT_IN_POWI):
6009 target = expand_builtin_powi (exp, target);
6010 if (target)
6011 return target;
6012 break;
6013
6014 CASE_FLT_FN (BUILT_IN_ATAN2):
6015 CASE_FLT_FN (BUILT_IN_LDEXP):
6016 CASE_FLT_FN (BUILT_IN_SCALB):
6017 CASE_FLT_FN (BUILT_IN_SCALBN):
6018 CASE_FLT_FN (BUILT_IN_SCALBLN):
6019 if (! flag_unsafe_math_optimizations)
6020 break;
6021
6022 CASE_FLT_FN (BUILT_IN_FMOD):
6023 CASE_FLT_FN (BUILT_IN_REMAINDER):
6024 CASE_FLT_FN (BUILT_IN_DREM):
6025 CASE_FLT_FN (BUILT_IN_POW):
6026 target = expand_builtin_mathfn_2 (exp, target, subtarget);
6027 if (target)
6028 return target;
6029 break;
6030
6031 CASE_FLT_FN (BUILT_IN_CEXPI):
6032 target = expand_builtin_cexpi (exp, target);
6033 gcc_assert (target);
6034 return target;
6035
6036 CASE_FLT_FN (BUILT_IN_SIN):
6037 CASE_FLT_FN (BUILT_IN_COS):
6038 if (! flag_unsafe_math_optimizations)
6039 break;
6040 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6041 if (target)
6042 return target;
6043 break;
6044
6045 CASE_FLT_FN (BUILT_IN_SINCOS):
6046 if (! flag_unsafe_math_optimizations)
6047 break;
6048 target = expand_builtin_sincos (exp);
6049 if (target)
6050 return target;
6051 break;
6052
6053 case BUILT_IN_APPLY_ARGS:
6054 return expand_builtin_apply_args ();
6055
6056 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6057 FUNCTION with a copy of the parameters described by
6058 ARGUMENTS, and ARGSIZE. It returns a block of memory
6059 allocated on the stack into which is stored all the registers
6060 that might possibly be used for returning the result of a
6061 function. ARGUMENTS is the value returned by
6062 __builtin_apply_args. ARGSIZE is the number of bytes of
6063 arguments that must be copied. ??? How should this value be
6064 computed? We'll also need a safe worst case value for varargs
6065 functions. */
6066 case BUILT_IN_APPLY:
6067 if (!validate_arglist (exp, POINTER_TYPE,
6068 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6069 && !validate_arglist (exp, REFERENCE_TYPE,
6070 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6071 return const0_rtx;
6072 else
6073 {
6074 rtx ops[3];
6075
6076 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6077 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6078 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6079
6080 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6081 }
6082
6083 /* __builtin_return (RESULT) causes the function to return the
6084 value described by RESULT. RESULT is the address of the block of
6085 memory returned by __builtin_apply. */
6086 case BUILT_IN_RETURN:
6087 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6088 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6089 return const0_rtx;
6090
6091 case BUILT_IN_SAVEREGS:
6092 return expand_builtin_saveregs ();
6093
6094 case BUILT_IN_VA_ARG_PACK:
6095 /* All valid uses of __builtin_va_arg_pack () are removed during
6096 inlining. */
6097 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6098 return const0_rtx;
6099
6100 case BUILT_IN_VA_ARG_PACK_LEN:
6101 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6102 inlining. */
6103 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6104 return const0_rtx;
6105
6106 /* Return the address of the first anonymous stack arg. */
6107 case BUILT_IN_NEXT_ARG:
6108 if (fold_builtin_next_arg (exp, false))
6109 return const0_rtx;
6110 return expand_builtin_next_arg ();
6111
6112 case BUILT_IN_CLEAR_CACHE:
6113 target = expand_builtin___clear_cache (exp);
6114 if (target)
6115 return target;
6116 break;
6117
6118 case BUILT_IN_CLASSIFY_TYPE:
6119 return expand_builtin_classify_type (exp);
6120
6121 case BUILT_IN_CONSTANT_P:
6122 return const0_rtx;
6123
6124 case BUILT_IN_FRAME_ADDRESS:
6125 case BUILT_IN_RETURN_ADDRESS:
6126 return expand_builtin_frame_address (fndecl, exp);
6127
6128 /* Returns the address of the area where the structure is returned.
6129 0 otherwise. */
6130 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6131 if (call_expr_nargs (exp) != 0
6132 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6133 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6134 return const0_rtx;
6135 else
6136 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6137
6138 case BUILT_IN_ALLOCA:
6139 case BUILT_IN_ALLOCA_WITH_ALIGN:
6140 /* If the allocation stems from the declaration of a variable-sized
6141 object, it cannot accumulate. */
6142 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
6143 if (target)
6144 return target;
6145 break;
6146
6147 case BUILT_IN_STACK_SAVE:
6148 return expand_stack_save ();
6149
6150 case BUILT_IN_STACK_RESTORE:
6151 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6152 return const0_rtx;
6153
6154 case BUILT_IN_BSWAP16:
6155 case BUILT_IN_BSWAP32:
6156 case BUILT_IN_BSWAP64:
6157 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
6158 if (target)
6159 return target;
6160 break;
6161
6162 CASE_INT_FN (BUILT_IN_FFS):
6163 target = expand_builtin_unop (target_mode, exp, target,
6164 subtarget, ffs_optab);
6165 if (target)
6166 return target;
6167 break;
6168
6169 CASE_INT_FN (BUILT_IN_CLZ):
6170 target = expand_builtin_unop (target_mode, exp, target,
6171 subtarget, clz_optab);
6172 if (target)
6173 return target;
6174 break;
6175
6176 CASE_INT_FN (BUILT_IN_CTZ):
6177 target = expand_builtin_unop (target_mode, exp, target,
6178 subtarget, ctz_optab);
6179 if (target)
6180 return target;
6181 break;
6182
6183 CASE_INT_FN (BUILT_IN_CLRSB):
6184 target = expand_builtin_unop (target_mode, exp, target,
6185 subtarget, clrsb_optab);
6186 if (target)
6187 return target;
6188 break;
6189
6190 CASE_INT_FN (BUILT_IN_POPCOUNT):
6191 target = expand_builtin_unop (target_mode, exp, target,
6192 subtarget, popcount_optab);
6193 if (target)
6194 return target;
6195 break;
6196
6197 CASE_INT_FN (BUILT_IN_PARITY):
6198 target = expand_builtin_unop (target_mode, exp, target,
6199 subtarget, parity_optab);
6200 if (target)
6201 return target;
6202 break;
6203
6204 case BUILT_IN_STRLEN:
6205 target = expand_builtin_strlen (exp, target, target_mode);
6206 if (target)
6207 return target;
6208 break;
6209
6210 case BUILT_IN_STRCPY:
6211 target = expand_builtin_strcpy (exp, target);
6212 if (target)
6213 return target;
6214 break;
6215
6216 case BUILT_IN_STRNCPY:
6217 target = expand_builtin_strncpy (exp, target);
6218 if (target)
6219 return target;
6220 break;
6221
6222 case BUILT_IN_STPCPY:
6223 target = expand_builtin_stpcpy (exp, target, mode);
6224 if (target)
6225 return target;
6226 break;
6227
6228 case BUILT_IN_MEMCPY:
6229 target = expand_builtin_memcpy (exp, target);
6230 if (target)
6231 return target;
6232 break;
6233
6234 case BUILT_IN_MEMPCPY:
6235 target = expand_builtin_mempcpy (exp, target, mode);
6236 if (target)
6237 return target;
6238 break;
6239
6240 case BUILT_IN_MEMSET:
6241 target = expand_builtin_memset (exp, target, mode);
6242 if (target)
6243 return target;
6244 break;
6245
6246 case BUILT_IN_BZERO:
6247 target = expand_builtin_bzero (exp);
6248 if (target)
6249 return target;
6250 break;
6251
6252 case BUILT_IN_STRCMP:
6253 target = expand_builtin_strcmp (exp, target);
6254 if (target)
6255 return target;
6256 break;
6257
6258 case BUILT_IN_STRNCMP:
6259 target = expand_builtin_strncmp (exp, target, mode);
6260 if (target)
6261 return target;
6262 break;
6263
6264 case BUILT_IN_BCMP:
6265 case BUILT_IN_MEMCMP:
6266 target = expand_builtin_memcmp (exp, target);
6267 if (target)
6268 return target;
6269 break;
6270
6271 case BUILT_IN_SETJMP:
6272 /* This should have been lowered to the builtins below. */
6273 gcc_unreachable ();
6274
6275 case BUILT_IN_SETJMP_SETUP:
6276 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6277 and the receiver label. */
6278 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6279 {
6280 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6281 VOIDmode, EXPAND_NORMAL);
6282 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6283 rtx_insn *label_r = label_rtx (label);
6284
6285 /* This is copied from the handling of non-local gotos. */
6286 expand_builtin_setjmp_setup (buf_addr, label_r);
6287 nonlocal_goto_handler_labels
6288 = gen_rtx_INSN_LIST (VOIDmode, label_r,
6289 nonlocal_goto_handler_labels);
6290 /* ??? Do not let expand_label treat us as such since we would
6291 not want to be both on the list of non-local labels and on
6292 the list of forced labels. */
6293 FORCED_LABEL (label) = 0;
6294 return const0_rtx;
6295 }
6296 break;
6297
6298 case BUILT_IN_SETJMP_RECEIVER:
6299 /* __builtin_setjmp_receiver is passed the receiver label. */
6300 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6301 {
6302 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6303 rtx_insn *label_r = label_rtx (label);
6304
6305 expand_builtin_setjmp_receiver (label_r);
6306 return const0_rtx;
6307 }
6308 break;
6309
6310 /* __builtin_longjmp is passed a pointer to an array of five words.
6311 It's similar to the C library longjmp function but works with
6312 __builtin_setjmp above. */
6313 case BUILT_IN_LONGJMP:
6314 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6315 {
6316 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6317 VOIDmode, EXPAND_NORMAL);
6318 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6319
6320 if (value != const1_rtx)
6321 {
6322 error ("%<__builtin_longjmp%> second argument must be 1");
6323 return const0_rtx;
6324 }
6325
6326 expand_builtin_longjmp (buf_addr, value);
6327 return const0_rtx;
6328 }
6329 break;
6330
6331 case BUILT_IN_NONLOCAL_GOTO:
6332 target = expand_builtin_nonlocal_goto (exp);
6333 if (target)
6334 return target;
6335 break;
6336
6337 /* This updates the setjmp buffer that is its argument with the value
6338 of the current stack pointer. */
6339 case BUILT_IN_UPDATE_SETJMP_BUF:
6340 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6341 {
6342 rtx buf_addr
6343 = expand_normal (CALL_EXPR_ARG (exp, 0));
6344
6345 expand_builtin_update_setjmp_buf (buf_addr);
6346 return const0_rtx;
6347 }
6348 break;
6349
6350 case BUILT_IN_TRAP:
6351 expand_builtin_trap ();
6352 return const0_rtx;
6353
6354 case BUILT_IN_UNREACHABLE:
6355 expand_builtin_unreachable ();
6356 return const0_rtx;
6357
6358 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6359 case BUILT_IN_SIGNBITD32:
6360 case BUILT_IN_SIGNBITD64:
6361 case BUILT_IN_SIGNBITD128:
6362 target = expand_builtin_signbit (exp, target);
6363 if (target)
6364 return target;
6365 break;
6366
6367 /* Various hooks for the DWARF 2 __throw routine. */
6368 case BUILT_IN_UNWIND_INIT:
6369 expand_builtin_unwind_init ();
6370 return const0_rtx;
6371 case BUILT_IN_DWARF_CFA:
6372 return virtual_cfa_rtx;
6373 #ifdef DWARF2_UNWIND_INFO
6374 case BUILT_IN_DWARF_SP_COLUMN:
6375 return expand_builtin_dwarf_sp_column ();
6376 case BUILT_IN_INIT_DWARF_REG_SIZES:
6377 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6378 return const0_rtx;
6379 #endif
6380 case BUILT_IN_FROB_RETURN_ADDR:
6381 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6382 case BUILT_IN_EXTRACT_RETURN_ADDR:
6383 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6384 case BUILT_IN_EH_RETURN:
6385 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6386 CALL_EXPR_ARG (exp, 1));
6387 return const0_rtx;
6388 case BUILT_IN_EH_RETURN_DATA_REGNO:
6389 return expand_builtin_eh_return_data_regno (exp);
6390 case BUILT_IN_EXTEND_POINTER:
6391 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6392 case BUILT_IN_EH_POINTER:
6393 return expand_builtin_eh_pointer (exp);
6394 case BUILT_IN_EH_FILTER:
6395 return expand_builtin_eh_filter (exp);
6396 case BUILT_IN_EH_COPY_VALUES:
6397 return expand_builtin_eh_copy_values (exp);
6398
6399 case BUILT_IN_VA_START:
6400 return expand_builtin_va_start (exp);
6401 case BUILT_IN_VA_END:
6402 return expand_builtin_va_end (exp);
6403 case BUILT_IN_VA_COPY:
6404 return expand_builtin_va_copy (exp);
6405 case BUILT_IN_EXPECT:
6406 return expand_builtin_expect (exp, target);
6407 case BUILT_IN_ASSUME_ALIGNED:
6408 return expand_builtin_assume_aligned (exp, target);
6409 case BUILT_IN_PREFETCH:
6410 expand_builtin_prefetch (exp);
6411 return const0_rtx;
6412
6413 case BUILT_IN_INIT_TRAMPOLINE:
6414 return expand_builtin_init_trampoline (exp, true);
6415 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
6416 return expand_builtin_init_trampoline (exp, false);
6417 case BUILT_IN_ADJUST_TRAMPOLINE:
6418 return expand_builtin_adjust_trampoline (exp);
6419
6420 case BUILT_IN_FORK:
6421 case BUILT_IN_EXECL:
6422 case BUILT_IN_EXECV:
6423 case BUILT_IN_EXECLP:
6424 case BUILT_IN_EXECLE:
6425 case BUILT_IN_EXECVP:
6426 case BUILT_IN_EXECVE:
6427 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6428 if (target)
6429 return target;
6430 break;
6431
6432 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
6433 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
6434 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
6435 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
6436 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
6437 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
6438 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
6439 if (target)
6440 return target;
6441 break;
6442
6443 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
6444 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
6445 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
6446 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
6447 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
6448 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
6449 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
6450 if (target)
6451 return target;
6452 break;
6453
6454 case BUILT_IN_SYNC_FETCH_AND_OR_1:
6455 case BUILT_IN_SYNC_FETCH_AND_OR_2:
6456 case BUILT_IN_SYNC_FETCH_AND_OR_4:
6457 case BUILT_IN_SYNC_FETCH_AND_OR_8:
6458 case BUILT_IN_SYNC_FETCH_AND_OR_16:
6459 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
6460 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
6461 if (target)
6462 return target;
6463 break;
6464
6465 case BUILT_IN_SYNC_FETCH_AND_AND_1:
6466 case BUILT_IN_SYNC_FETCH_AND_AND_2:
6467 case BUILT_IN_SYNC_FETCH_AND_AND_4:
6468 case BUILT_IN_SYNC_FETCH_AND_AND_8:
6469 case BUILT_IN_SYNC_FETCH_AND_AND_16:
6470 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
6471 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
6472 if (target)
6473 return target;
6474 break;
6475
6476 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
6477 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
6478 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
6479 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
6480 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
6481 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
6482 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
6483 if (target)
6484 return target;
6485 break;
6486
6487 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6488 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6489 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6490 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6491 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6492 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
6493 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
6494 if (target)
6495 return target;
6496 break;
6497
6498 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
6499 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
6500 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
6501 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
6502 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
6503 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
6504 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
6505 if (target)
6506 return target;
6507 break;
6508
6509 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
6510 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
6511 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
6512 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
6513 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
6514 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
6515 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
6516 if (target)
6517 return target;
6518 break;
6519
6520 case BUILT_IN_SYNC_OR_AND_FETCH_1:
6521 case BUILT_IN_SYNC_OR_AND_FETCH_2:
6522 case BUILT_IN_SYNC_OR_AND_FETCH_4:
6523 case BUILT_IN_SYNC_OR_AND_FETCH_8:
6524 case BUILT_IN_SYNC_OR_AND_FETCH_16:
6525 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
6526 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
6527 if (target)
6528 return target;
6529 break;
6530
6531 case BUILT_IN_SYNC_AND_AND_FETCH_1:
6532 case BUILT_IN_SYNC_AND_AND_FETCH_2:
6533 case BUILT_IN_SYNC_AND_AND_FETCH_4:
6534 case BUILT_IN_SYNC_AND_AND_FETCH_8:
6535 case BUILT_IN_SYNC_AND_AND_FETCH_16:
6536 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
6537 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
6538 if (target)
6539 return target;
6540 break;
6541
6542 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
6543 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
6544 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
6545 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
6546 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
6547 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
6548 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
6549 if (target)
6550 return target;
6551 break;
6552
6553 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6554 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6555 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6556 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6557 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6558 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
6559 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
6560 if (target)
6561 return target;
6562 break;
6563
6564 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
6565 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
6566 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
6567 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
6568 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
6569 if (mode == VOIDmode)
6570 mode = TYPE_MODE (boolean_type_node);
6571 if (!target || !register_operand (target, mode))
6572 target = gen_reg_rtx (mode);
6573
6574 mode = get_builtin_sync_mode
6575 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
6576 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6577 if (target)
6578 return target;
6579 break;
6580
6581 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
6582 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
6583 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
6584 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
6585 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
6586 mode = get_builtin_sync_mode
6587 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
6588 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6589 if (target)
6590 return target;
6591 break;
6592
6593 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
6594 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
6595 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
6596 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
6597 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
6598 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
6599 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
6600 if (target)
6601 return target;
6602 break;
6603
6604 case BUILT_IN_SYNC_LOCK_RELEASE_1:
6605 case BUILT_IN_SYNC_LOCK_RELEASE_2:
6606 case BUILT_IN_SYNC_LOCK_RELEASE_4:
6607 case BUILT_IN_SYNC_LOCK_RELEASE_8:
6608 case BUILT_IN_SYNC_LOCK_RELEASE_16:
6609 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
6610 expand_builtin_sync_lock_release (mode, exp);
6611 return const0_rtx;
6612
6613 case BUILT_IN_SYNC_SYNCHRONIZE:
6614 expand_builtin_sync_synchronize ();
6615 return const0_rtx;
6616
6617 case BUILT_IN_ATOMIC_EXCHANGE_1:
6618 case BUILT_IN_ATOMIC_EXCHANGE_2:
6619 case BUILT_IN_ATOMIC_EXCHANGE_4:
6620 case BUILT_IN_ATOMIC_EXCHANGE_8:
6621 case BUILT_IN_ATOMIC_EXCHANGE_16:
6622 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
6623 target = expand_builtin_atomic_exchange (mode, exp, target);
6624 if (target)
6625 return target;
6626 break;
6627
6628 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
6629 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
6630 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
6631 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
6632 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
6633 {
6634 unsigned int nargs, z;
6635 vec<tree, va_gc> *vec;
6636
6637 mode =
6638 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
6639 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
6640 if (target)
6641 return target;
6642
6643 /* If this is turned into an external library call, the weak parameter
6644 must be dropped to match the expected parameter list. */
6645 nargs = call_expr_nargs (exp);
6646 vec_alloc (vec, nargs - 1);
6647 for (z = 0; z < 3; z++)
6648 vec->quick_push (CALL_EXPR_ARG (exp, z));
6649 /* Skip the boolean weak parameter. */
6650 for (z = 4; z < 6; z++)
6651 vec->quick_push (CALL_EXPR_ARG (exp, z));
6652 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
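	/* That is, the rebuilt call keeps arguments 0-2 and 4-5 of the
	   original, giving
	     __atomic_compare_exchange_N (obj, expect, desired,
					  success, failure)
	   since the external routine takes no weak flag.  */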
6653 break;
6654 }
6655
6656 case BUILT_IN_ATOMIC_LOAD_1:
6657 case BUILT_IN_ATOMIC_LOAD_2:
6658 case BUILT_IN_ATOMIC_LOAD_4:
6659 case BUILT_IN_ATOMIC_LOAD_8:
6660 case BUILT_IN_ATOMIC_LOAD_16:
6661 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
6662 target = expand_builtin_atomic_load (mode, exp, target);
6663 if (target)
6664 return target;
6665 break;
6666
6667 case BUILT_IN_ATOMIC_STORE_1:
6668 case BUILT_IN_ATOMIC_STORE_2:
6669 case BUILT_IN_ATOMIC_STORE_4:
6670 case BUILT_IN_ATOMIC_STORE_8:
6671 case BUILT_IN_ATOMIC_STORE_16:
6672 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
6673 target = expand_builtin_atomic_store (mode, exp);
6674 if (target)
6675 return const0_rtx;
6676 break;
6677
6678 case BUILT_IN_ATOMIC_ADD_FETCH_1:
6679 case BUILT_IN_ATOMIC_ADD_FETCH_2:
6680 case BUILT_IN_ATOMIC_ADD_FETCH_4:
6681 case BUILT_IN_ATOMIC_ADD_FETCH_8:
6682 case BUILT_IN_ATOMIC_ADD_FETCH_16:
6683 {
6684 enum built_in_function lib;
6685 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
6686 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
6687 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
6688 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
6689 ignore, lib);
6690 if (target)
6691 return target;
6692 break;
6693 }
6694 case BUILT_IN_ATOMIC_SUB_FETCH_1:
6695 case BUILT_IN_ATOMIC_SUB_FETCH_2:
6696 case BUILT_IN_ATOMIC_SUB_FETCH_4:
6697 case BUILT_IN_ATOMIC_SUB_FETCH_8:
6698 case BUILT_IN_ATOMIC_SUB_FETCH_16:
6699 {
6700 enum built_in_function lib;
6701 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
6702 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
6703 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
6704 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
6705 ignore, lib);
6706 if (target)
6707 return target;
6708 break;
6709 }
6710 case BUILT_IN_ATOMIC_AND_FETCH_1:
6711 case BUILT_IN_ATOMIC_AND_FETCH_2:
6712 case BUILT_IN_ATOMIC_AND_FETCH_4:
6713 case BUILT_IN_ATOMIC_AND_FETCH_8:
6714 case BUILT_IN_ATOMIC_AND_FETCH_16:
6715 {
6716 enum built_in_function lib;
6717 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
6718 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
6719 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
6720 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
6721 ignore, lib);
6722 if (target)
6723 return target;
6724 break;
6725 }
6726 case BUILT_IN_ATOMIC_NAND_FETCH_1:
6727 case BUILT_IN_ATOMIC_NAND_FETCH_2:
6728 case BUILT_IN_ATOMIC_NAND_FETCH_4:
6729 case BUILT_IN_ATOMIC_NAND_FETCH_8:
6730 case BUILT_IN_ATOMIC_NAND_FETCH_16:
6731 {
6732 enum built_in_function lib;
6733 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
6734 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
6735 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
6736 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
6737 ignore, lib);
6738 if (target)
6739 return target;
6740 break;
6741 }
6742 case BUILT_IN_ATOMIC_XOR_FETCH_1:
6743 case BUILT_IN_ATOMIC_XOR_FETCH_2:
6744 case BUILT_IN_ATOMIC_XOR_FETCH_4:
6745 case BUILT_IN_ATOMIC_XOR_FETCH_8:
6746 case BUILT_IN_ATOMIC_XOR_FETCH_16:
6747 {
6748 enum built_in_function lib;
6749 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
6750 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
6751 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
6752 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
6753 ignore, lib);
6754 if (target)
6755 return target;
6756 break;
6757 }
6758 case BUILT_IN_ATOMIC_OR_FETCH_1:
6759 case BUILT_IN_ATOMIC_OR_FETCH_2:
6760 case BUILT_IN_ATOMIC_OR_FETCH_4:
6761 case BUILT_IN_ATOMIC_OR_FETCH_8:
6762 case BUILT_IN_ATOMIC_OR_FETCH_16:
6763 {
6764 enum built_in_function lib;
6765 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
6766 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
6767 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
6768 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
6769 ignore, lib);
6770 if (target)
6771 return target;
6772 break;
6773 }
6774 case BUILT_IN_ATOMIC_FETCH_ADD_1:
6775 case BUILT_IN_ATOMIC_FETCH_ADD_2:
6776 case BUILT_IN_ATOMIC_FETCH_ADD_4:
6777 case BUILT_IN_ATOMIC_FETCH_ADD_8:
6778 case BUILT_IN_ATOMIC_FETCH_ADD_16:
6779 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
6780 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
6781 ignore, BUILT_IN_NONE);
6782 if (target)
6783 return target;
6784 break;
6785
6786 case BUILT_IN_ATOMIC_FETCH_SUB_1:
6787 case BUILT_IN_ATOMIC_FETCH_SUB_2:
6788 case BUILT_IN_ATOMIC_FETCH_SUB_4:
6789 case BUILT_IN_ATOMIC_FETCH_SUB_8:
6790 case BUILT_IN_ATOMIC_FETCH_SUB_16:
6791 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
6792 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
6793 ignore, BUILT_IN_NONE);
6794 if (target)
6795 return target;
6796 break;
6797
6798 case BUILT_IN_ATOMIC_FETCH_AND_1:
6799 case BUILT_IN_ATOMIC_FETCH_AND_2:
6800 case BUILT_IN_ATOMIC_FETCH_AND_4:
6801 case BUILT_IN_ATOMIC_FETCH_AND_8:
6802 case BUILT_IN_ATOMIC_FETCH_AND_16:
6803 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
6804 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
6805 ignore, BUILT_IN_NONE);
6806 if (target)
6807 return target;
6808 break;
6809
6810 case BUILT_IN_ATOMIC_FETCH_NAND_1:
6811 case BUILT_IN_ATOMIC_FETCH_NAND_2:
6812 case BUILT_IN_ATOMIC_FETCH_NAND_4:
6813 case BUILT_IN_ATOMIC_FETCH_NAND_8:
6814 case BUILT_IN_ATOMIC_FETCH_NAND_16:
6815 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
6816 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
6817 ignore, BUILT_IN_NONE);
6818 if (target)
6819 return target;
6820 break;
6821
6822 case BUILT_IN_ATOMIC_FETCH_XOR_1:
6823 case BUILT_IN_ATOMIC_FETCH_XOR_2:
6824 case BUILT_IN_ATOMIC_FETCH_XOR_4:
6825 case BUILT_IN_ATOMIC_FETCH_XOR_8:
6826 case BUILT_IN_ATOMIC_FETCH_XOR_16:
6827 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
6828 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
6829 ignore, BUILT_IN_NONE);
6830 if (target)
6831 return target;
6832 break;
6833
6834 case BUILT_IN_ATOMIC_FETCH_OR_1:
6835 case BUILT_IN_ATOMIC_FETCH_OR_2:
6836 case BUILT_IN_ATOMIC_FETCH_OR_4:
6837 case BUILT_IN_ATOMIC_FETCH_OR_8:
6838 case BUILT_IN_ATOMIC_FETCH_OR_16:
6839 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
6840 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
6841 ignore, BUILT_IN_NONE);
6842 if (target)
6843 return target;
6844 break;
6845
6846 case BUILT_IN_ATOMIC_TEST_AND_SET:
6847 return expand_builtin_atomic_test_and_set (exp, target);
6848
6849 case BUILT_IN_ATOMIC_CLEAR:
6850 return expand_builtin_atomic_clear (exp);
6851
6852 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
6853 return expand_builtin_atomic_always_lock_free (exp);
6854
6855 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
6856 target = expand_builtin_atomic_is_lock_free (exp);
6857 if (target)
6858 return target;
6859 break;
6860
6861 case BUILT_IN_ATOMIC_THREAD_FENCE:
6862 expand_builtin_atomic_thread_fence (exp);
6863 return const0_rtx;
6864
6865 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
6866 expand_builtin_atomic_signal_fence (exp);
6867 return const0_rtx;
6868
6869 case BUILT_IN_OBJECT_SIZE:
6870 return expand_builtin_object_size (exp);
6871
6872 case BUILT_IN_MEMCPY_CHK:
6873 case BUILT_IN_MEMPCPY_CHK:
6874 case BUILT_IN_MEMMOVE_CHK:
6875 case BUILT_IN_MEMSET_CHK:
6876 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6877 if (target)
6878 return target;
6879 break;
6880
6881 case BUILT_IN_STRCPY_CHK:
6882 case BUILT_IN_STPCPY_CHK:
6883 case BUILT_IN_STRNCPY_CHK:
6884 case BUILT_IN_STPNCPY_CHK:
6885 case BUILT_IN_STRCAT_CHK:
6886 case BUILT_IN_STRNCAT_CHK:
6887 case BUILT_IN_SNPRINTF_CHK:
6888 case BUILT_IN_VSNPRINTF_CHK:
6889 maybe_emit_chk_warning (exp, fcode);
6890 break;
6891
6892 case BUILT_IN_SPRINTF_CHK:
6893 case BUILT_IN_VSPRINTF_CHK:
6894 maybe_emit_sprintf_chk_warning (exp, fcode);
6895 break;
6896
6897 case BUILT_IN_FREE:
6898 if (warn_free_nonheap_object)
6899 maybe_emit_free_warning (exp);
6900 break;
6901
6902 case BUILT_IN_THREAD_POINTER:
6903 return expand_builtin_thread_pointer (exp, target);
6904
6905 case BUILT_IN_SET_THREAD_POINTER:
6906 expand_builtin_set_thread_pointer (exp);
6907 return const0_rtx;
6908
6909 case BUILT_IN_CILK_DETACH:
6910 expand_builtin_cilk_detach (exp);
6911 return const0_rtx;
6912
6913 case BUILT_IN_CILK_POP_FRAME:
6914 expand_builtin_cilk_pop_frame (exp);
6915 return const0_rtx;
6916
6917 case BUILT_IN_CHKP_INIT_PTR_BOUNDS:
6918 case BUILT_IN_CHKP_NULL_PTR_BOUNDS:
6919 case BUILT_IN_CHKP_COPY_PTR_BOUNDS:
6920 case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS:
6921 case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS:
6922 case BUILT_IN_CHKP_CHECK_PTR_BOUNDS:
6923 case BUILT_IN_CHKP_SET_PTR_BOUNDS:
6924 case BUILT_IN_CHKP_NARROW_PTR_BOUNDS:
6925 case BUILT_IN_CHKP_STORE_PTR_BOUNDS:
6926 case BUILT_IN_CHKP_GET_PTR_LBOUND:
6927 case BUILT_IN_CHKP_GET_PTR_UBOUND:
6928 /* We allow user CHKP builtins if Pointer Bounds
6929 Checker is off. */
6930 if (!chkp_function_instrumented_p (current_function_decl))
6931 {
6932 if (fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS
6933 || fcode == BUILT_IN_CHKP_NARROW_PTR_BOUNDS
6934 || fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
6935 || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
6936 || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
6937 return expand_normal (CALL_EXPR_ARG (exp, 0));
6938 else if (fcode == BUILT_IN_CHKP_GET_PTR_LBOUND)
6939 return expand_normal (size_zero_node);
6940 else if (fcode == BUILT_IN_CHKP_GET_PTR_UBOUND)
6941 return expand_normal (size_int (-1));
6942 else
6943 return const0_rtx;
6944 }
6945 /* FALLTHROUGH */
6946
6947 case BUILT_IN_CHKP_BNDMK:
6948 case BUILT_IN_CHKP_BNDSTX:
6949 case BUILT_IN_CHKP_BNDCL:
6950 case BUILT_IN_CHKP_BNDCU:
6951 case BUILT_IN_CHKP_BNDLDX:
6952 case BUILT_IN_CHKP_BNDRET:
6953 case BUILT_IN_CHKP_INTERSECT:
6954 case BUILT_IN_CHKP_NARROW:
6955 case BUILT_IN_CHKP_EXTRACT_LOWER:
6956 case BUILT_IN_CHKP_EXTRACT_UPPER:
6957 /* Software implementation of Pointer Bounds Checker is NYI.
6958 Target support is required. */
6959 error ("Your target platform does not support -fcheck-pointer-bounds");
6960 break;
6961
6962 case BUILT_IN_ACC_ON_DEVICE:
6963 	  /* Do a library call, if we failed to expand the builtin when
6964 folding. */
6965 break;
6966
6967 default: /* just do library call, if unknown builtin */
6968 break;
6969 }
6970
6971 /* The switch statement above can drop through to cause the function
6972 to be called normally. */
6973 return expand_call (exp, target, ignore);
6974 }
6975
6976 /* Similar to expand_builtin but is used for instrumented calls. */
6977
6978 rtx
6979 expand_builtin_with_bounds (tree exp, rtx target,
6980 rtx subtarget ATTRIBUTE_UNUSED,
6981 machine_mode mode, int ignore)
6982 {
6983 tree fndecl = get_callee_fndecl (exp);
6984 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6985
6986 gcc_assert (CALL_WITH_BOUNDS_P (exp));
6987
6988 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6989 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6990
6991 gcc_assert (fcode > BEGIN_CHKP_BUILTINS
6992 && fcode < END_CHKP_BUILTINS);
6993
6994 switch (fcode)
6995 {
6996 case BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP:
6997 target = expand_builtin_memcpy_with_bounds (exp, target);
6998 if (target)
6999 return target;
7000 break;
7001
7002 case BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP:
7003 target = expand_builtin_mempcpy_with_bounds (exp, target, mode);
7004 if (target)
7005 return target;
7006 break;
7007
7008 case BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP:
7009 target = expand_builtin_memset_with_bounds (exp, target, mode);
7010 if (target)
7011 return target;
7012 break;
7013
7014 default:
7015 break;
7016 }
7017
7018 /* The switch statement above can drop through to cause the function
7019 to be called normally. */
7020 return expand_call (exp, target, ignore);
7021 }
7022
7023 /* Determine whether a tree node represents a call to a built-in
7024 function. If the tree T is a call to a built-in function with
7025 the right number of arguments of the appropriate types, return
7026 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7027 Otherwise the return value is END_BUILTINS. */
7028
7029 enum built_in_function
7030 builtin_mathfn_code (const_tree t)
7031 {
7032 const_tree fndecl, arg, parmlist;
7033 const_tree argtype, parmtype;
7034 const_call_expr_arg_iterator iter;
7035
7036 if (TREE_CODE (t) != CALL_EXPR
7037 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
7038 return END_BUILTINS;
7039
7040 fndecl = get_callee_fndecl (t);
7041 if (fndecl == NULL_TREE
7042 || TREE_CODE (fndecl) != FUNCTION_DECL
7043 || ! DECL_BUILT_IN (fndecl)
7044 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7045 return END_BUILTINS;
7046
7047 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
7048 init_const_call_expr_arg_iterator (t, &iter);
7049 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
7050 {
7051 /* If a function doesn't take a variable number of arguments,
7052 the last element in the list will have type `void'. */
7053 parmtype = TREE_VALUE (parmlist);
7054 if (VOID_TYPE_P (parmtype))
7055 {
7056 if (more_const_call_expr_args_p (&iter))
7057 return END_BUILTINS;
7058 return DECL_FUNCTION_CODE (fndecl);
7059 }
7060
7061 if (! more_const_call_expr_args_p (&iter))
7062 return END_BUILTINS;
7063
7064 arg = next_const_call_expr_arg (&iter);
7065 argtype = TREE_TYPE (arg);
7066
7067 if (SCALAR_FLOAT_TYPE_P (parmtype))
7068 {
7069 if (! SCALAR_FLOAT_TYPE_P (argtype))
7070 return END_BUILTINS;
7071 }
7072 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7073 {
7074 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7075 return END_BUILTINS;
7076 }
7077 else if (POINTER_TYPE_P (parmtype))
7078 {
7079 if (! POINTER_TYPE_P (argtype))
7080 return END_BUILTINS;
7081 }
7082 else if (INTEGRAL_TYPE_P (parmtype))
7083 {
7084 if (! INTEGRAL_TYPE_P (argtype))
7085 return END_BUILTINS;
7086 }
7087 else
7088 return END_BUILTINS;
7089 }
7090
7091 /* Variable-length argument list. */
7092 return DECL_FUNCTION_CODE (fndecl);
7093 }
7094
7095 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7096 evaluate to a constant. */
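/* Illustrative examples (not from the original source) of the folding
   below:

     __builtin_constant_p (42)     -> 1  (CONSTANT_CLASS_P)
     __builtin_constant_p ("abc")  -> 1  (ADDR_EXPR of a STRING_CST)
     __builtin_constant_p (x++)    -> 0  (TREE_SIDE_EFFECTS)

   An argument that is not yet decidable yields NULL_TREE, deferring
   the answer to later folding.  */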
7097
7098 static tree
7099 fold_builtin_constant_p (tree arg)
7100 {
7101 /* We return 1 for a numeric type that's known to be a constant
7102 value at compile-time or for an aggregate type that's a
7103 literal constant. */
7104 STRIP_NOPS (arg);
7105
7106   /* If we know this is a constant, return the integer constant one.  */
7107 if (CONSTANT_CLASS_P (arg)
7108 || (TREE_CODE (arg) == CONSTRUCTOR
7109 && TREE_CONSTANT (arg)))
7110 return integer_one_node;
7111 if (TREE_CODE (arg) == ADDR_EXPR)
7112 {
7113 tree op = TREE_OPERAND (arg, 0);
7114 if (TREE_CODE (op) == STRING_CST
7115 || (TREE_CODE (op) == ARRAY_REF
7116 && integer_zerop (TREE_OPERAND (op, 1))
7117 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7118 return integer_one_node;
7119 }
7120
7121 /* If this expression has side effects, show we don't know it to be a
7122 constant. Likewise if it's a pointer or aggregate type since in
7123      those cases we only want literals, since those are only optimized
7124 when generating RTL, not later.
7125 And finally, if we are compiling an initializer, not code, we
7126 need to return a definite result now; there's not going to be any
7127 more optimization done. */
7128 if (TREE_SIDE_EFFECTS (arg)
7129 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7130 || POINTER_TYPE_P (TREE_TYPE (arg))
7131 || cfun == 0
7132 || folding_initializer
7133 || force_folding_builtin_constant_p)
7134 return integer_zero_node;
7135
7136 return NULL_TREE;
7137 }
7138
7139 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7140 return it as a truthvalue. */
7141
7142 static tree
7143 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
7144 tree predictor)
7145 {
7146 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7147
7148 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
7149 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7150 ret_type = TREE_TYPE (TREE_TYPE (fn));
7151 pred_type = TREE_VALUE (arg_types);
7152 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7153
7154 pred = fold_convert_loc (loc, pred_type, pred);
7155 expected = fold_convert_loc (loc, expected_type, expected);
7156 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
7157 predictor);
7158
7159 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7160 build_int_cst (ret_type, 0));
7161 }
7162
7163 /* Fold a call to builtin_expect with arguments ARG0, ARG1 and ARG2 (an
7164    optional predictor).  Return NULL_TREE if no simplification is possible.  */
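/* A sketch of the distribution performed below (example not from the
   original source), for an expected value of 1:

     __builtin_expect (a && b, 1)

   is rewritten as

     (__builtin_expect (a, 1) != 0) && (__builtin_expect (b, 1) != 0)

   so the expectation reaches each short-circuited condition.  */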
7165
7166 tree
7167 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
7168 {
7169 tree inner, fndecl, inner_arg0;
7170 enum tree_code code;
7171
7172 /* Distribute the expected value over short-circuiting operators.
7173 See through the cast from truthvalue_type_node to long. */
7174 inner_arg0 = arg0;
7175 while (CONVERT_EXPR_P (inner_arg0)
7176 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
7177 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
7178 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
7179
7180 /* If this is a builtin_expect within a builtin_expect keep the
7181 inner one. See through a comparison against a constant. It
7182      might have been added to create a truthvalue.  */
7183 inner = inner_arg0;
7184
7185 if (COMPARISON_CLASS_P (inner)
7186 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7187 inner = TREE_OPERAND (inner, 0);
7188
7189 if (TREE_CODE (inner) == CALL_EXPR
7190 && (fndecl = get_callee_fndecl (inner))
7191 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7192 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7193 return arg0;
7194
7195 inner = inner_arg0;
7196 code = TREE_CODE (inner);
7197 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7198 {
7199 tree op0 = TREE_OPERAND (inner, 0);
7200 tree op1 = TREE_OPERAND (inner, 1);
7201
7202 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
7203 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
7204 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7205
7206 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
7207 }
7208
7209 /* If the argument isn't invariant then there's nothing else we can do. */
7210 if (!TREE_CONSTANT (inner_arg0))
7211 return NULL_TREE;
7212
7213 /* If we expect that a comparison against the argument will fold to
7214 a constant return the constant. In practice, this means a true
7215 constant or the address of a non-weak symbol. */
7216 inner = inner_arg0;
7217 STRIP_NOPS (inner);
7218 if (TREE_CODE (inner) == ADDR_EXPR)
7219 {
7220 do
7221 {
7222 inner = TREE_OPERAND (inner, 0);
7223 }
7224 while (TREE_CODE (inner) == COMPONENT_REF
7225 || TREE_CODE (inner) == ARRAY_REF);
7226 if ((TREE_CODE (inner) == VAR_DECL
7227 || TREE_CODE (inner) == FUNCTION_DECL)
7228 && DECL_WEAK (inner))
7229 return NULL_TREE;
7230 }
7231
7232 /* Otherwise, ARG0 already has the proper type for the return value. */
7233 return arg0;
7234 }
7235
7236 /* Fold a call to __builtin_classify_type with argument ARG. */
7237
7238 static tree
7239 fold_builtin_classify_type (tree arg)
7240 {
7241 if (arg == 0)
7242 return build_int_cst (integer_type_node, no_type_class);
7243
7244 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
7245 }
7246
7247 /* Fold a call to __builtin_strlen with argument ARG. */
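/* For example (illustrative, not from the original source): with a
   constant argument, c_strlen evaluates at compile time, so
   strlen ("abc") folds to the constant 3 converted to TYPE.  */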
7248
7249 static tree
7250 fold_builtin_strlen (location_t loc, tree type, tree arg)
7251 {
7252 if (!validate_arg (arg, POINTER_TYPE))
7253 return NULL_TREE;
7254 else
7255 {
7256 tree len = c_strlen (arg, 0);
7257
7258 if (len)
7259 return fold_convert_loc (loc, type, len);
7260
7261 return NULL_TREE;
7262 }
7263 }
7264
7265 /* If ARG is a foldable constant real, use FN to round it to an integer
7266 value and try to represent the result in integer type ITYPE. Return
7267 the value on success, otherwise return null. */
7268
7269 static tree
7270 do_real_to_int_conversion (tree itype, tree arg,
7271 void (*fn) (REAL_VALUE_TYPE *, format_helper,
7272 const REAL_VALUE_TYPE *))
7273 {
7274 if (TREE_CODE (arg) != REAL_CST || TREE_OVERFLOW (arg))
7275 return NULL_TREE;
7276
7277 const REAL_VALUE_TYPE *value = TREE_REAL_CST_PTR (arg);
7278 if (!real_isfinite (value))
7279 return NULL_TREE;
7280
7281 tree ftype = TREE_TYPE (arg);
7282 REAL_VALUE_TYPE rounded;
7283 fn (&rounded, TYPE_MODE (ftype), value);
7284
7285 bool fail = false;
7286 wide_int ival = real_to_integer (&rounded, &fail, TYPE_PRECISION (itype));
7287 if (fail)
7288 return NULL_TREE;
7289
7290 return wide_int_to_tree (itype, ival);
7291 }
7292
7293
7294 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7295
7296 static tree
7297 fold_builtin_inf (location_t loc, tree type, int warn)
7298 {
7299 REAL_VALUE_TYPE real;
7300
7301 /* __builtin_inff is intended to be usable to define INFINITY on all
7302 targets. If an infinity is not available, INFINITY expands "to a
7303 positive constant of type float that overflows at translation
7304 time", footnote "In this case, using INFINITY will violate the
7305 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7306 Thus we pedwarn to ensure this constraint violation is
7307 diagnosed. */
7308 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7309 pedwarn (loc, 0, "target format does not support infinity");
7310
7311 real_inf (&real);
7312 return build_real (type, real);
7313 }
7314
7315 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7316
7317 static tree
7318 fold_builtin_nan (tree arg, tree type, int quiet)
7319 {
7320 REAL_VALUE_TYPE real;
7321 const char *str;
7322
7323 if (!validate_arg (arg, POINTER_TYPE))
7324 return NULL_TREE;
7325 str = c_getstr (arg);
7326 if (!str)
7327 return NULL_TREE;
7328
7329 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7330 return NULL_TREE;
7331
7332 return build_real (type, real);
7333 }
7334
7335 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7336 NULL_TREE if no simplification can be made. */
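/* Sketch of the cexpi canonicalization below (names illustrative, not
   from the original source), using e^(i*x) = cos x + i sin x:

     sincos (x, sinp, cosp);

   becomes, with t = cexpi (x) wrapped in a SAVE_EXPR,

     *sinp = __imag__ t, *cosp = __real__ t;  */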
7337
7338 static tree
7339 fold_builtin_sincos (location_t loc,
7340 tree arg0, tree arg1, tree arg2)
7341 {
7342 tree type;
7343 tree res, fn, call;
7344
7345 if (!validate_arg (arg0, REAL_TYPE)
7346 || !validate_arg (arg1, POINTER_TYPE)
7347 || !validate_arg (arg2, POINTER_TYPE))
7348 return NULL_TREE;
7349
7350 type = TREE_TYPE (arg0);
7351
7352 /* Calculate the result when the argument is a constant. */
7353 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7354 return res;
7355
7356 /* Canonicalize sincos to cexpi. */
7357 if (!targetm.libc_has_function (function_c99_math_complex))
7358 return NULL_TREE;
7359 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7360 if (!fn)
7361 return NULL_TREE;
7362
7363 call = build_call_expr_loc (loc, fn, 1, arg0);
7364 call = builtin_save_expr (call);
7365
7366 return build2 (COMPOUND_EXPR, void_type_node,
7367 build2 (MODIFY_EXPR, void_type_node,
7368 build_fold_indirect_ref_loc (loc, arg1),
7369 build1 (IMAGPART_EXPR, type, call)),
7370 build2 (MODIFY_EXPR, void_type_node,
7371 build_fold_indirect_ref_loc (loc, arg2),
7372 build1 (REALPART_EXPR, type, call)));
7373 }
7374
7375 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
7376    and their long and long long variants (e.g. ffsl and ffsll).  ARG is
7377 the argument to the call. Return NULL_TREE if no simplification can
7378 be made. */
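/* Illustrative constant folds (not from the original source), assuming
   a 32-bit int:

     __builtin_popcount (0xFF) -> 8
     __builtin_ffs (8)         -> 4
     __builtin_clz (1)         -> 31
     __builtin_parity (7)      -> 1  */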
7379
7380 static tree
7381 fold_builtin_bitop (tree fndecl, tree arg)
7382 {
7383 if (!validate_arg (arg, INTEGER_TYPE))
7384 return NULL_TREE;
7385
7386 /* Optimize for constant argument. */
7387 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
7388 {
7389 tree type = TREE_TYPE (arg);
7390 int result;
7391
7392 switch (DECL_FUNCTION_CODE (fndecl))
7393 {
7394 CASE_INT_FN (BUILT_IN_FFS):
7395 result = wi::ffs (arg);
7396 break;
7397
7398 CASE_INT_FN (BUILT_IN_CLZ):
7399 if (wi::ne_p (arg, 0))
7400 result = wi::clz (arg);
7401 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7402 result = TYPE_PRECISION (type);
7403 break;
7404
7405 CASE_INT_FN (BUILT_IN_CTZ):
7406 if (wi::ne_p (arg, 0))
7407 result = wi::ctz (arg);
7408 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7409 result = TYPE_PRECISION (type);
7410 break;
7411
7412 CASE_INT_FN (BUILT_IN_CLRSB):
7413 result = wi::clrsb (arg);
7414 break;
7415
7416 CASE_INT_FN (BUILT_IN_POPCOUNT):
7417 result = wi::popcount (arg);
7418 break;
7419
7420 CASE_INT_FN (BUILT_IN_PARITY):
7421 result = wi::parity (arg);
7422 break;
7423
7424 default:
7425 gcc_unreachable ();
7426 }
7427
7428 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
7429 }
7430
7431 return NULL_TREE;
7432 }
7433
7434 /* Fold function call to builtin_bswap and the short, long and long long
7435 variants. Return NULL_TREE if no simplification can be made. */
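/* For example (not from the original source):
   __builtin_bswap32 (0x12345678) folds to 0x78563412, and
   __builtin_bswap16 (0x1234) folds to 0x3412.  */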
7436 static tree
7437 fold_builtin_bswap (tree fndecl, tree arg)
7438 {
7439 if (! validate_arg (arg, INTEGER_TYPE))
7440 return NULL_TREE;
7441
7442 /* Optimize constant value. */
7443 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
7444 {
7445 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7446
7447 switch (DECL_FUNCTION_CODE (fndecl))
7448 {
7449 case BUILT_IN_BSWAP16:
7450 case BUILT_IN_BSWAP32:
7451 case BUILT_IN_BSWAP64:
7452 {
7453 signop sgn = TYPE_SIGN (type);
7454 tree result =
7455 wide_int_to_tree (type,
7456 wide_int::from (arg, TYPE_PRECISION (type),
7457 sgn).bswap ());
7458 return result;
7459 }
7460 default:
7461 gcc_unreachable ();
7462 }
7463 }
7464
7465 return NULL_TREE;
7466 }
7467
7468 /* Fold a builtin function call to pow, powf, or powl. Return
7469 NULL_TREE if no simplification can be made. */
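/* For instance (not from the original source): pow (2.0, 3.0) has a
   constant exponent that is exactly integral, so real_powi evaluates
   it at compile time and the call folds to 8.0.  */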
7470 static tree
7471 fold_const_builtin_pow (tree arg0, tree arg1, tree type)
7472 {
7473 tree res;
7474
7475 if (!validate_arg (arg0, REAL_TYPE)
7476 || !validate_arg (arg1, REAL_TYPE))
7477 return NULL_TREE;
7478
7479 /* Calculate the result when the argument is a constant. */
7480 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
7481 return res;
7482
7483 /* Check for an integer exponent. */
7484 if (TREE_CODE (arg0) == REAL_CST
7485 && !TREE_OVERFLOW (arg0)
7486 && TREE_CODE (arg1) == REAL_CST
7487 && !TREE_OVERFLOW (arg1))
7488 {
7489 REAL_VALUE_TYPE cint1;
7490 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (arg0);
7491 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (arg1);
7492 HOST_WIDE_INT n1 = real_to_integer (c1);
7493 real_from_integer (&cint1, VOIDmode, n1, SIGNED);
7494 /* Attempt to evaluate pow at compile-time, unless this should
7495 raise an exception. */
7496 if (real_identical (c1, &cint1)
7497 && (n1 > 0
7498 || (!flag_trapping_math && !flag_errno_math)
7499 || !real_equal (c0, &dconst0)))
7500 {
7501 REAL_VALUE_TYPE x;
7502 bool inexact = real_powi (&x, TYPE_MODE (type), c0, n1);
7503 if (flag_unsafe_math_optimizations || !inexact)
7504 return build_real (type, x);
7505 }
7506 }
7507
7508 return NULL_TREE;
7509 }
7510
7511 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
7512 arguments to the call, and TYPE is its return type.
7513 Return NULL_TREE if no simplification can be made. */
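/* For example (illustrative, not from the original source), with all
   arguments constant:

     memchr ("hello", 'l', 5)  folds to ARG1 + 2, and
     memchr ("hello", 'z', 5)  folds to a null pointer of ARG1's type.  */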
7514
7515 static tree
7516 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
7517 {
7518 if (!validate_arg (arg1, POINTER_TYPE)
7519 || !validate_arg (arg2, INTEGER_TYPE)
7520 || !validate_arg (len, INTEGER_TYPE))
7521 return NULL_TREE;
7522 else
7523 {
7524 const char *p1;
7525
7526 if (TREE_CODE (arg2) != INTEGER_CST
7527 || !tree_fits_uhwi_p (len))
7528 return NULL_TREE;
7529
7530 p1 = c_getstr (arg1);
7531 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
7532 {
7533 char c;
7534 const char *r;
7535 tree tem;
7536
7537 if (target_char_cast (arg2, &c))
7538 return NULL_TREE;
7539
7540 r = (const char *) memchr (p1, c, tree_to_uhwi (len));
7541
7542 if (r == NULL)
7543 return build_int_cst (TREE_TYPE (arg1), 0);
7544
7545 tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
7546 return fold_convert_loc (loc, type, tem);
7547 }
7548 return NULL_TREE;
7549 }
7550 }
7551
7552 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
7553 Return NULL_TREE if no simplification can be made. */
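/* Illustrative folds (not from the original source):

     memcmp (p, p, n)        -> 0
     memcmp (p, q, 0)        -> 0
     memcmp ("ab", "ac", 2)  -> -1

   and for LEN == 1 the call becomes the byte difference
   *(const unsigned char *) p - *(const unsigned char *) q.  */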
7554
7555 static tree
7556 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
7557 {
7558 const char *p1, *p2;
7559
7560 if (!validate_arg (arg1, POINTER_TYPE)
7561 || !validate_arg (arg2, POINTER_TYPE)
7562 || !validate_arg (len, INTEGER_TYPE))
7563 return NULL_TREE;
7564
7565 /* If the LEN parameter is zero, return zero. */
7566 if (integer_zerop (len))
7567 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
7568 arg1, arg2);
7569
7570 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
7571 if (operand_equal_p (arg1, arg2, 0))
7572 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
7573
7574 p1 = c_getstr (arg1);
7575 p2 = c_getstr (arg2);
7576
7577 /* If all arguments are constant, and the value of len is not greater
7578 than the lengths of arg1 and arg2, evaluate at compile-time. */
7579 if (tree_fits_uhwi_p (len) && p1 && p2
7580 && compare_tree_int (len, strlen (p1) + 1) <= 0
7581 && compare_tree_int (len, strlen (p2) + 1) <= 0)
7582 {
7583 const int r = memcmp (p1, p2, tree_to_uhwi (len));
7584
7585 if (r > 0)
7586 return integer_one_node;
7587 else if (r < 0)
7588 return integer_minus_one_node;
7589 else
7590 return integer_zero_node;
7591 }
7592
7593 /* If len parameter is one, return an expression corresponding to
7594      (*(const unsigned char*)arg1 - *(const unsigned char*)arg2).  */
7595 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
7596 {
7597 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7598 tree cst_uchar_ptr_node
7599 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7600
7601 tree ind1
7602 = fold_convert_loc (loc, integer_type_node,
7603 build1 (INDIRECT_REF, cst_uchar_node,
7604 fold_convert_loc (loc,
7605 cst_uchar_ptr_node,
7606 arg1)));
7607 tree ind2
7608 = fold_convert_loc (loc, integer_type_node,
7609 build1 (INDIRECT_REF, cst_uchar_node,
7610 fold_convert_loc (loc,
7611 cst_uchar_ptr_node,
7612 arg2)));
7613 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
7614 }
7615
7616 return NULL_TREE;
7617 }
7618
7619 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
7620 Return NULL_TREE if no simplification can be made. */
7621
7622 static tree
7623 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
7624 {
7625 const char *p1, *p2;
7626
7627 if (!validate_arg (arg1, POINTER_TYPE)
7628 || !validate_arg (arg2, POINTER_TYPE))
7629 return NULL_TREE;
7630
7631 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
7632 if (operand_equal_p (arg1, arg2, 0))
7633 return integer_zero_node;
7634
7635 p1 = c_getstr (arg1);
7636 p2 = c_getstr (arg2);
7637
7638 if (p1 && p2)
7639 {
7640 const int i = strcmp (p1, p2);
7641 if (i < 0)
7642 return integer_minus_one_node;
7643 else if (i > 0)
7644 return integer_one_node;
7645 else
7646 return integer_zero_node;
7647 }
7648
7649 /* If the second arg is "", return *(const unsigned char*)arg1. */
7650 if (p2 && *p2 == '\0')
7651 {
7652 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7653 tree cst_uchar_ptr_node
7654 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7655
7656 return fold_convert_loc (loc, integer_type_node,
7657 build1 (INDIRECT_REF, cst_uchar_node,
7658 fold_convert_loc (loc,
7659 cst_uchar_ptr_node,
7660 arg1)));
7661 }
7662
7663 /* If the first arg is "", return -*(const unsigned char*)arg2. */
7664 if (p1 && *p1 == '\0')
7665 {
7666 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7667 tree cst_uchar_ptr_node
7668 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7669
7670 tree temp
7671 = fold_convert_loc (loc, integer_type_node,
7672 build1 (INDIRECT_REF, cst_uchar_node,
7673 fold_convert_loc (loc,
7674 cst_uchar_ptr_node,
7675 arg2)));
7676 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
7677 }
7678
7679 return NULL_TREE;
7680 }
7681
7682 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
7683 Return NULL_TREE if no simplification can be made. */
7684
7685 static tree
7686 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
7687 {
7688 const char *p1, *p2;
7689
7690 if (!validate_arg (arg1, POINTER_TYPE)
7691 || !validate_arg (arg2, POINTER_TYPE)
7692 || !validate_arg (len, INTEGER_TYPE))
7693 return NULL_TREE;
7694
7695 /* If the LEN parameter is zero, return zero. */
7696 if (integer_zerop (len))
7697 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
7698 arg1, arg2);
7699
7700 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
7701 if (operand_equal_p (arg1, arg2, 0))
7702 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
7703
7704 p1 = c_getstr (arg1);
7705 p2 = c_getstr (arg2);
7706
7707 if (tree_fits_uhwi_p (len) && p1 && p2)
7708 {
7709 const int i = strncmp (p1, p2, tree_to_uhwi (len));
7710 if (i > 0)
7711 return integer_one_node;
7712 else if (i < 0)
7713 return integer_minus_one_node;
7714 else
7715 return integer_zero_node;
7716 }
7717
7718 /* If the second arg is "", and the length is greater than zero,
7719 return *(const unsigned char*)arg1. */
7720 if (p2 && *p2 == '\0'
7721 && TREE_CODE (len) == INTEGER_CST
7722 && tree_int_cst_sgn (len) == 1)
7723 {
7724 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7725 tree cst_uchar_ptr_node
7726 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7727
7728 return fold_convert_loc (loc, integer_type_node,
7729 build1 (INDIRECT_REF, cst_uchar_node,
7730 fold_convert_loc (loc,
7731 cst_uchar_ptr_node,
7732 arg1)));
7733 }
7734
7735 /* If the first arg is "", and the length is greater than zero,
7736 return -*(const unsigned char*)arg2. */
7737 if (p1 && *p1 == '\0'
7738 && TREE_CODE (len) == INTEGER_CST
7739 && tree_int_cst_sgn (len) == 1)
7740 {
7741 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7742 tree cst_uchar_ptr_node
7743 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7744
7745 tree temp = fold_convert_loc (loc, integer_type_node,
7746 build1 (INDIRECT_REF, cst_uchar_node,
7747 fold_convert_loc (loc,
7748 cst_uchar_ptr_node,
7749 arg2)));
7750 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
7751 }
7752
7753 /* If len parameter is one, return an expression corresponding to
7754      (*(const unsigned char*)arg1 - *(const unsigned char*)arg2).  */
7755 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
7756 {
7757 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7758 tree cst_uchar_ptr_node
7759 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7760
7761 tree ind1 = fold_convert_loc (loc, integer_type_node,
7762 build1 (INDIRECT_REF, cst_uchar_node,
7763 fold_convert_loc (loc,
7764 cst_uchar_ptr_node,
7765 arg1)));
7766 tree ind2 = fold_convert_loc (loc, integer_type_node,
7767 build1 (INDIRECT_REF, cst_uchar_node,
7768 fold_convert_loc (loc,
7769 cst_uchar_ptr_node,
7770 arg2)));
7771 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
7772 }
7773
7774 return NULL_TREE;
7775 }
7776
7777 /* Fold a call to builtin isascii with argument ARG. */
7778
7779 static tree
7780 fold_builtin_isascii (location_t loc, tree arg)
7781 {
7782 if (!validate_arg (arg, INTEGER_TYPE))
7783 return NULL_TREE;
7784 else
7785 {
7786 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
7787 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
7788 build_int_cst (integer_type_node,
7789 ~ (unsigned HOST_WIDE_INT) 0x7f));
7790 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
7791 arg, integer_zero_node);
7792 }
7793 }
7794
7795 /* Fold a call to builtin toascii with argument ARG. */
7796
7797 static tree
7798 fold_builtin_toascii (location_t loc, tree arg)
7799 {
7800 if (!validate_arg (arg, INTEGER_TYPE))
7801 return NULL_TREE;
7802
7803 /* Transform toascii(c) -> (c & 0x7f). */
7804 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
7805 build_int_cst (integer_type_node, 0x7f));
7806 }
7807
7808 /* Fold a call to builtin isdigit with argument ARG. */
7809
7810 static tree
7811 fold_builtin_isdigit (location_t loc, tree arg)
7812 {
7813 if (!validate_arg (arg, INTEGER_TYPE))
7814 return NULL_TREE;
7815 else
7816 {
7817 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
7818 /* According to the C standard, isdigit is unaffected by locale.
7819 However, it definitely is affected by the target character set. */
7820 unsigned HOST_WIDE_INT target_digit0
7821 = lang_hooks.to_target_charset ('0');
7822
7823 if (target_digit0 == 0)
7824 return NULL_TREE;
7825
7826 arg = fold_convert_loc (loc, unsigned_type_node, arg);
7827 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
7828 build_int_cst (unsigned_type_node, target_digit0));
7829 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
7830 build_int_cst (unsigned_type_node, 9));
7831 }
7832 }
7833
7834 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
7835
7836 static tree
7837 fold_builtin_fabs (location_t loc, tree arg, tree type)
7838 {
7839 if (!validate_arg (arg, REAL_TYPE))
7840 return NULL_TREE;
7841
7842 arg = fold_convert_loc (loc, type, arg);
7843 return fold_build1_loc (loc, ABS_EXPR, type, arg);
7844 }
7845
7846 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
7847
7848 static tree
7849 fold_builtin_abs (location_t loc, tree arg, tree type)
7850 {
7851 if (!validate_arg (arg, INTEGER_TYPE))
7852 return NULL_TREE;
7853
7854 arg = fold_convert_loc (loc, type, arg);
7855 return fold_build1_loc (loc, ABS_EXPR, type, arg);
7856 }
7857
7858 /* Fold a fma operation with arguments ARG[012]. */
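/* For instance (not from the original source): with three REAL_CST
   arguments, fma (2.0, 3.0, 1.0) is evaluated through MPFR with a
   single rounding and folds to 7.0.  */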
7859
7860 tree
7861 fold_fma (location_t loc ATTRIBUTE_UNUSED,
7862 tree type, tree arg0, tree arg1, tree arg2)
7863 {
7864 if (TREE_CODE (arg0) == REAL_CST
7865 && TREE_CODE (arg1) == REAL_CST
7866 && TREE_CODE (arg2) == REAL_CST)
7867 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
7868
7869 return NULL_TREE;
7870 }
7871
7872 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
7873
7874 static tree
7875 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
7876 {
7877 /* ??? Only expand to FMA_EXPR if it's directly supported. */
7878 if (validate_arg (arg0, REAL_TYPE)
7879 && validate_arg (arg1, REAL_TYPE)
7880 && validate_arg (arg2, REAL_TYPE)
7881 && optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
7882 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
7883
7884 return NULL_TREE;
7885 }
7886
7887 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
7888
7889 static tree
7890 fold_builtin_carg (location_t loc, tree arg, tree type)
7891 {
7892 if (validate_arg (arg, COMPLEX_TYPE)
7893 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7894 {
7895 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
7896
7897 if (atan2_fn)
7898 {
7899 tree new_arg = builtin_save_expr (arg);
7900 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
7901 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
7902 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
7903 }
7904 }
7905
7906 return NULL_TREE;
7907 }
7908
7909 /* Fold a call to builtin logb/ilogb. */
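/* For example (not from the original source), for a radix-2 format:
   logb (8.0) folds to 3.0, since 8.0 is held as 0.5 * 2^4, so
   REAL_EXP yields 4 and 1 is subtracted.  */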
7910
7911 static tree
7912 fold_const_builtin_logb (location_t loc, tree arg, tree rettype)
7913 {
7914 if (! validate_arg (arg, REAL_TYPE))
7915 return NULL_TREE;
7916
7917 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
7918 {
7919 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
7920
7921 switch (value->cl)
7922 {
7923 case rvc_nan:
7924 case rvc_inf:
7925 /* If arg is Inf or NaN and we're logb, return it. */
7926 if (TREE_CODE (rettype) == REAL_TYPE)
7927 {
7928 /* For logb(-Inf) we have to return +Inf. */
7929 if (real_isinf (value) && real_isneg (value))
7930 {
7931 REAL_VALUE_TYPE tem;
7932 real_inf (&tem);
7933 return build_real (rettype, tem);
7934 }
7935 return fold_convert_loc (loc, rettype, arg);
7936 }
7937 /* Fall through... */
7938 case rvc_zero:
7939 /* Zero may set errno and/or raise an exception for logb, also
7940 for ilogb we don't know FP_ILOGB0. */
7941 return NULL_TREE;
7942 case rvc_normal:
7943 /* For normal numbers, proceed iff radix == 2. In GCC,
7944 normalized significands are in the range [0.5, 1.0). We
7945 want the exponent as if they were [1.0, 2.0) so get the
7946 exponent and subtract 1. */
7947 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
7948 return fold_convert_loc (loc, rettype,
7949 build_int_cst (integer_type_node,
7950 REAL_EXP (value)-1));
7951 break;
7952 }
7953 }
7954
7955 return NULL_TREE;
7956 }
7957
7958 /* Fold a call to builtin significand, if radix == 2. */
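/* For example (not from the original source): significand (8.0) folds
   to 1.0, i.e. 8.0 rescaled into [1.0, 2.0) by forcing the exponent
   to 1.  */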
7959
7960 static tree
7961 fold_const_builtin_significand (location_t loc, tree arg, tree rettype)
7962 {
7963 if (! validate_arg (arg, REAL_TYPE))
7964 return NULL_TREE;
7965
7966 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
7967 {
7968 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
7969
7970 switch (value->cl)
7971 {
7972 case rvc_zero:
7973 case rvc_nan:
7974 case rvc_inf:
7975 /* If arg is +-0, +-Inf or +-NaN, then return it. */
7976 return fold_convert_loc (loc, rettype, arg);
7977 case rvc_normal:
7978 /* For normal numbers, proceed iff radix == 2. */
7979 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
7980 {
7981 REAL_VALUE_TYPE result = *value;
7982 /* In GCC, normalized significands are in the range [0.5,
7983 1.0). We want them to be [1.0, 2.0) so set the
7984 exponent to 1. */
7985 SET_REAL_EXP (&result, 1);
7986 return build_real (rettype, result);
7987 }
7988 break;
7989 }
7990 }
7991
7992 return NULL_TREE;
7993 }
7994
7995 /* Fold a call to builtin frexp; we can assume the base is 2.  */
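/* For example (not from the original source): frexp (8.0, &e) folds to
   the COMPOUND_EXPR (*e = 4, 0.5), since 8.0 == 0.5 * 2^4 and GCC's
   normalized significands already lie in [0.5, 1.0).  */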
7996
7997 static tree
7998 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
7999 {
8000 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8001 return NULL_TREE;
8002
8003 STRIP_NOPS (arg0);
8004
8005 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8006 return NULL_TREE;
8007
8008 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8009
8010 /* Proceed if a valid pointer type was passed in. */
8011 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
8012 {
8013 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8014 tree frac, exp;
8015
8016 switch (value->cl)
8017 {
8018 case rvc_zero:
8019 /* For +-0, return (*exp = 0, +-0). */
8020 exp = integer_zero_node;
8021 frac = arg0;
8022 break;
8023 case rvc_nan:
8024 case rvc_inf:
8025 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
8026 return omit_one_operand_loc (loc, rettype, arg0, arg1);
8027 case rvc_normal:
8028 {
8029 /* Since the frexp function always expects base 2, and in
8030 GCC normalized significands are already in the range
8031 [0.5, 1.0), we have exactly what frexp wants. */
8032 REAL_VALUE_TYPE frac_rvt = *value;
8033 SET_REAL_EXP (&frac_rvt, 0);
8034 frac = build_real (rettype, frac_rvt);
8035 exp = build_int_cst (integer_type_node, REAL_EXP (value));
8036 }
8037 break;
8038 default:
8039 gcc_unreachable ();
8040 }
8041
8042 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
8043 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
8044 TREE_SIDE_EFFECTS (arg1) = 1;
8045 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
8046 }
8047
8048 return NULL_TREE;
8049 }
8050
8051 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
8052 then we can assume the base is two. If it's false, then we have to
8053 check the mode of the TYPE parameter in certain cases. */
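/* For instance (not from the original source): ldexp (1.5, 3) folds to
   12.0, provided the requested adjustment lies within the mode's
   exponent range and the result fits the target mode exactly.  */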
8054
8055 static tree
8056 fold_const_builtin_load_exponent (tree arg0, tree arg1,
8057 tree type, bool ldexp)
8058 {
8059 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
8060 {
8061 /* If both arguments are constant, then try to evaluate it. */
8062 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
8063 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
8064 && tree_fits_shwi_p (arg1))
8065 {
8066 /* Bound the maximum adjustment to twice the range of the
8067 mode's valid exponents. Use abs to ensure the range is
8068 positive as a sanity check. */
8069 const long max_exp_adj = 2 *
8070 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
8071 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
8072
8073 /* Get the user-requested adjustment. */
8074 const HOST_WIDE_INT req_exp_adj = tree_to_shwi (arg1);
8075
8076 /* The requested adjustment must be inside this range. This
8077 is a preliminary cap to avoid things like overflow, we
8078 may still fail to compute the result for other reasons. */
8079 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
8080 {
8081 REAL_VALUE_TYPE initial_result;
8082
8083 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
8084
8085 /* Ensure we didn't overflow. */
8086 if (! real_isinf (&initial_result))
8087 {
8088 const REAL_VALUE_TYPE trunc_result
8089 = real_value_truncate (TYPE_MODE (type), initial_result);
8090
8091 /* Only proceed if the target mode can hold the
8092 resulting value. */
8093 if (real_equal (&initial_result, &trunc_result))
8094 return build_real (type, trunc_result);
8095 }
8096 }
8097 }
8098 }
8099
8100 return NULL_TREE;
8101 }
8102
8103 /* Fold a call to builtin modf. */
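/* For example (not from the original source): modf (2.5, &i) folds to
   the COMPOUND_EXPR (*i = 2.0, 0.5), while modf (-2.0, &i) yields a
   fractional part of -0.0 to preserve the sign.  */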
8104
8105 static tree
8106 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
8107 {
8108 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8109 return NULL_TREE;
8110
8111 STRIP_NOPS (arg0);
8112
8113 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8114 return NULL_TREE;
8115
8116 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8117
8118 /* Proceed if a valid pointer type was passed in. */
8119 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
8120 {
8121 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8122 REAL_VALUE_TYPE trunc, frac;
8123
8124 switch (value->cl)
8125 {
8126 case rvc_nan:
8127 case rvc_zero:
8128 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
8129 trunc = frac = *value;
8130 break;
8131 case rvc_inf:
8132 /* For +-Inf, return (*arg1 = arg0, +-0). */
8133 frac = dconst0;
8134 frac.sign = value->sign;
8135 trunc = *value;
8136 break;
8137 case rvc_normal:
8138 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
8139 real_trunc (&trunc, VOIDmode, value);
8140 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
8141 /* If the original number was negative and already
8142 integral, then the fractional part is -0.0. */
8143 if (value->sign && frac.cl == rvc_zero)
8144 frac.sign = value->sign;
8145 break;
8146 }
8147
8148 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
8149 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
8150 build_real (rettype, trunc));
8151 TREE_SIDE_EFFECTS (arg1) = 1;
8152 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
8153 build_real (rettype, frac));
8154 }
8155
8156 return NULL_TREE;
8157 }
8158
8159 /* Given a location LOC, an interclass builtin function decl FNDECL
8160    and its single argument ARG, return a folded expression computing
8161    the same, or NULL_TREE if we either couldn't or didn't want to fold
8162    (the latter happens if there's an RTL instruction available).  */
8163
8164 static tree
8165 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
8166 {
8167 machine_mode mode;
8168
8169 if (!validate_arg (arg, REAL_TYPE))
8170 return NULL_TREE;
8171
8172 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
8173 return NULL_TREE;
8174
8175 mode = TYPE_MODE (TREE_TYPE (arg));
8176
8177 /* If there is no optab, try generic code. */
8178 switch (DECL_FUNCTION_CODE (fndecl))
8179 {
8180 tree result;
8181
8182 CASE_FLT_FN (BUILT_IN_ISINF):
8183 {
8184 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
8185 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8186 tree const type = TREE_TYPE (arg);
8187 REAL_VALUE_TYPE r;
8188 char buf[128];
8189
8190 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8191 real_from_string (&r, buf);
8192 result = build_call_expr (isgr_fn, 2,
8193 fold_build1_loc (loc, ABS_EXPR, type, arg),
8194 build_real (type, r));
8195 return result;
8196 }
8197 CASE_FLT_FN (BUILT_IN_FINITE):
8198 case BUILT_IN_ISFINITE:
8199 {
8200 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
8201 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8202 tree const type = TREE_TYPE (arg);
8203 REAL_VALUE_TYPE r;
8204 char buf[128];
8205
8206 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8207 real_from_string (&r, buf);
8208 result = build_call_expr (isle_fn, 2,
8209 fold_build1_loc (loc, ABS_EXPR, type, arg),
8210 build_real (type, r));
8211 /*result = fold_build2_loc (loc, UNGT_EXPR,
8212 TREE_TYPE (TREE_TYPE (fndecl)),
8213 fold_build1_loc (loc, ABS_EXPR, type, arg),
8214 build_real (type, r));
8215 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
8216 TREE_TYPE (TREE_TYPE (fndecl)),
8217 result);*/
8218 return result;
8219 }
8220 case BUILT_IN_ISNORMAL:
8221 {
8222 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
8223 islessequal(fabs(x),DBL_MAX). */
8224 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8225 tree const isge_fn = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
8226 tree const type = TREE_TYPE (arg);
8227 REAL_VALUE_TYPE rmax, rmin;
8228 char buf[128];
8229
8230 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8231 real_from_string (&rmax, buf);
8232 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
8233 real_from_string (&rmin, buf);
8234 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
8235 result = build_call_expr (isle_fn, 2, arg,
8236 build_real (type, rmax));
8237 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
8238 build_call_expr (isge_fn, 2, arg,
8239 build_real (type, rmin)));
8240 return result;
8241 }
8242 default:
8243 break;
8244 }
8245
8246 return NULL_TREE;
8247 }
8248
8249 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
8250 ARG is the argument for the call. */
8251
8252 static tree
8253 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
8254 {
8255 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8256 REAL_VALUE_TYPE r;
8257
8258 if (!validate_arg (arg, REAL_TYPE))
8259 return NULL_TREE;
8260
8261 switch (builtin_index)
8262 {
8263 case BUILT_IN_ISINF:
8264 if (!HONOR_INFINITIES (arg))
8265 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8266
8267 if (TREE_CODE (arg) == REAL_CST)
8268 {
8269 r = TREE_REAL_CST (arg);
8270 if (real_isinf (&r))
8271 return real_compare (GT_EXPR, &r, &dconst0)
8272 ? integer_one_node : integer_minus_one_node;
8273 else
8274 return integer_zero_node;
8275 }
8276
8277 return NULL_TREE;
8278
8279 case BUILT_IN_ISINF_SIGN:
8280 {
8281 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
8282 /* In a boolean context, GCC will fold the inner COND_EXPR to
8283 1. So e.g. "if (isinf_sign(x))" would be folded to just
8284 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
8285 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
8286 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
8287 tree tmp = NULL_TREE;
8288
8289 arg = builtin_save_expr (arg);
8290
8291 if (signbit_fn && isinf_fn)
8292 {
8293 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
8294 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
8295
8296 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
8297 signbit_call, integer_zero_node);
8298 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
8299 isinf_call, integer_zero_node);
8300
8301 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
8302 integer_minus_one_node, integer_one_node);
8303 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
8304 isinf_call, tmp,
8305 integer_zero_node);
8306 }
8307
8308 return tmp;
8309 }
8310
8311 case BUILT_IN_ISFINITE:
8312 if (!HONOR_NANS (arg)
8313 && !HONOR_INFINITIES (arg))
8314 return omit_one_operand_loc (loc, type, integer_one_node, arg);
8315
8316 if (TREE_CODE (arg) == REAL_CST)
8317 {
8318 r = TREE_REAL_CST (arg);
8319 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
8320 }
8321
8322 return NULL_TREE;
8323
8324 case BUILT_IN_ISNAN:
8325 if (!HONOR_NANS (arg))
8326 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8327
8328 if (TREE_CODE (arg) == REAL_CST)
8329 {
8330 r = TREE_REAL_CST (arg);
8331 return real_isnan (&r) ? integer_one_node : integer_zero_node;
8332 }
8333
8334 arg = builtin_save_expr (arg);
8335 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
8336
8337 default:
8338 gcc_unreachable ();
8339 }
8340 }
8341
8342 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
8343 This builtin will generate code to return the appropriate floating
8344 point classification depending on the value of the floating point
8345 number passed in. The possible return values must be supplied as
8346 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
8347    FP_NORMAL, FP_SUBNORMAL and FP_ZERO.  The ellipsis is for exactly
8348 one floating point argument which is "type generic". */
8349
8350 static tree
8351 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
8352 {
8353 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
8354 arg, type, res, tmp;
8355 machine_mode mode;
8356 REAL_VALUE_TYPE r;
8357 char buf[128];
8358
8359 /* Verify the required arguments in the original call. */
8360 if (nargs != 6
8361 || !validate_arg (args[0], INTEGER_TYPE)
8362 || !validate_arg (args[1], INTEGER_TYPE)
8363 || !validate_arg (args[2], INTEGER_TYPE)
8364 || !validate_arg (args[3], INTEGER_TYPE)
8365 || !validate_arg (args[4], INTEGER_TYPE)
8366 || !validate_arg (args[5], REAL_TYPE))
8367 return NULL_TREE;
8368
8369 fp_nan = args[0];
8370 fp_infinite = args[1];
8371 fp_normal = args[2];
8372 fp_subnormal = args[3];
8373 fp_zero = args[4];
8374 arg = args[5];
8375 type = TREE_TYPE (arg);
8376 mode = TYPE_MODE (type);
8377 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
8378
8379 /* fpclassify(x) ->
8380 isnan(x) ? FP_NAN :
8381 (fabs(x) == Inf ? FP_INFINITE :
8382 (fabs(x) >= DBL_MIN ? FP_NORMAL :
8383 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
8384
8385 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
8386 build_real (type, dconst0));
8387 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
8388 tmp, fp_zero, fp_subnormal);
8389
8390 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
8391 real_from_string (&r, buf);
8392 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
8393 arg, build_real (type, r));
8394 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
8395
8396 if (HONOR_INFINITIES (mode))
8397 {
8398 real_inf (&r);
8399 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
8400 build_real (type, r));
8401 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
8402 fp_infinite, res);
8403 }
8404
8405 if (HONOR_NANS (mode))
8406 {
8407 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
8408 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
8409 }
8410
8411 return res;
8412 }
8413
8414 /* Fold a call to an unordered comparison function such as
8415 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
8416 being called and ARG0 and ARG1 are the arguments for the call.
8417 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
8418 the opposite of the desired result. UNORDERED_CODE is used
8419 for modes that can hold NaNs and ORDERED_CODE is used for
8420 the rest. */
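/* Sketch of the inversion described above (not from the original
   source), assuming the caller passes UNLE_EXPR/LE_EXPR for
   __builtin_isgreater: when NaNs are honored,

     isgreater (x, y)  folds to  !(x unle y),

   which is false for NaN operands instead of raising "invalid".  */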
8421
8422 static tree
8423 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
8424 enum tree_code unordered_code,
8425 enum tree_code ordered_code)
8426 {
8427 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8428 enum tree_code code;
8429 tree type0, type1;
8430 enum tree_code code0, code1;
8431 tree cmp_type = NULL_TREE;
8432
8433 type0 = TREE_TYPE (arg0);
8434 type1 = TREE_TYPE (arg1);
8435
8436 code0 = TREE_CODE (type0);
8437 code1 = TREE_CODE (type1);
8438
8439 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
8440 /* Choose the wider of two real types. */
8441 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
8442 ? type0 : type1;
8443 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
8444 cmp_type = type0;
8445 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
8446 cmp_type = type1;
8447
8448 arg0 = fold_convert_loc (loc, cmp_type, arg0);
8449 arg1 = fold_convert_loc (loc, cmp_type, arg1);
8450
8451 if (unordered_code == UNORDERED_EXPR)
8452 {
8453 if (!HONOR_NANS (arg0))
8454 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
8455 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
8456 }
8457
8458 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
8459 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
8460 fold_build2_loc (loc, code, type, arg0, arg1));
8461 }
8462
8463 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
8464    arithmetic if it can never overflow, or into internal functions that
8465    return both the result of the arithmetic and an overflow boolean flag in
8466 a complex integer result, or some other check for overflow. */
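/* Sketch of the rewrite below (GIMPLE-style dump notation, not from
   the original source): __builtin_add_overflow (a, b, &res) becomes

     c = .ADD_OVERFLOW (a, b);
     res = REALPART_EXPR <c>;
     ... IMAGPART_EXPR <c> converted to bool ...

   where C has the complex integer type built below and the imaginary
   part is the returned overflow flag.  */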
8467
8468 static tree
8469 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
8470 tree arg0, tree arg1, tree arg2)
8471 {
8472 enum internal_fn ifn = IFN_LAST;
8473 tree type = TREE_TYPE (TREE_TYPE (arg2));
8474 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
8475 switch (fcode)
8476 {
8477 case BUILT_IN_ADD_OVERFLOW:
8478 case BUILT_IN_SADD_OVERFLOW:
8479 case BUILT_IN_SADDL_OVERFLOW:
8480 case BUILT_IN_SADDLL_OVERFLOW:
8481 case BUILT_IN_UADD_OVERFLOW:
8482 case BUILT_IN_UADDL_OVERFLOW:
8483 case BUILT_IN_UADDLL_OVERFLOW:
8484 ifn = IFN_ADD_OVERFLOW;
8485 break;
8486 case BUILT_IN_SUB_OVERFLOW:
8487 case BUILT_IN_SSUB_OVERFLOW:
8488 case BUILT_IN_SSUBL_OVERFLOW:
8489 case BUILT_IN_SSUBLL_OVERFLOW:
8490 case BUILT_IN_USUB_OVERFLOW:
8491 case BUILT_IN_USUBL_OVERFLOW:
8492 case BUILT_IN_USUBLL_OVERFLOW:
8493 ifn = IFN_SUB_OVERFLOW;
8494 break;
8495 case BUILT_IN_MUL_OVERFLOW:
8496 case BUILT_IN_SMUL_OVERFLOW:
8497 case BUILT_IN_SMULL_OVERFLOW:
8498 case BUILT_IN_SMULLL_OVERFLOW:
8499 case BUILT_IN_UMUL_OVERFLOW:
8500 case BUILT_IN_UMULL_OVERFLOW:
8501 case BUILT_IN_UMULLL_OVERFLOW:
8502 ifn = IFN_MUL_OVERFLOW;
8503 break;
8504 default:
8505 gcc_unreachable ();
8506 }
8507 tree ctype = build_complex_type (type);
8508 tree call = build_call_expr_internal_loc (loc, ifn, ctype,
8509 2, arg0, arg1);
8510 tree tgt = save_expr (call);
8511 tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
8512 tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
8513 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
8514 tree store
8515 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
8516 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
8517 }
8518
8519 /* Fold a call to built-in function FNDECL with 0 arguments.
8520 This function returns NULL_TREE if no simplification was possible. */
8521
8522 static tree
8523 fold_builtin_0 (location_t loc, tree fndecl)
8524 {
8525 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8526 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8527 switch (fcode)
8528 {
8529 CASE_FLT_FN (BUILT_IN_INF):
8530 case BUILT_IN_INFD32:
8531 case BUILT_IN_INFD64:
8532 case BUILT_IN_INFD128:
8533 return fold_builtin_inf (loc, type, true);
8534
8535 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
8536 return fold_builtin_inf (loc, type, false);
8537
8538 case BUILT_IN_CLASSIFY_TYPE:
8539 return fold_builtin_classify_type (NULL_TREE);
8540
8541 default:
8542 break;
8543 }
8544 return NULL_TREE;
8545 }
8546
8547 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
8548 This function returns NULL_TREE if no simplification was possible. */
8549
8550 static tree
8551 fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
8552 {
8553 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8554 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8555 switch (fcode)
8556 {
8557 case BUILT_IN_CONSTANT_P:
8558 {
8559 tree val = fold_builtin_constant_p (arg0);
8560
8561 /* Gimplification will pull the CALL_EXPR for the builtin out of
8562 an if condition. When not optimizing, we'll not CSE it back.
8563 	   To avoid link-error regressions, return false now.  */
8564 if (!val && !optimize)
8565 val = integer_zero_node;
8566
8567 return val;
8568 }
8569
8570 case BUILT_IN_CLASSIFY_TYPE:
8571 return fold_builtin_classify_type (arg0);
8572
8573 case BUILT_IN_STRLEN:
8574 return fold_builtin_strlen (loc, type, arg0);
8575
8576 CASE_FLT_FN (BUILT_IN_FABS):
8577 case BUILT_IN_FABSD32:
8578 case BUILT_IN_FABSD64:
8579 case BUILT_IN_FABSD128:
8580 return fold_builtin_fabs (loc, arg0, type);
8581
8582 case BUILT_IN_ABS:
8583 case BUILT_IN_LABS:
8584 case BUILT_IN_LLABS:
8585 case BUILT_IN_IMAXABS:
8586 return fold_builtin_abs (loc, arg0, type);
8587
8588 CASE_FLT_FN (BUILT_IN_CONJ):
8589 if (validate_arg (arg0, COMPLEX_TYPE)
8590 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8591 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
8592 break;
8593
8594 CASE_FLT_FN (BUILT_IN_CREAL):
8595 if (validate_arg (arg0, COMPLEX_TYPE)
8596 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8597 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
8598 break;
8599
8600 CASE_FLT_FN (BUILT_IN_CIMAG):
8601 if (validate_arg (arg0, COMPLEX_TYPE)
8602 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8603 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
8604 break;
8605
8606 CASE_FLT_FN (BUILT_IN_CCOS):
8607 if (validate_arg (arg0, COMPLEX_TYPE)
8608 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8609 return do_mpc_arg1 (arg0, type, mpc_cos);
8610 break;
8611
8612 CASE_FLT_FN (BUILT_IN_CCOSH):
8613 if (validate_arg (arg0, COMPLEX_TYPE)
8614 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8615 return do_mpc_arg1 (arg0, type, mpc_cosh);
8616 break;
8617
8618 CASE_FLT_FN (BUILT_IN_CPROJ):
8619 if (TREE_CODE (arg0) == COMPLEX_CST
8620 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8621 {
8622 const REAL_VALUE_TYPE *real
8623 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
8624 const REAL_VALUE_TYPE *imag
8625 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
8626
8627 if (real_isinf (real) || real_isinf (imag))
8628 return build_complex_inf (type, imag->sign);
8629 else
8630 return arg0;
8631 }
8632 break;
8633
8634 CASE_FLT_FN (BUILT_IN_CSIN):
8635 if (validate_arg (arg0, COMPLEX_TYPE)
8636 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8637 return do_mpc_arg1 (arg0, type, mpc_sin);
8638 break;
8639
8640 CASE_FLT_FN (BUILT_IN_CSINH):
8641 if (validate_arg (arg0, COMPLEX_TYPE)
8642 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8643 return do_mpc_arg1 (arg0, type, mpc_sinh);
8644 break;
8645
8646 CASE_FLT_FN (BUILT_IN_CTAN):
8647 if (validate_arg (arg0, COMPLEX_TYPE)
8648 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8649 return do_mpc_arg1 (arg0, type, mpc_tan);
8650 break;
8651
8652 CASE_FLT_FN (BUILT_IN_CTANH):
8653 if (validate_arg (arg0, COMPLEX_TYPE)
8654 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8655 return do_mpc_arg1 (arg0, type, mpc_tanh);
8656 break;
8657
8658 CASE_FLT_FN (BUILT_IN_CLOG):
8659 if (validate_arg (arg0, COMPLEX_TYPE)
8660 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8661 return do_mpc_arg1 (arg0, type, mpc_log);
8662 break;
8663
8664 CASE_FLT_FN (BUILT_IN_CSQRT):
8665 if (validate_arg (arg0, COMPLEX_TYPE)
8666 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8667 return do_mpc_arg1 (arg0, type, mpc_sqrt);
8668 break;
8669
8670 CASE_FLT_FN (BUILT_IN_CASIN):
8671 if (validate_arg (arg0, COMPLEX_TYPE)
8672 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8673 return do_mpc_arg1 (arg0, type, mpc_asin);
8674 break;
8675
8676 CASE_FLT_FN (BUILT_IN_CACOS):
8677 if (validate_arg (arg0, COMPLEX_TYPE)
8678 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8679 return do_mpc_arg1 (arg0, type, mpc_acos);
8680 break;
8681
8682 CASE_FLT_FN (BUILT_IN_CATAN):
8683 if (validate_arg (arg0, COMPLEX_TYPE)
8684 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8685 return do_mpc_arg1 (arg0, type, mpc_atan);
8686 break;
8687
8688 CASE_FLT_FN (BUILT_IN_CASINH):
8689 if (validate_arg (arg0, COMPLEX_TYPE)
8690 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8691 return do_mpc_arg1 (arg0, type, mpc_asinh);
8692 break;
8693
8694 CASE_FLT_FN (BUILT_IN_CACOSH):
8695 if (validate_arg (arg0, COMPLEX_TYPE)
8696 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8697 return do_mpc_arg1 (arg0, type, mpc_acosh);
8698 break;
8699
8700 CASE_FLT_FN (BUILT_IN_CATANH):
8701 if (validate_arg (arg0, COMPLEX_TYPE)
8702 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8703 return do_mpc_arg1 (arg0, type, mpc_atanh);
8704 break;
8705
8706 CASE_FLT_FN (BUILT_IN_CABS):
8707 if (TREE_CODE (arg0) == COMPLEX_CST
8708 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8709 return do_mpfr_arg2 (TREE_REALPART (arg0), TREE_IMAGPART (arg0),
8710 type, mpfr_hypot);
8711 break;
8712
8713 CASE_FLT_FN (BUILT_IN_CARG):
8714 return fold_builtin_carg (loc, arg0, type);
8715
8716 CASE_FLT_FN (BUILT_IN_SQRT):
8717 if (validate_arg (arg0, REAL_TYPE))
8718 return do_mpfr_arg1 (arg0, type, mpfr_sqrt, &dconst0, NULL, true);
8719 break;
8720
8721 CASE_FLT_FN (BUILT_IN_CBRT):
8722 if (validate_arg (arg0, REAL_TYPE))
8723 return do_mpfr_arg1 (arg0, type, mpfr_cbrt, NULL, NULL, 0);
8724 break;
8725
8726 CASE_FLT_FN (BUILT_IN_ASIN):
8727 if (validate_arg (arg0, REAL_TYPE))
8728 return do_mpfr_arg1 (arg0, type, mpfr_asin,
8729 &dconstm1, &dconst1, true);
8730 break;
8731
8732 CASE_FLT_FN (BUILT_IN_ACOS):
8733 if (validate_arg (arg0, REAL_TYPE))
8734 return do_mpfr_arg1 (arg0, type, mpfr_acos,
8735 &dconstm1, &dconst1, true);
8736 break;
8737
8738 CASE_FLT_FN (BUILT_IN_ATAN):
8739 if (validate_arg (arg0, REAL_TYPE))
8740 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
8741 break;
8742
8743 CASE_FLT_FN (BUILT_IN_ASINH):
8744 if (validate_arg (arg0, REAL_TYPE))
8745 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
8746 break;
8747
8748 CASE_FLT_FN (BUILT_IN_ACOSH):
8749 if (validate_arg (arg0, REAL_TYPE))
8750 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
8751 &dconst1, NULL, true);
8752 break;
8753
8754 CASE_FLT_FN (BUILT_IN_ATANH):
8755 if (validate_arg (arg0, REAL_TYPE))
8756 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
8757 &dconstm1, &dconst1, false);
8758 break;
8759
8760 CASE_FLT_FN (BUILT_IN_SIN):
8761 if (validate_arg (arg0, REAL_TYPE))
8762 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
8763 break;
8764
8765 CASE_FLT_FN (BUILT_IN_COS):
8766 if (validate_arg (arg0, REAL_TYPE))
8767 return do_mpfr_arg1 (arg0, type, mpfr_cos, NULL, NULL, 0);
8768 break;
8769
8770 CASE_FLT_FN (BUILT_IN_TAN):
8771 if (validate_arg (arg0, REAL_TYPE))
8772 return do_mpfr_arg1 (arg0, type, mpfr_tan, NULL, NULL, 0);
8773 break;
8774
8775 CASE_FLT_FN (BUILT_IN_CEXP):
8776 if (validate_arg (arg0, COMPLEX_TYPE)
8777 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8778 return do_mpc_arg1 (arg0, type, mpc_exp);
8779 break;
8780
8781 CASE_FLT_FN (BUILT_IN_CEXPI):
8782 if (validate_arg (arg0, REAL_TYPE))
8783 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
8784 break;
8785
8786 CASE_FLT_FN (BUILT_IN_SINH):
8787 if (validate_arg (arg0, REAL_TYPE))
8788 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
8789 break;
8790
8791 CASE_FLT_FN (BUILT_IN_COSH):
8792 if (validate_arg (arg0, REAL_TYPE))
8793 return do_mpfr_arg1 (arg0, type, mpfr_cosh, NULL, NULL, 0);
8794 break;
8795
8796 CASE_FLT_FN (BUILT_IN_TANH):
8797 if (validate_arg (arg0, REAL_TYPE))
8798 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
8799 break;
8800
8801 CASE_FLT_FN (BUILT_IN_ERF):
8802 if (validate_arg (arg0, REAL_TYPE))
8803 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
8804 break;
8805
8806 CASE_FLT_FN (BUILT_IN_ERFC):
8807 if (validate_arg (arg0, REAL_TYPE))
8808 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
8809 break;
8810
8811 CASE_FLT_FN (BUILT_IN_TGAMMA):
8812 if (validate_arg (arg0, REAL_TYPE))
8813 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
8814 break;
8815
8816 CASE_FLT_FN (BUILT_IN_EXP):
8817 if (validate_arg (arg0, REAL_TYPE))
8818 return do_mpfr_arg1 (arg0, type, mpfr_exp, NULL, NULL, 0);
8819 break;
8820
8821 CASE_FLT_FN (BUILT_IN_EXP2):
8822 if (validate_arg (arg0, REAL_TYPE))
8823 return do_mpfr_arg1 (arg0, type, mpfr_exp2, NULL, NULL, 0);
8824 break;
8825
8826 CASE_FLT_FN (BUILT_IN_EXP10):
8827 CASE_FLT_FN (BUILT_IN_POW10):
8828 if (validate_arg (arg0, REAL_TYPE))
8829 return do_mpfr_arg1 (arg0, type, mpfr_exp10, NULL, NULL, 0);
8830 break;
8831
8832 CASE_FLT_FN (BUILT_IN_EXPM1):
8833 if (validate_arg (arg0, REAL_TYPE))
8834 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
8835 break;
8836
8837 CASE_FLT_FN (BUILT_IN_LOG):
8838 if (validate_arg (arg0, REAL_TYPE))
8839 return do_mpfr_arg1 (arg0, type, mpfr_log, &dconst0, NULL, false);
8840 break;
8841
8842 CASE_FLT_FN (BUILT_IN_LOG2):
8843 if (validate_arg (arg0, REAL_TYPE))
8844 return do_mpfr_arg1 (arg0, type, mpfr_log2, &dconst0, NULL, false);
8845 break;
8846
8847 CASE_FLT_FN (BUILT_IN_LOG10):
8848 if (validate_arg (arg0, REAL_TYPE))
8849 return do_mpfr_arg1 (arg0, type, mpfr_log10, &dconst0, NULL, false);
8850 break;
8851
8852 CASE_FLT_FN (BUILT_IN_LOG1P):
8853 if (validate_arg (arg0, REAL_TYPE))
8854 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
8855 &dconstm1, NULL, false);
8856 break;
8857
8858 CASE_FLT_FN (BUILT_IN_J0):
8859 if (validate_arg (arg0, REAL_TYPE))
8860 return do_mpfr_arg1 (arg0, type, mpfr_j0,
8861 NULL, NULL, 0);
8862 break;
8863
8864 CASE_FLT_FN (BUILT_IN_J1):
8865 if (validate_arg (arg0, REAL_TYPE))
8866 return do_mpfr_arg1 (arg0, type, mpfr_j1,
8867 NULL, NULL, 0);
8868 break;
8869
8870 CASE_FLT_FN (BUILT_IN_Y0):
8871 if (validate_arg (arg0, REAL_TYPE))
8872 return do_mpfr_arg1 (arg0, type, mpfr_y0,
8873 &dconst0, NULL, false);
8874 break;
8875
8876 CASE_FLT_FN (BUILT_IN_Y1):
8877 if (validate_arg (arg0, REAL_TYPE))
8878 return do_mpfr_arg1 (arg0, type, mpfr_y1,
8879 &dconst0, NULL, false);
8880 break;
8881
8882 CASE_FLT_FN (BUILT_IN_NAN):
8883 case BUILT_IN_NAND32:
8884 case BUILT_IN_NAND64:
8885 case BUILT_IN_NAND128:
8886 return fold_builtin_nan (arg0, type, true);
8887
8888 CASE_FLT_FN (BUILT_IN_NANS):
8889 return fold_builtin_nan (arg0, type, false);
8890
8891 CASE_FLT_FN (BUILT_IN_FLOOR):
8892 if (TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0))
8893 {
8894 REAL_VALUE_TYPE x = TREE_REAL_CST (arg0);
8895 if (!REAL_VALUE_ISNAN (x) || !flag_errno_math)
8896 {
8897 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8898 REAL_VALUE_TYPE r;
8899 real_floor (&r, TYPE_MODE (type), &x);
8900 return build_real (type, r);
8901 }
8902 }
8903 break;
8904
8905 CASE_FLT_FN (BUILT_IN_CEIL):
8906 if (TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0))
8907 {
8908 REAL_VALUE_TYPE x = TREE_REAL_CST (arg0);
8909 if (!REAL_VALUE_ISNAN (x) || !flag_errno_math)
8910 {
8911 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8912 REAL_VALUE_TYPE r;
8913 real_ceil (&r, TYPE_MODE (type), &x);
8914 return build_real (type, r);
8915 }
8916 }
8917 break;
8918
8919 CASE_FLT_FN (BUILT_IN_TRUNC):
8920 if (TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0))
8921 {
8922 REAL_VALUE_TYPE x = TREE_REAL_CST (arg0);
8923 REAL_VALUE_TYPE r;
8924 real_trunc (&r, TYPE_MODE (type), &x);
8925 return build_real (type, r);
8926 }
8927 break;
8928
8929 CASE_FLT_FN (BUILT_IN_ROUND):
8930 if (TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0))
8931 {
8932 REAL_VALUE_TYPE x = TREE_REAL_CST (arg0);
8933 if (!REAL_VALUE_ISNAN (x) || !flag_errno_math)
8934 {
8935 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8936 REAL_VALUE_TYPE r;
8937 real_round (&r, TYPE_MODE (type), &x);
8938 return build_real (type, r);
8939 }
8940 }
8941 break;
8942
8943 CASE_FLT_FN (BUILT_IN_ICEIL):
8944 CASE_FLT_FN (BUILT_IN_LCEIL):
8945 CASE_FLT_FN (BUILT_IN_LLCEIL):
8946 return do_real_to_int_conversion (type, arg0, real_ceil);
8947
8948 CASE_FLT_FN (BUILT_IN_LFLOOR):
8949 CASE_FLT_FN (BUILT_IN_IFLOOR):
8950 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8951 return do_real_to_int_conversion (type, arg0, real_floor);
8952
8953 CASE_FLT_FN (BUILT_IN_IROUND):
8954 CASE_FLT_FN (BUILT_IN_LROUND):
8955 CASE_FLT_FN (BUILT_IN_LLROUND):
8956 return do_real_to_int_conversion (type, arg0, real_round);
8957
8958 CASE_FLT_FN (BUILT_IN_IRINT):
8959 CASE_FLT_FN (BUILT_IN_LRINT):
8960 CASE_FLT_FN (BUILT_IN_LLRINT):
8961 /* Not yet folded to a constant. */
8962 return NULL_TREE;
8963
8964 case BUILT_IN_BSWAP16:
8965 case BUILT_IN_BSWAP32:
8966 case BUILT_IN_BSWAP64:
8967 return fold_builtin_bswap (fndecl, arg0);
8968
8969 CASE_INT_FN (BUILT_IN_FFS):
8970 CASE_INT_FN (BUILT_IN_CLZ):
8971 CASE_INT_FN (BUILT_IN_CTZ):
8972 CASE_INT_FN (BUILT_IN_CLRSB):
8973 CASE_INT_FN (BUILT_IN_POPCOUNT):
8974 CASE_INT_FN (BUILT_IN_PARITY):
8975 return fold_builtin_bitop (fndecl, arg0);
8976
8977 CASE_FLT_FN (BUILT_IN_SIGNBIT):
8978 if (TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0))
8979 return (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0))
8980 ? build_one_cst (type)
8981 : build_zero_cst (type));
8982 break;
8983
8984 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
8985 return fold_const_builtin_significand (loc, arg0, type);
8986
8987 CASE_FLT_FN (BUILT_IN_ILOGB):
8988 CASE_FLT_FN (BUILT_IN_LOGB):
8989 return fold_const_builtin_logb (loc, arg0, type);
8990
8991 case BUILT_IN_ISASCII:
8992 return fold_builtin_isascii (loc, arg0);
8993
8994 case BUILT_IN_TOASCII:
8995 return fold_builtin_toascii (loc, arg0);
8996
8997 case BUILT_IN_ISDIGIT:
8998 return fold_builtin_isdigit (loc, arg0);
8999
9000 CASE_FLT_FN (BUILT_IN_FINITE):
9001 case BUILT_IN_FINITED32:
9002 case BUILT_IN_FINITED64:
9003 case BUILT_IN_FINITED128:
9004 case BUILT_IN_ISFINITE:
9005 {
9006 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
9007 if (ret)
9008 return ret;
9009 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9010 }
9011
9012 CASE_FLT_FN (BUILT_IN_ISINF):
9013 case BUILT_IN_ISINFD32:
9014 case BUILT_IN_ISINFD64:
9015 case BUILT_IN_ISINFD128:
9016 {
9017 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
9018 if (ret)
9019 return ret;
9020 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9021 }
9022
9023 case BUILT_IN_ISNORMAL:
9024 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9025
9026 case BUILT_IN_ISINF_SIGN:
9027 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
9028
9029 CASE_FLT_FN (BUILT_IN_ISNAN):
9030 case BUILT_IN_ISNAND32:
9031 case BUILT_IN_ISNAND64:
9032 case BUILT_IN_ISNAND128:
9033 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
9034
9035 case BUILT_IN_FREE:
9036 if (integer_zerop (arg0))
9037 return build_empty_stmt (loc);
9038 break;
9039
9040 default:
9041 break;
9042 }
9043
9044 return NULL_TREE;
9045
9046 }
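
/* For instance (an editor's sketch): with a constant argument the cases
   above fold the whole call to a constant, e.g.

     __builtin_fabs (-2.5)            -->  2.5
     __builtin_signbit (-0.5)         -->  1
     __builtin_bswap32 (0x12345678)   -->  0x78563412

   whereas a non-constant argument falls through to the NULL_TREE
   return, leaving the call for later passes.  */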
9047
9048 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
9049 This function returns NULL_TREE if no simplification was possible. */
9050
9051 static tree
9052 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
9053 {
9054 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9055 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9056
9057 switch (fcode)
9058 {
9059 CASE_FLT_FN (BUILT_IN_JN):
9060 if (validate_arg (arg0, INTEGER_TYPE)
9061 && validate_arg (arg1, REAL_TYPE))
9062 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
9063 break;
9064
9065 CASE_FLT_FN (BUILT_IN_YN):
9066 if (validate_arg (arg0, INTEGER_TYPE)
9067 && validate_arg (arg1, REAL_TYPE))
9068 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
9069 &dconst0, false);
9070 break;
9071
9072 CASE_FLT_FN (BUILT_IN_DREM):
9073 CASE_FLT_FN (BUILT_IN_REMAINDER):
9074 if (validate_arg (arg0, REAL_TYPE)
9075 && validate_arg (arg1, REAL_TYPE))
9076 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
9077 break;
9078
9079 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
9080 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
9081 if (validate_arg (arg0, REAL_TYPE)
9082 && validate_arg (arg1, POINTER_TYPE))
9083 return do_mpfr_lgamma_r (arg0, arg1, type);
9084 break;
9085
9086 CASE_FLT_FN (BUILT_IN_ATAN2):
9087 if (validate_arg (arg0, REAL_TYPE)
9088 && validate_arg (arg1, REAL_TYPE))
9089 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
9090 break;
9091
9092 CASE_FLT_FN (BUILT_IN_FDIM):
9093 if (validate_arg (arg0, REAL_TYPE)
9094 && validate_arg (arg1, REAL_TYPE))
9095 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
9096 break;
9097
9098 CASE_FLT_FN (BUILT_IN_HYPOT):
9099 if (validate_arg (arg0, REAL_TYPE)
9100 && validate_arg (arg1, REAL_TYPE))
9101 return do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot);
9102 break;
9103
9104 CASE_FLT_FN (BUILT_IN_CPOW):
9105 if (validate_arg (arg0, COMPLEX_TYPE)
9106 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
9107 && validate_arg (arg1, COMPLEX_TYPE)
9108 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
9109 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
9110 break;
9111
9112 CASE_FLT_FN (BUILT_IN_LDEXP):
9113 return fold_const_builtin_load_exponent (arg0, arg1, type,
9114 /*ldexp=*/true);
9115 CASE_FLT_FN (BUILT_IN_SCALBN):
9116 CASE_FLT_FN (BUILT_IN_SCALBLN):
9117 return fold_const_builtin_load_exponent (arg0, arg1, type,
9118 /*ldexp=*/false);
9119
9120 CASE_FLT_FN (BUILT_IN_FREXP):
9121 return fold_builtin_frexp (loc, arg0, arg1, type);
9122
9123 CASE_FLT_FN (BUILT_IN_MODF):
9124 return fold_builtin_modf (loc, arg0, arg1, type);
9125
9126 case BUILT_IN_STRSTR:
9127 return fold_builtin_strstr (loc, arg0, arg1, type);
9128
9129 case BUILT_IN_STRSPN:
9130 return fold_builtin_strspn (loc, arg0, arg1);
9131
9132 case BUILT_IN_STRCSPN:
9133 return fold_builtin_strcspn (loc, arg0, arg1);
9134
9135 case BUILT_IN_STRCHR:
9136 case BUILT_IN_INDEX:
9137 return fold_builtin_strchr (loc, arg0, arg1, type);
9138
9139 case BUILT_IN_STRRCHR:
9140 case BUILT_IN_RINDEX:
9141 return fold_builtin_strrchr (loc, arg0, arg1, type);
9142
9143 case BUILT_IN_STRCMP:
9144 return fold_builtin_strcmp (loc, arg0, arg1);
9145
9146 case BUILT_IN_STRPBRK:
9147 return fold_builtin_strpbrk (loc, arg0, arg1, type);
9148
9149 case BUILT_IN_EXPECT:
9150 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);
9151
9152 CASE_FLT_FN (BUILT_IN_POW):
9153 return fold_const_builtin_pow (arg0, arg1, type);
9154
9155 CASE_FLT_FN (BUILT_IN_POWI):
9156 if (TREE_CODE (arg0) == REAL_CST
9157 && !TREE_OVERFLOW (arg0)
9158 && tree_fits_shwi_p (arg1))
9159 {
9160 HOST_WIDE_INT c = tree_to_shwi (arg1);
9161 REAL_VALUE_TYPE x;
9162 real_powi (&x, TYPE_MODE (type), TREE_REAL_CST_PTR (arg0), c);
9163 return build_real (type, x);
9164 }
9165 break;
9166
9167 CASE_FLT_FN (BUILT_IN_COPYSIGN):
9168 if (TREE_CODE (arg0) == REAL_CST
9169 && TREE_CODE (arg1) == REAL_CST
9170 && !TREE_OVERFLOW (arg0)
9171 && !TREE_OVERFLOW (arg1))
9172 {
9173 REAL_VALUE_TYPE c1 = TREE_REAL_CST (arg0);
9174 real_copysign (&c1, TREE_REAL_CST_PTR (arg1));
9175 return build_real (type, c1);
9176 }
9177 break;
9178
9179 CASE_FLT_FN (BUILT_IN_FMIN):
9180 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9181 return do_mpfr_arg2 (arg0, arg1, type, mpfr_min);
9182 break;
9183
9184 CASE_FLT_FN (BUILT_IN_FMAX):
9185 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9186 return do_mpfr_arg2 (arg0, arg1, type, mpfr_max);
9187 break;
9188
9189 case BUILT_IN_ISGREATER:
9190 return fold_builtin_unordered_cmp (loc, fndecl,
9191 arg0, arg1, UNLE_EXPR, LE_EXPR);
9192 case BUILT_IN_ISGREATEREQUAL:
9193 return fold_builtin_unordered_cmp (loc, fndecl,
9194 arg0, arg1, UNLT_EXPR, LT_EXPR);
9195 case BUILT_IN_ISLESS:
9196 return fold_builtin_unordered_cmp (loc, fndecl,
9197 arg0, arg1, UNGE_EXPR, GE_EXPR);
9198 case BUILT_IN_ISLESSEQUAL:
9199 return fold_builtin_unordered_cmp (loc, fndecl,
9200 arg0, arg1, UNGT_EXPR, GT_EXPR);
9201 case BUILT_IN_ISLESSGREATER:
9202 return fold_builtin_unordered_cmp (loc, fndecl,
9203 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
9204 case BUILT_IN_ISUNORDERED:
9205 return fold_builtin_unordered_cmp (loc, fndecl,
9206 arg0, arg1, UNORDERED_EXPR,
9207 NOP_EXPR);
9208
9209 /* We do the folding for va_start in the expander. */
9210 case BUILT_IN_VA_START:
9211 break;
9212
9213 case BUILT_IN_OBJECT_SIZE:
9214 return fold_builtin_object_size (arg0, arg1);
9215
9216 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
9217 return fold_builtin_atomic_always_lock_free (arg0, arg1);
9218
9219 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
9220 return fold_builtin_atomic_is_lock_free (arg0, arg1);
9221
9222 default:
9223 break;
9224 }
9225 return NULL_TREE;
9226 }
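
/* Example for the unordered comparisons above (a sketch, assuming the
   usual behaviour of fold_builtin_unordered_cmp, defined elsewhere in
   this file): isgreater is folded to the negated inverse comparison,

     isgreater (x, y)  -->  !(x <= y)         if x and y cannot be NaN
     isgreater (x, y)  -->  !UNLE (x, y)      if NaNs must be honoured

   which is why the UNLE_EXPR/LE_EXPR pair is passed for it.  */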
9227
9228 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
9229 and ARG2.
9230 This function returns NULL_TREE if no simplification was possible. */
9231
9232 static tree
9233 fold_builtin_3 (location_t loc, tree fndecl,
9234 tree arg0, tree arg1, tree arg2)
9235 {
9236 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9237 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9238 switch (fcode)
9239 {
9240
9241 CASE_FLT_FN (BUILT_IN_SINCOS):
9242 return fold_builtin_sincos (loc, arg0, arg1, arg2);
9243
9244 CASE_FLT_FN (BUILT_IN_FMA):
9245 if (tree tem = fold_fma (loc, type, arg0, arg1, arg2))
9246 return tem;
9247 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
9248
9249 CASE_FLT_FN (BUILT_IN_REMQUO):
9250 if (validate_arg (arg0, REAL_TYPE)
9251 && validate_arg (arg1, REAL_TYPE)
9252 && validate_arg (arg2, POINTER_TYPE))
9253 return do_mpfr_remquo (arg0, arg1, arg2);
9254 break;
9255
9256 case BUILT_IN_STRNCMP:
9257 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
9258
9259 case BUILT_IN_MEMCHR:
9260 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
9261
9262 case BUILT_IN_BCMP:
9263 case BUILT_IN_MEMCMP:
9264       return fold_builtin_memcmp (loc, arg0, arg1, arg2);
9265
9266 case BUILT_IN_EXPECT:
9267 return fold_builtin_expect (loc, arg0, arg1, arg2);
9268
9269 case BUILT_IN_ADD_OVERFLOW:
9270 case BUILT_IN_SUB_OVERFLOW:
9271 case BUILT_IN_MUL_OVERFLOW:
9272 case BUILT_IN_SADD_OVERFLOW:
9273 case BUILT_IN_SADDL_OVERFLOW:
9274 case BUILT_IN_SADDLL_OVERFLOW:
9275 case BUILT_IN_SSUB_OVERFLOW:
9276 case BUILT_IN_SSUBL_OVERFLOW:
9277 case BUILT_IN_SSUBLL_OVERFLOW:
9278 case BUILT_IN_SMUL_OVERFLOW:
9279 case BUILT_IN_SMULL_OVERFLOW:
9280 case BUILT_IN_SMULLL_OVERFLOW:
9281 case BUILT_IN_UADD_OVERFLOW:
9282 case BUILT_IN_UADDL_OVERFLOW:
9283 case BUILT_IN_UADDLL_OVERFLOW:
9284 case BUILT_IN_USUB_OVERFLOW:
9285 case BUILT_IN_USUBL_OVERFLOW:
9286 case BUILT_IN_USUBLL_OVERFLOW:
9287 case BUILT_IN_UMUL_OVERFLOW:
9288 case BUILT_IN_UMULL_OVERFLOW:
9289 case BUILT_IN_UMULLL_OVERFLOW:
9290 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
9291
9292 default:
9293 break;
9294 }
9295 return NULL_TREE;
9296 }
9297
9298 /* Fold a call to built-in function FNDECL.  ARGS is an array of NARGS
9299    arguments.  The trailing bool parameter (historically IGNORE, true
9300    if the result of the call is ignored) is now unused.  This function
9301    returns NULL_TREE if no simplification was possible.  */
9302
9303 tree
9304 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
9305 {
9306 tree ret = NULL_TREE;
9307
9308 switch (nargs)
9309 {
9310 case 0:
9311 ret = fold_builtin_0 (loc, fndecl);
9312 break;
9313 case 1:
9314 ret = fold_builtin_1 (loc, fndecl, args[0]);
9315 break;
9316 case 2:
9317 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
9318 break;
9319 case 3:
9320 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
9321 break;
9322 default:
9323 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
9324 break;
9325 }
9326 if (ret)
9327 {
9328 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
9329 SET_EXPR_LOCATION (ret, loc);
9330 TREE_NO_WARNING (ret) = 1;
9331 return ret;
9332 }
9333 return NULL_TREE;
9334 }
9335
9336 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
9337 list ARGS along with N new arguments in NEWARGS. SKIP is the number
9338 of arguments in ARGS to be omitted. OLDNARGS is the number of
9339 elements in ARGS. */
9340
9341 static tree
9342 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
9343 int skip, tree fndecl, int n, va_list newargs)
9344 {
9345 int nargs = oldnargs - skip + n;
9346 tree *buffer;
9347
9348 if (n > 0)
9349 {
9350 int i, j;
9351
9352 buffer = XALLOCAVEC (tree, nargs);
9353 for (i = 0; i < n; i++)
9354 buffer[i] = va_arg (newargs, tree);
9355 for (j = skip; j < oldnargs; j++, i++)
9356 buffer[i] = args[j];
9357 }
9358 else
9359 buffer = args + skip;
9360
9361 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
9362 }
9363
9364 /* Return true if FNDECL shouldn't be folded right now.
9365 If a built-in function has an inline attribute always_inline
9366 wrapper, defer folding it after always_inline functions have
9367 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
9368 might not be performed. */
9369
9370 bool
9371 avoid_folding_inline_builtin (tree fndecl)
9372 {
9373 return (DECL_DECLARED_INLINE_P (fndecl)
9374 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
9375 && cfun
9376 && !cfun->always_inline_functions_inlined
9377 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
9378 }
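
/* A hypothetical sketch of what this guards against: -D_FORTIFY_SOURCE
   headers wrap string builtins in always_inline functions along the
   lines of

     extern inline __attribute__ ((always_inline)) char *
     strcpy (char *dest, const char *src)
     {
       return __builtin___strcpy_chk (dest, src,
                                      __builtin_object_size (dest, 1));
     }

   Folding the strcpy call before the wrapper is inlined would bypass
   the object-size check, hence the deferral.  */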
9379
9380 /* A wrapper function for builtin folding that prevents warnings for
9381 "statement without effect" and the like, caused by removing the
9382 call node earlier than the warning is generated. */
9383
9384 tree
9385 fold_call_expr (location_t loc, tree exp, bool ignore)
9386 {
9387 tree ret = NULL_TREE;
9388 tree fndecl = get_callee_fndecl (exp);
9389 if (fndecl
9390 && TREE_CODE (fndecl) == FUNCTION_DECL
9391 && DECL_BUILT_IN (fndecl)
9392 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
9393 yet. Defer folding until we see all the arguments
9394 (after inlining). */
9395 && !CALL_EXPR_VA_ARG_PACK (exp))
9396 {
9397 int nargs = call_expr_nargs (exp);
9398
9399 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
9400 instead last argument is __builtin_va_arg_pack (). Defer folding
9401 even in that case, until arguments are finalized. */
9402 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
9403 {
9404 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
9405 if (fndecl2
9406 && TREE_CODE (fndecl2) == FUNCTION_DECL
9407 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
9408 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
9409 return NULL_TREE;
9410 }
9411
9412 if (avoid_folding_inline_builtin (fndecl))
9413 return NULL_TREE;
9414
9415 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9416 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
9417 CALL_EXPR_ARGP (exp), ignore);
9418 else
9419 {
9420 tree *args = CALL_EXPR_ARGP (exp);
9421 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
9422 if (ret)
9423 return ret;
9424 }
9425 }
9426 return NULL_TREE;
9427 }
9428
9429 /* Fold a CALL_EXPR with FN as the function expression; the unnamed tree
9430    parameter (the call's type) is unused.  The N arguments are passed in
9431    the array ARGARRAY.  Return NULL_TREE if no simplification was possible.  */
9432
9433 tree
9434 fold_builtin_call_array (location_t loc, tree,
9435 tree fn,
9436 int n,
9437 tree *argarray)
9438 {
9439 if (TREE_CODE (fn) != ADDR_EXPR)
9440 return NULL_TREE;
9441
9442 tree fndecl = TREE_OPERAND (fn, 0);
9443 if (TREE_CODE (fndecl) == FUNCTION_DECL
9444 && DECL_BUILT_IN (fndecl))
9445 {
9446 /* If last argument is __builtin_va_arg_pack (), arguments to this
9447 function are not finalized yet. Defer folding until they are. */
9448 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
9449 {
9450 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
9451 if (fndecl2
9452 && TREE_CODE (fndecl2) == FUNCTION_DECL
9453 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
9454 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
9455 return NULL_TREE;
9456 }
9457 if (avoid_folding_inline_builtin (fndecl))
9458 return NULL_TREE;
9459 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9460 return targetm.fold_builtin (fndecl, n, argarray, false);
9461 else
9462 return fold_builtin_n (loc, fndecl, argarray, n, false);
9463 }
9464
9465 return NULL_TREE;
9466 }
9467
9468 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
9469 along with N new arguments specified as the "..." parameters. SKIP
9470 is the number of arguments in EXP to be omitted. This function is used
9471 to do varargs-to-varargs transformations. */
9472
9473 static tree
9474 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
9475 {
9476 va_list ap;
9477 tree t;
9478
9479 va_start (ap, n);
9480 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
9481 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
9482 va_end (ap);
9483
9484 return t;
9485 }
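
/* Usage sketch (hypothetical arguments): given EXP = f (a, b, c, d),

     rewrite_call_expr (loc, exp, 2, g_decl, 1, x)

   skips the first two arguments of EXP and builds  g_decl (x, c, d).  */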
9486
9487 /* Validate a single argument ARG against a tree code CODE representing
9488 a type. */
9489
9490 static bool
9491 validate_arg (const_tree arg, enum tree_code code)
9492 {
9493 if (!arg)
9494 return false;
9495 else if (code == POINTER_TYPE)
9496 return POINTER_TYPE_P (TREE_TYPE (arg));
9497 else if (code == INTEGER_TYPE)
9498 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
9499 return code == TREE_CODE (TREE_TYPE (arg));
9500 }
9501
9502 /* This function validates the types of a function call argument list
9503 against a specified list of tree_codes. If the last specifier is a 0,
9504    that represents an ellipsis; otherwise the last specifier must be a
9505 VOID_TYPE.
9506
9507 This is the GIMPLE version of validate_arglist. Eventually we want to
9508 completely convert builtins.c to work from GIMPLEs and the tree based
9509 validate_arglist will then be removed. */
9510
9511 bool
9512 validate_gimple_arglist (const gcall *call, ...)
9513 {
9514 enum tree_code code;
9515   bool res = false;
9516 va_list ap;
9517 const_tree arg;
9518 size_t i;
9519
9520 va_start (ap, call);
9521 i = 0;
9522
9523 do
9524 {
9525 code = (enum tree_code) va_arg (ap, int);
9526 switch (code)
9527 {
9528 case 0:
9529 	  /* This signifies an ellipsis; any further arguments are all ok.  */
9530 res = true;
9531 goto end;
9532 case VOID_TYPE:
9533 	  /* This signifies an endlink; if no arguments remain, return
9534 	     true, otherwise return false.  */
9535 res = (i == gimple_call_num_args (call));
9536 goto end;
9537 default:
9538 /* If no parameters remain or the parameter's code does not
9539 match the specified code, return false. Otherwise continue
9540 checking any remaining arguments. */
9541 arg = gimple_call_arg (call, i++);
9542 if (!validate_arg (arg, code))
9543 goto end;
9544 break;
9545 }
9546 }
9547 while (1);
9548
9549 /* We need gotos here since we can only have one VA_CLOSE in a
9550 function. */
9551 end: ;
9552 va_end (ap);
9553
9554 return res;
9555 }
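
/* Usage sketch (an illustration; real callers appear elsewhere in this
   file): a memcpy-like call would be checked with

     validate_gimple_arglist (call, POINTER_TYPE, POINTER_TYPE,
                              INTEGER_TYPE, VOID_TYPE)

   where the trailing VOID_TYPE requires that no further arguments
   remain, while a trailing 0 instead would accept any extras.  */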
9556
9557 /* Default target-specific builtin expander that does nothing. */
9558
9559 rtx
9560 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
9561 rtx target ATTRIBUTE_UNUSED,
9562 rtx subtarget ATTRIBUTE_UNUSED,
9563 machine_mode mode ATTRIBUTE_UNUSED,
9564 int ignore ATTRIBUTE_UNUSED)
9565 {
9566 return NULL_RTX;
9567 }
9568
9569 /* Returns true if EXP represents data that would potentially reside
9570 in a readonly section. */
9571
9572 bool
9573 readonly_data_expr (tree exp)
9574 {
9575 STRIP_NOPS (exp);
9576
9577 if (TREE_CODE (exp) != ADDR_EXPR)
9578 return false;
9579
9580 exp = get_base_address (TREE_OPERAND (exp, 0));
9581 if (!exp)
9582 return false;
9583
9584 /* Make sure we call decl_readonly_section only for trees it
9585 can handle (since it returns true for everything it doesn't
9586 understand). */
9587 if (TREE_CODE (exp) == STRING_CST
9588 || TREE_CODE (exp) == CONSTRUCTOR
9589 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
9590 return decl_readonly_section (exp, 0);
9591 else
9592 return false;
9593 }
9594
9595 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
9596 to the call, and TYPE is its return type.
9597
9598 Return NULL_TREE if no simplification was possible, otherwise return the
9599 simplified form of the call as a tree.
9600
9601 The simplified form may be a constant or other expression which
9602 computes the same value, but in a more efficient manner (including
9603 calls to other builtin functions).
9604
9605 The call may contain arguments which need to be evaluated, but
9606 which are not useful to determine the result of the call. In
9607 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9608 COMPOUND_EXPR will be an argument which must be evaluated.
9609 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9610 COMPOUND_EXPR in the chain will contain the tree for the simplified
9611 form of the builtin function call. */
9612
9613 static tree
9614 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
9615 {
9616 if (!validate_arg (s1, POINTER_TYPE)
9617 || !validate_arg (s2, POINTER_TYPE))
9618 return NULL_TREE;
9619 else
9620 {
9621 tree fn;
9622 const char *p1, *p2;
9623
9624 p2 = c_getstr (s2);
9625 if (p2 == NULL)
9626 return NULL_TREE;
9627
9628 p1 = c_getstr (s1);
9629 if (p1 != NULL)
9630 {
9631 const char *r = strstr (p1, p2);
9632 tree tem;
9633
9634 if (r == NULL)
9635 return build_int_cst (TREE_TYPE (s1), 0);
9636
9637 /* Return an offset into the constant string argument. */
9638 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
9639 return fold_convert_loc (loc, type, tem);
9640 }
9641
9642 /* The argument is const char *, and the result is char *, so we need
9643 a type conversion here to avoid a warning. */
9644 if (p2[0] == '\0')
9645 return fold_convert_loc (loc, type, s1);
9646
9647 if (p2[1] != '\0')
9648 return NULL_TREE;
9649
9650 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
9651 if (!fn)
9652 return NULL_TREE;
9653
9654 /* New argument list transforming strstr(s1, s2) to
9655 strchr(s1, s2[0]). */
9656 return build_call_expr_loc (loc, fn, 2, s1,
9657 build_int_cst (integer_type_node, p2[0]));
9658 }
9659 }
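
/* Illustrative summary of the cases above (editor's sketch):

     strstr (s, "")        -->  (char *) s
     strstr (s, "c")       -->  strchr (s, 'c')
     strstr ("abc", "bc")  -->  "abc" + 1        (both constant)

   Any other form is left for the library call.  */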
9660
9661 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
9662 the call, and TYPE is its return type.
9663
9664 Return NULL_TREE if no simplification was possible, otherwise return the
9665 simplified form of the call as a tree.
9666
9667 The simplified form may be a constant or other expression which
9668 computes the same value, but in a more efficient manner (including
9669 calls to other builtin functions).
9670
9671 The call may contain arguments which need to be evaluated, but
9672 which are not useful to determine the result of the call. In
9673 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9674 COMPOUND_EXPR will be an argument which must be evaluated.
9675 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9676 COMPOUND_EXPR in the chain will contain the tree for the simplified
9677 form of the builtin function call. */
9678
9679 static tree
9680 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
9681 {
9682 if (!validate_arg (s1, POINTER_TYPE)
9683 || !validate_arg (s2, INTEGER_TYPE))
9684 return NULL_TREE;
9685 else
9686 {
9687 const char *p1;
9688
9689 if (TREE_CODE (s2) != INTEGER_CST)
9690 return NULL_TREE;
9691
9692 p1 = c_getstr (s1);
9693 if (p1 != NULL)
9694 {
9695 char c;
9696 const char *r;
9697 tree tem;
9698
9699 if (target_char_cast (s2, &c))
9700 return NULL_TREE;
9701
9702 r = strchr (p1, c);
9703
9704 if (r == NULL)
9705 return build_int_cst (TREE_TYPE (s1), 0);
9706
9707 /* Return an offset into the constant string argument. */
9708 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
9709 return fold_convert_loc (loc, type, tem);
9710 }
9711 return NULL_TREE;
9712 }
9713 }
9714
9715 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
9716 the call, and TYPE is its return type.
9717
9718 Return NULL_TREE if no simplification was possible, otherwise return the
9719 simplified form of the call as a tree.
9720
9721 The simplified form may be a constant or other expression which
9722 computes the same value, but in a more efficient manner (including
9723 calls to other builtin functions).
9724
9725 The call may contain arguments which need to be evaluated, but
9726 which are not useful to determine the result of the call. In
9727 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9728 COMPOUND_EXPR will be an argument which must be evaluated.
9729 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9730 COMPOUND_EXPR in the chain will contain the tree for the simplified
9731 form of the builtin function call. */
9732
9733 static tree
9734 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
9735 {
9736 if (!validate_arg (s1, POINTER_TYPE)
9737 || !validate_arg (s2, INTEGER_TYPE))
9738 return NULL_TREE;
9739 else
9740 {
9741 tree fn;
9742 const char *p1;
9743
9744 if (TREE_CODE (s2) != INTEGER_CST)
9745 return NULL_TREE;
9746
9747 p1 = c_getstr (s1);
9748 if (p1 != NULL)
9749 {
9750 char c;
9751 const char *r;
9752 tree tem;
9753
9754 if (target_char_cast (s2, &c))
9755 return NULL_TREE;
9756
9757 r = strrchr (p1, c);
9758
9759 if (r == NULL)
9760 return build_int_cst (TREE_TYPE (s1), 0);
9761
9762 /* Return an offset into the constant string argument. */
9763 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
9764 return fold_convert_loc (loc, type, tem);
9765 }
9766
9767 if (! integer_zerop (s2))
9768 return NULL_TREE;
9769
9770 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
9771 if (!fn)
9772 return NULL_TREE;
9773
9774 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
9775 return build_call_expr_loc (loc, fn, 2, s1, s2);
9776 }
9777 }
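
/* Illustrative summary (editor's sketch):

     strrchr ("abca", 'a')  -->  "abca" + 3      (constant string)
     strrchr (s, '\0')      -->  strchr (s, '\0')

   A non-constant string searched for a nonzero character is not
   simplified.  */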
9778
9779 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
9780 to the call, and TYPE is its return type.
9781
9782 Return NULL_TREE if no simplification was possible, otherwise return the
9783 simplified form of the call as a tree.
9784
9785 The simplified form may be a constant or other expression which
9786 computes the same value, but in a more efficient manner (including
9787 calls to other builtin functions).
9788
9789 The call may contain arguments which need to be evaluated, but
9790 which are not useful to determine the result of the call. In
9791 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9792 COMPOUND_EXPR will be an argument which must be evaluated.
9793 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9794 COMPOUND_EXPR in the chain will contain the tree for the simplified
9795 form of the builtin function call. */
9796
9797 static tree
9798 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
9799 {
9800 if (!validate_arg (s1, POINTER_TYPE)
9801 || !validate_arg (s2, POINTER_TYPE))
9802 return NULL_TREE;
9803 else
9804 {
9805 tree fn;
9806 const char *p1, *p2;
9807
9808 p2 = c_getstr (s2);
9809 if (p2 == NULL)
9810 return NULL_TREE;
9811
9812 p1 = c_getstr (s1);
9813 if (p1 != NULL)
9814 {
9815 const char *r = strpbrk (p1, p2);
9816 tree tem;
9817
9818 if (r == NULL)
9819 return build_int_cst (TREE_TYPE (s1), 0);
9820
9821 /* Return an offset into the constant string argument. */
9822 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
9823 return fold_convert_loc (loc, type, tem);
9824 }
9825
9826 if (p2[0] == '\0')
9827 /* strpbrk(x, "") == NULL.
9828 Evaluate and ignore s1 in case it had side-effects. */
9829 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
9830
9831 if (p2[1] != '\0')
9832 return NULL_TREE; /* Really call strpbrk. */
9833
9834 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
9835 if (!fn)
9836 return NULL_TREE;
9837
9838 /* New argument list transforming strpbrk(s1, s2) to
9839 strchr(s1, s2[0]). */
9840 return build_call_expr_loc (loc, fn, 2, s1,
9841 build_int_cst (integer_type_node, p2[0]));
9842 }
9843 }
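
/* Illustrative summary (editor's sketch):

     strpbrk ("abc", "ca")  -->  "abc" + 0       (both constant)
     strpbrk (s, "")        -->  NULL, with s still evaluated
     strpbrk (s, "c")       -->  strchr (s, 'c')  */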
9844
9845 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
9846 to the call.
9847
9848 Return NULL_TREE if no simplification was possible, otherwise return the
9849 simplified form of the call as a tree.
9850
9851 The simplified form may be a constant or other expression which
9852 computes the same value, but in a more efficient manner (including
9853 calls to other builtin functions).
9854
9855 The call may contain arguments which need to be evaluated, but
9856 which are not useful to determine the result of the call. In
9857 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9858 COMPOUND_EXPR will be an argument which must be evaluated.
9859 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9860 COMPOUND_EXPR in the chain will contain the tree for the simplified
9861 form of the builtin function call. */
9862
9863 static tree
9864 fold_builtin_strspn (location_t loc, tree s1, tree s2)
9865 {
9866 if (!validate_arg (s1, POINTER_TYPE)
9867 || !validate_arg (s2, POINTER_TYPE))
9868 return NULL_TREE;
9869 else
9870 {
9871 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
9872
9873 /* If both arguments are constants, evaluate at compile-time. */
9874 if (p1 && p2)
9875 {
9876 const size_t r = strspn (p1, p2);
9877 return build_int_cst (size_type_node, r);
9878 }
9879
9880 /* If either argument is "", return NULL_TREE. */
9881 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
9882 /* Evaluate and ignore both arguments in case either one has
9883 side-effects. */
9884 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
9885 s1, s2);
9886 return NULL_TREE;
9887 }
9888 }
9889
9890 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
9891 to the call.
9892
9893 Return NULL_TREE if no simplification was possible, otherwise return the
9894 simplified form of the call as a tree.
9895
9896 The simplified form may be a constant or other expression which
9897 computes the same value, but in a more efficient manner (including
9898 calls to other builtin functions).
9899
9900 The call may contain arguments which need to be evaluated, but
9901 which are not useful to determine the result of the call. In
9902 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9903 COMPOUND_EXPR will be an argument which must be evaluated.
9904 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9905 COMPOUND_EXPR in the chain will contain the tree for the simplified
9906 form of the builtin function call. */
9907
9908 static tree
9909 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
9910 {
9911 if (!validate_arg (s1, POINTER_TYPE)
9912 || !validate_arg (s2, POINTER_TYPE))
9913 return NULL_TREE;
9914 else
9915 {
9916 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
9917
9918 /* If both arguments are constants, evaluate at compile-time. */
9919 if (p1 && p2)
9920 {
9921 const size_t r = strcspn (p1, p2);
9922 return build_int_cst (size_type_node, r);
9923 }
9924
9925 /* If the first argument is "", return NULL_TREE. */
9926 if (p1 && *p1 == '\0')
9927 {
9928 /* Evaluate and ignore argument s2 in case it has
9929 side-effects. */
9930 return omit_one_operand_loc (loc, size_type_node,
9931 size_zero_node, s2);
9932 }
9933
9934 /* If the second argument is "", return __builtin_strlen(s1). */
9935 if (p2 && *p2 == '\0')
9936 {
9937 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
9938
9939 /* If the replacement _DECL isn't initialized, don't do the
9940 transformation. */
9941 if (!fn)
9942 return NULL_TREE;
9943
9944 return build_call_expr_loc (loc, fn, 1, s1);
9945 }
9946 return NULL_TREE;
9947 }
9948 }
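
/* Illustrative summary for strspn and strcspn above (editor's sketch):

     strspn ("aab", "a")   -->  2                (both constant)
     strspn (s, "")        -->  0, with both arguments still evaluated
     strcspn ("", s)       -->  0, with s still evaluated
     strcspn (s, "")       -->  strlen (s)  */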
9949
9950 /* Fold the next_arg or va_start call EXP.  Returns true if an error was
9951    produced, false otherwise.  This is done so that we don't output the
9952    error or warning more than once.  */
9953
9954 bool
9955 fold_builtin_next_arg (tree exp, bool va_start_p)
9956 {
9957 tree fntype = TREE_TYPE (current_function_decl);
9958 int nargs = call_expr_nargs (exp);
9959 tree arg;
9960   /* There is a good chance the current input_location points inside the
9961      definition of the va_start macro (perhaps on the token for the
9962      builtin) in a system header, so warnings will not be emitted.
9963 Use the location in real source code. */
9964 source_location current_location =
9965 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
9966 NULL);
9967
9968 if (!stdarg_p (fntype))
9969 {
9970 error ("%<va_start%> used in function with fixed args");
9971 return true;
9972 }
9973
9974 if (va_start_p)
9975 {
9976       if (nargs != 2)
9977 {
9978 error ("wrong number of arguments to function %<va_start%>");
9979 return true;
9980 }
9981 arg = CALL_EXPR_ARG (exp, 1);
9982 }
9983 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
9984 when we checked the arguments and if needed issued a warning. */
9985 else
9986 {
9987 if (nargs == 0)
9988 {
9989 /* Evidently an out of date version of <stdarg.h>; can't validate
9990 va_start's second argument, but can still work as intended. */
9991 warning_at (current_location,
9992 OPT_Wvarargs,
9993 "%<__builtin_next_arg%> called without an argument");
9994 return true;
9995 }
9996 else if (nargs > 1)
9997 {
9998 error ("wrong number of arguments to function %<__builtin_next_arg%>");
9999 return true;
10000 }
10001 arg = CALL_EXPR_ARG (exp, 0);
10002 }
10003
10004 if (TREE_CODE (arg) == SSA_NAME)
10005 arg = SSA_NAME_VAR (arg);
10006
10007 /* We destructively modify the call to be __builtin_va_start (ap, 0)
10008 or __builtin_next_arg (0) the first time we see it, after checking
10009 the arguments and if needed issuing a warning. */
10010 if (!integer_zerop (arg))
10011 {
10012 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
10013
10014 /* Strip off all nops for the sake of the comparison. This
10015 is not quite the same as STRIP_NOPS. It does more.
10016 We must also strip off INDIRECT_EXPR for C++ reference
10017 parameters. */
10018 while (CONVERT_EXPR_P (arg)
10019 || TREE_CODE (arg) == INDIRECT_REF)
10020 arg = TREE_OPERAND (arg, 0);
10021 if (arg != last_parm)
10022 {
10023 	  /* FIXME: Sometimes with the tree optimizers we can end up with
10024 	     something other than the last argument even though the user
10025 	     used the last argument.  We just warn and set the arg to be
10026 	     the last argument so that we will not get wrong code because
10027 	     of it.  */
10028 warning_at (current_location,
10029 OPT_Wvarargs,
10030 "second parameter of %<va_start%> not last named argument");
10031 }
10032
10033 /* Undefined by C99 7.15.1.4p4 (va_start):
10034 "If the parameter parmN is declared with the register storage
10035 class, with a function or array type, or with a type that is
10036 not compatible with the type that results after application of
10037 the default argument promotions, the behavior is undefined."
10038 */
10039 else if (DECL_REGISTER (arg))
10040 {
10041 warning_at (current_location,
10042 OPT_Wvarargs,
10043 "undefined behaviour when second parameter of "
10044 "%<va_start%> is declared with %<register%> storage");
10045 }
10046
10047 /* We want to verify the second parameter just once before the tree
10048 optimizers are run and then avoid keeping it in the tree,
10049 as otherwise we could warn even for correct code like:
10050 void foo (int i, ...)
10051 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
10052 if (va_start_p)
10053 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
10054 else
10055 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
10056 }
10057 return false;
10058 }
10059
10060
10061 /* Expand a call EXP to __builtin_object_size. */
10062
10063 static rtx
10064 expand_builtin_object_size (tree exp)
10065 {
10066 tree ost;
10067 int object_size_type;
10068 tree fndecl = get_callee_fndecl (exp);
10069
10070 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
10071 {
10072 error ("%Kfirst argument of %D must be a pointer, second integer constant",
10073 exp, fndecl);
10074 expand_builtin_trap ();
10075 return const0_rtx;
10076 }
10077
10078 ost = CALL_EXPR_ARG (exp, 1);
10079 STRIP_NOPS (ost);
10080
10081 if (TREE_CODE (ost) != INTEGER_CST
10082 || tree_int_cst_sgn (ost) < 0
10083 || compare_tree_int (ost, 3) > 0)
10084 {
10085 error ("%Klast argument of %D is not integer constant between 0 and 3",
10086 exp, fndecl);
10087 expand_builtin_trap ();
10088 return const0_rtx;
10089 }
10090
10091 object_size_type = tree_to_shwi (ost);
10092
10093 return object_size_type < 2 ? constm1_rtx : const0_rtx;
10094 }
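
/* A sketch of the fallback expansion above (hypothetical example): when
   the size was not folded to a constant earlier, the call expands to
   the documented "unknown" values,

     __builtin_object_size (p, 0)  -->  (size_t) -1
     __builtin_object_size (p, 3)  -->  (size_t) 0

   i.e. -1 for the maximum-size types 0 and 1 and 0 for the
   minimum-size types 2 and 3.  */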
10095
10096 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
10097 FCODE is the BUILT_IN_* to use.
10098 Return NULL_RTX if we failed; the caller should emit a normal call,
10099 otherwise try to get the result in TARGET, if convenient (and in
10100 mode MODE if that's convenient). */
10101
10102 static rtx
10103 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
10104 enum built_in_function fcode)
10105 {
10106 tree dest, src, len, size;
10107
10108 if (!validate_arglist (exp,
10109 POINTER_TYPE,
10110 fcode == BUILT_IN_MEMSET_CHK
10111 ? INTEGER_TYPE : POINTER_TYPE,
10112 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
10113 return NULL_RTX;
10114
10115 dest = CALL_EXPR_ARG (exp, 0);
10116 src = CALL_EXPR_ARG (exp, 1);
10117 len = CALL_EXPR_ARG (exp, 2);
10118 size = CALL_EXPR_ARG (exp, 3);
10119
10120 if (! tree_fits_uhwi_p (size))
10121 return NULL_RTX;
10122
10123 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
10124 {
10125 tree fn;
10126
10127 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
10128 {
10129 warning_at (tree_nonartificial_location (exp),
10130 0, "%Kcall to %D will always overflow destination buffer",
10131 exp, get_callee_fndecl (exp));
10132 return NULL_RTX;
10133 }
10134
10135 fn = NULL_TREE;
10136 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
10137 mem{cpy,pcpy,move,set} is available. */
10138 switch (fcode)
10139 {
10140 case BUILT_IN_MEMCPY_CHK:
10141 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
10142 break;
10143 case BUILT_IN_MEMPCPY_CHK:
10144 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
10145 break;
10146 case BUILT_IN_MEMMOVE_CHK:
10147 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
10148 break;
10149 case BUILT_IN_MEMSET_CHK:
10150 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
10151 break;
10152 default:
10153 break;
10154 }
10155
10156 if (! fn)
10157 return NULL_RTX;
10158
10159 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
10160 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
10161 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
10162 return expand_expr (fn, target, mode, EXPAND_NORMAL);
10163 }
10164 else if (fcode == BUILT_IN_MEMSET_CHK)
10165 return NULL_RTX;
10166 else
10167 {
10168 unsigned int dest_align = get_pointer_alignment (dest);
10169
10170 /* If DEST is not a pointer type, call the normal function. */
10171 if (dest_align == 0)
10172 return NULL_RTX;
10173
10174 /* If SRC and DEST are the same (and not volatile), do nothing. */
10175 if (operand_equal_p (src, dest, 0))
10176 {
10177 tree expr;
10178
10179 if (fcode != BUILT_IN_MEMPCPY_CHK)
10180 {
10181 /* Evaluate and ignore LEN in case it has side-effects. */
10182 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
10183 return expand_expr (dest, target, mode, EXPAND_NORMAL);
10184 }
10185
10186 expr = fold_build_pointer_plus (dest, len);
10187 return expand_expr (expr, target, mode, EXPAND_NORMAL);
10188 }
10189
10190 /* __memmove_chk special case. */
10191 if (fcode == BUILT_IN_MEMMOVE_CHK)
10192 {
10193 unsigned int src_align = get_pointer_alignment (src);
10194
10195 if (src_align == 0)
10196 return NULL_RTX;
10197
10198 /* If src is categorized for a readonly section we can use
10199 normal __memcpy_chk. */
10200 if (readonly_data_expr (src))
10201 {
10202 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
10203 if (!fn)
10204 return NULL_RTX;
10205 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
10206 dest, src, len, size);
10207 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
10208 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
10209 return expand_expr (fn, target, mode, EXPAND_NORMAL);
10210 }
10211 }
10212 return NULL_RTX;
10213 }
10214 }
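
/* Illustrative sketch of the main transformation above (hypothetical
   values): once LEN is known to fit in the destination,

     __memcpy_chk (dst, src, 16, 32)  -->  memcpy (dst, src, 16)

   while a provable overflow such as  __memcpy_chk (dst, src, 64, 32)
   only warns and leaves the checking call in place.  */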
10215
10216 /* Emit warning if a buffer overflow is detected at compile time. */
10217
10218 static void
10219 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
10220 {
10221 int is_strlen = 0;
10222 tree len, size;
10223 location_t loc = tree_nonartificial_location (exp);
10224
10225 switch (fcode)
10226 {
10227 case BUILT_IN_STRCPY_CHK:
10228 case BUILT_IN_STPCPY_CHK:
10229 /* For __strcat_chk the warning will be emitted only if overflowing
10230 by at least strlen (dest) + 1 bytes. */
10231 case BUILT_IN_STRCAT_CHK:
10232 len = CALL_EXPR_ARG (exp, 1);
10233 size = CALL_EXPR_ARG (exp, 2);
10234 is_strlen = 1;
10235 break;
10236 case BUILT_IN_STRNCAT_CHK:
10237 case BUILT_IN_STRNCPY_CHK:
10238 case BUILT_IN_STPNCPY_CHK:
10239 len = CALL_EXPR_ARG (exp, 2);
10240 size = CALL_EXPR_ARG (exp, 3);
10241 break;
10242 case BUILT_IN_SNPRINTF_CHK:
10243 case BUILT_IN_VSNPRINTF_CHK:
10244 len = CALL_EXPR_ARG (exp, 1);
10245 size = CALL_EXPR_ARG (exp, 3);
10246 break;
10247 default:
10248 gcc_unreachable ();
10249 }
10250
10251 if (!len || !size)
10252 return;
10253
10254 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
10255 return;
10256
10257 if (is_strlen)
10258 {
10259 len = c_strlen (len, 1);
10260 if (! len || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
10261 return;
10262 }
10263 else if (fcode == BUILT_IN_STRNCAT_CHK)
10264 {
10265 tree src = CALL_EXPR_ARG (exp, 1);
10266 if (! src || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
10267 return;
10268 src = c_strlen (src, 1);
10269 if (! src || ! tree_fits_uhwi_p (src))
10270 {
10271 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
10272 exp, get_callee_fndecl (exp));
10273 return;
10274 }
10275 else if (tree_int_cst_lt (src, size))
10276 return;
10277 }
10278 else if (! tree_fits_uhwi_p (len) || ! tree_int_cst_lt (size, len))
10279 return;
10280
10281 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
10282 exp, get_callee_fndecl (exp));
10283 }
10284
10285 /* Emit warning if a buffer overflow is detected at compile time
10286 in __sprintf_chk/__vsprintf_chk calls. */
10287
10288 static void
10289 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
10290 {
10291 tree size, len, fmt;
10292 const char *fmt_str;
10293 int nargs = call_expr_nargs (exp);
10294
10295 /* Verify the required arguments in the original call. */
10296
10297 if (nargs < 4)
10298 return;
10299 size = CALL_EXPR_ARG (exp, 2);
10300 fmt = CALL_EXPR_ARG (exp, 3);
10301
10302 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
10303 return;
10304
10305 /* Check whether the format is a literal string constant. */
10306 fmt_str = c_getstr (fmt);
10307 if (fmt_str == NULL)
10308 return;
10309
10310 if (!init_target_chars ())
10311 return;
10312
10313 /* If the format doesn't contain % args or %%, we know its size. */
10314 if (strchr (fmt_str, target_percent) == 0)
10315 len = build_int_cstu (size_type_node, strlen (fmt_str));
10316 /* If the format is "%s" and first ... argument is a string literal,
10317 we know it too. */
10318 else if (fcode == BUILT_IN_SPRINTF_CHK
10319 && strcmp (fmt_str, target_percent_s) == 0)
10320 {
10321 tree arg;
10322
10323 if (nargs < 5)
10324 return;
10325 arg = CALL_EXPR_ARG (exp, 4);
10326 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
10327 return;
10328
10329 len = c_strlen (arg, 1);
10330 if (!len || ! tree_fits_uhwi_p (len))
10331 return;
10332 }
10333 else
10334 return;
10335
10336 if (! tree_int_cst_lt (len, size))
10337 warning_at (tree_nonartificial_location (exp),
10338 0, "%Kcall to %D will always overflow destination buffer",
10339 exp, get_callee_fndecl (exp));
10340 }
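
/* Illustrative example (hypothetical): with

     char buf[4];
     __builtin___sprintf_chk (buf, 0, sizeof buf, "hello");

   the format contains no % directives, so LEN is 5; since 5 >= 4 the
   "will always overflow destination buffer" warning is emitted.  */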
10341
10342 /* Emit warning if a free is called with address of a variable. */
10343
10344 static void
10345 maybe_emit_free_warning (tree exp)
10346 {
10347 tree arg = CALL_EXPR_ARG (exp, 0);
10348
10349 STRIP_NOPS (arg);
10350 if (TREE_CODE (arg) != ADDR_EXPR)
10351 return;
10352
10353 arg = get_base_address (TREE_OPERAND (arg, 0));
10354 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
10355 return;
10356
10357 if (SSA_VAR_P (arg))
10358 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
10359 "%Kattempt to free a non-heap object %qD", exp, arg);
10360 else
10361 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
10362 "%Kattempt to free a non-heap object", exp);
10363 }
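
/* Illustrative example: the warning fires for code such as

     int i;
     free (&i);

   but not for pointers whose provenance cannot be seen here.  */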
10364
10365 /* Fold a call to __builtin_object_size with arguments PTR and OST,
10366 if possible. */
10367
10368 static tree
10369 fold_builtin_object_size (tree ptr, tree ost)
10370 {
10371 unsigned HOST_WIDE_INT bytes;
10372 int object_size_type;
10373
10374 if (!validate_arg (ptr, POINTER_TYPE)
10375 || !validate_arg (ost, INTEGER_TYPE))
10376 return NULL_TREE;
10377
10378 STRIP_NOPS (ost);
10379
10380 if (TREE_CODE (ost) != INTEGER_CST
10381 || tree_int_cst_sgn (ost) < 0
10382 || compare_tree_int (ost, 3) > 0)
10383 return NULL_TREE;
10384
10385 object_size_type = tree_to_shwi (ost);
10386
10387 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
10388 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
10389 and (size_t) 0 for types 2 and 3. */
10390 if (TREE_SIDE_EFFECTS (ptr))
10391 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
10392
10393 if (TREE_CODE (ptr) == ADDR_EXPR)
10394 {
10395 bytes = compute_builtin_object_size (ptr, object_size_type);
10396 if (wi::fits_to_tree_p (bytes, size_type_node))
10397 return build_int_cstu (size_type_node, bytes);
10398 }
10399 else if (TREE_CODE (ptr) == SSA_NAME)
10400 {
10401 /* If object size is not known yet, delay folding until
10402 later. Maybe subsequent passes will help determine
10403 it. */
10404 bytes = compute_builtin_object_size (ptr, object_size_type);
10405 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
10406 && wi::fits_to_tree_p (bytes, size_type_node))
10407 return build_int_cstu (size_type_node, bytes);
10408 }
10409
10410 return NULL_TREE;
10411 }
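
/* Worked example (illustrative): for

     char buf[64];
     size_t n = __builtin_object_size (&buf[16], 0);

   PTR is an ADDR_EXPR, compute_builtin_object_size returns 48 (the
   bytes remaining in buf), 48 fits size_type_node, and the call folds
   to the size_t constant 48.  */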
10412
10413 /* Builtins with folding operations that operate on "..." arguments
10414 need special handling; we need to store the arguments in a convenient
10415 data structure before attempting any folding. Fortunately there are
10416 only a few builtins that fall into this category. FNDECL is the
10417 function, EXP is the CALL_EXPR for the call. */
10418
10419 static tree
10420 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
10421 {
10422 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10423 tree ret = NULL_TREE;
10424
10425 switch (fcode)
10426 {
10427 case BUILT_IN_FPCLASSIFY:
10428 ret = fold_builtin_fpclassify (loc, args, nargs);
10429 break;
10430
10431 default:
10432 break;
10433 }
10434 if (ret)
10435 {
10436 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10437 SET_EXPR_LOCATION (ret, loc);
10438 TREE_NO_WARNING (ret) = 1;
10439 return ret;
10440 }
10441 return NULL_TREE;
10442 }
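
/* For example (illustrative), a call written as

     __builtin_fpclassify (FP_NAN, FP_INFINITE, FP_NORMAL,
                           FP_SUBNORMAL, FP_ZERO, 1.0)

   reaches the BUILT_IN_FPCLASSIFY case above and folds via
   fold_builtin_fpclassify to the FP_NORMAL argument.  */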
10443
10444 /* Initialize format string characters in the target charset. */
10445
10446 bool
10447 init_target_chars (void)
10448 {
10449 static bool init;
10450 if (!init)
10451 {
10452 target_newline = lang_hooks.to_target_charset ('\n');
10453 target_percent = lang_hooks.to_target_charset ('%');
10454 target_c = lang_hooks.to_target_charset ('c');
10455 target_s = lang_hooks.to_target_charset ('s');
10456 if (target_newline == 0 || target_percent == 0 || target_c == 0
10457 || target_s == 0)
10458 return false;
10459
10460 target_percent_c[0] = target_percent;
10461 target_percent_c[1] = target_c;
10462 target_percent_c[2] = '\0';
10463
10464 target_percent_s[0] = target_percent;
10465 target_percent_s[1] = target_s;
10466 target_percent_s[2] = '\0';
10467
10468 target_percent_s_newline[0] = target_percent;
10469 target_percent_s_newline[1] = target_s;
10470 target_percent_s_newline[2] = target_newline;
10471 target_percent_s_newline[3] = '\0';
10472
10473 init = true;
10474 }
10475 return true;
10476 }
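
/* Usage note (sketch): callers that match format strings, such as
   maybe_emit_sprintf_chk_warning above, call this first and then
   compare against the cached strings:

     if (!init_target_chars ())
       return;
     if (strcmp (fmt_str, target_percent_s) == 0)
       ...  the format is "%s" in the target charset  ...  */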
10477
10478 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
10479 and no overflow/underflow occurred. INEXACT is true if M was not
10480 exactly calculated. TYPE is the tree type for the result. This
10481 function assumes that the caller cleared the MPFR flags and then
10482 calculated M, so any flag set since the clearing was raised by
10483 that calculation. Return NULL_TREE if any checks fail. */
10484
10485 static tree
10486 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
10487 {
10488 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
10489 overflow/underflow occurred. If -frounding-math, proceed iff the
10490 result of calling FUNC was exact. */
10491 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
10492 && (!flag_rounding_math || !inexact))
10493 {
10494 REAL_VALUE_TYPE rr;
10495
10496 real_from_mpfr (&rr, m, type, GMP_RNDN);
10497 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
10498 checking for overflow/underflow. If the REAL_VALUE_TYPE is zero
10499 but the mpfr_t is not, then we underflowed in the
10500 conversion. */
10501 if (real_isfinite (&rr)
10502 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
10503 {
10504 REAL_VALUE_TYPE rmode;
10505
10506 real_convert (&rmode, TYPE_MODE (type), &rr);
10507 /* Proceed iff the specified mode can hold the value. */
10508 if (real_identical (&rmode, &rr))
10509 return build_real (type, rmode);
10510 }
10511 }
10512 return NULL_TREE;
10513 }
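
/* For instance (illustrative), when folding sqrtf (2.0f) the caller
   computes M = sqrt(2) at SFmode's 24-bit precision: M is a normal
   number, no overflow/underflow flag is set, and converting M through
   REAL_VALUE_TYPE back to SFmode is exact, so a float REAL_CST is
   returned -- except under -frounding-math, because sqrt(2) is
   inexact.  */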
10514
10515 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
10516 number and no overflow/underflow occurred. INEXACT is true if M
10517 was not exactly calculated. TYPE is the tree type for the result.
10518 This function assumes that the caller cleared the MPFR flags and
10519 then calculated M, so any flag set since the clearing was raised
10520 by that calculation. Return NULL_TREE if any checks fail; if
10521 FORCE_CONVERT is true, bypass the checks. */
10522
10523 static tree
10524 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
10525 {
10526 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
10527 overflow/underflow occurred. If -frounding-math, proceed iff the
10528 result of calling FUNC was exact. */
10529 if (force_convert
10530 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
10531 && !mpfr_overflow_p () && !mpfr_underflow_p ()
10532 && (!flag_rounding_math || !inexact)))
10533 {
10534 REAL_VALUE_TYPE re, im;
10535
10536 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
10537 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
10538 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
10539 checking for overflow/underflow. If the REAL_VALUE_TYPE is zero
10540 but the mpfr_t is not, then we underflowed in the
10541 conversion. */
10542 if (force_convert
10543 || (real_isfinite (&re) && real_isfinite (&im)
10544 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
10545 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
10546 {
10547 REAL_VALUE_TYPE re_mode, im_mode;
10548
10549 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
10550 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
10551 /* Proceed iff the specified mode can hold the value. */
10552 if (force_convert
10553 || (real_identical (&re_mode, &re)
10554 && real_identical (&im_mode, &im)))
10555 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
10556 build_real (TREE_TYPE (type), im_mode));
10557 }
10558 }
10559 return NULL_TREE;
10560 }
10561
10562 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
10563 FUNC on it and return the resulting value as a tree with type TYPE.
10564 If MIN and/or MAX are not NULL, then the supplied ARG must be
10565 within those bounds. If INCLUSIVE is true, then MIN/MAX are
10566 acceptable values, otherwise they are not. The mpfr precision is
10567 set to the precision of TYPE. We assume that function FUNC returns
10568 zero if the result could be calculated exactly within the requested
10569 precision. */
10570
10571 static tree
10572 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
10573 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
10574 bool inclusive)
10575 {
10576 tree result = NULL_TREE;
10577
10578 STRIP_NOPS (arg);
10579
10580 /* To proceed, MPFR must exactly represent the target floating point
10581 format, which only happens when the target base equals two. */
10582 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10583 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
10584 {
10585 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
10586
10587 if (real_isfinite (ra)
10588 && (!min || real_compare (inclusive ? GE_EXPR : GT_EXPR, ra, min))
10589 && (!max || real_compare (inclusive ? LE_EXPR : LT_EXPR, ra, max)))
10590 {
10591 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10592 const int prec = fmt->p;
10593 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
10594 int inexact;
10595 mpfr_t m;
10596
10597 mpfr_init2 (m, prec);
10598 mpfr_from_real (m, ra, GMP_RNDN);
10599 mpfr_clear_flags ();
10600 inexact = func (m, m, rnd);
10601 result = do_mpfr_ckconv (m, type, inexact);
10602 mpfr_clear (m);
10603 }
10604 }
10605
10606 return result;
10607 }
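
/* A typical call (sketch, mirroring how an acos folder would restrict
   the domain to [-1, 1] inclusive):

     do_mpfr_arg1 (arg, type, mpfr_acos, &dconstm1, &dconst1, true);

   dconstm1 and dconst1 are GCC's REAL_VALUE_TYPE constants for -1.0
   and 1.0.  */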
10608
10609 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
10610 FUNC on it and return the resulting value as a tree with type TYPE.
10611 The mpfr precision is set to the precision of TYPE. We assume that
10612 function FUNC returns zero if the result could be calculated
10613 exactly within the requested precision. */
10614
10615 static tree
10616 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
10617 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
10618 {
10619 tree result = NULL_TREE;
10620
10621 STRIP_NOPS (arg1);
10622 STRIP_NOPS (arg2);
10623
10624 /* To proceed, MPFR must exactly represent the target floating point
10625 format, which only happens when the target base equals two. */
10626 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10627 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
10628 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
10629 {
10630 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
10631 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
10632
10633 if (real_isfinite (ra1) && real_isfinite (ra2))
10634 {
10635 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10636 const int prec = fmt->p;
10637 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
10638 int inexact;
10639 mpfr_t m1, m2;
10640
10641 mpfr_inits2 (prec, m1, m2, NULL);
10642 mpfr_from_real (m1, ra1, GMP_RNDN);
10643 mpfr_from_real (m2, ra2, GMP_RNDN);
10644 mpfr_clear_flags ();
10645 inexact = func (m1, m1, m2, rnd);
10646 result = do_mpfr_ckconv (m1, type, inexact);
10647 mpfr_clears (m1, m2, NULL);
10648 }
10649 }
10650
10651 return result;
10652 }
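
/* Sketch of a caller; atan2 needs no domain restriction, so folding
   constant arguments is simply

     do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);

   mpfr_atan2 matches the required
   int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t) signature.  */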
10653
10654 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
10655 FUNC on it and return the resulting value as a tree with type TYPE.
10656 The mpfr precision is set to the precision of TYPE. We assume that
10657 function FUNC returns zero if the result could be calculated
10658 exactly within the requested precision. */
10659
10660 static tree
10661 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
10662 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
10663 {
10664 tree result = NULL_TREE;
10665
10666 STRIP_NOPS (arg1);
10667 STRIP_NOPS (arg2);
10668 STRIP_NOPS (arg3);
10669
10670 /* To proceed, MPFR must exactly represent the target floating point
10671 format, which only happens when the target base equals two. */
10672 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10673 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
10674 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
10675 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
10676 {
10677 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
10678 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
10679 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
10680
10681 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
10682 {
10683 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10684 const int prec = fmt->p;
10685 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
10686 int inexact;
10687 mpfr_t m1, m2, m3;
10688
10689 mpfr_inits2 (prec, m1, m2, m3, NULL);
10690 mpfr_from_real (m1, ra1, GMP_RNDN);
10691 mpfr_from_real (m2, ra2, GMP_RNDN);
10692 mpfr_from_real (m3, ra3, GMP_RNDN);
10693 mpfr_clear_flags ();
10694 inexact = func (m1, m1, m2, m3, rnd);
10695 result = do_mpfr_ckconv (m1, type, inexact);
10696 mpfr_clears (m1, m2, m3, NULL);
10697 }
10698 }
10699
10700 return result;
10701 }
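
/* Sketch of a caller, folding fma (x, y, z) on constant arguments:

     do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);

   mpfr_fma computes x*y + z with a single rounding, matching the
   semantics of the fma builtin.  */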
10702
10703 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
10704 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
10705 If ARG_SINP and ARG_COSP are NULL then the result is returned
10706 as a complex value.
10707 The type is taken from the type of ARG and is used for setting the
10708 precision of the calculation and results. */
10709
10710 static tree
10711 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
10712 {
10713 tree const type = TREE_TYPE (arg);
10714 tree result = NULL_TREE;
10715
10716 STRIP_NOPS (arg);
10717
10718 /* To proceed, MPFR must exactly represent the target floating point
10719 format, which only happens when the target base equals two. */
10720 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10721 && TREE_CODE (arg) == REAL_CST
10722 && !TREE_OVERFLOW (arg))
10723 {
10724 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
10725
10726 if (real_isfinite (ra))
10727 {
10728 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10729 const int prec = fmt->p;
10730 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
10731 tree result_s, result_c;
10732 int inexact;
10733 mpfr_t m, ms, mc;
10734
10735 mpfr_inits2 (prec, m, ms, mc, NULL);
10736 mpfr_from_real (m, ra, GMP_RNDN);
10737 mpfr_clear_flags ();
10738 inexact = mpfr_sin_cos (ms, mc, m, rnd);
10739 result_s = do_mpfr_ckconv (ms, type, inexact);
10740 result_c = do_mpfr_ckconv (mc, type, inexact);
10741 mpfr_clears (m, ms, mc, NULL);
10742 if (result_s && result_c)
10743 {
10744 /* If we are to return a complex value, do so. */
10745 if (!arg_sinp && !arg_cosp)
10746 return build_complex (build_complex_type (type),
10747 result_c, result_s);
10748
10749 /* Dereference the sin/cos pointer arguments. */
10750 arg_sinp = build_fold_indirect_ref (arg_sinp);
10751 arg_cosp = build_fold_indirect_ref (arg_cosp);
10752 /* Proceed iff valid pointer types were passed in. */
10753 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
10754 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
10755 {
10756 /* Set the values. */
10757 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
10758 result_s);
10759 TREE_SIDE_EFFECTS (result_s) = 1;
10760 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
10761 result_c);
10762 TREE_SIDE_EFFECTS (result_c) = 1;
10763 /* Combine the assignments into a compound expr. */
10764 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
10765 result_s, result_c));
10766 }
10767 }
10768 }
10769 }
10770 return result;
10771 }
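
/* Two illustrative uses (sketch): a sincos folder passes both pointer
   arguments, as in

     do_mpfr_sincos (arg, arg_sin, arg_cos);

   while a cexpi folder passes NULL_TREE for both and receives a
   COMPLEX_CST; note build_complex above puts cos in the real part and
   sin in the imaginary part, matching cexpi(x) = cos(x) + i*sin(x).  */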
10772
10773 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
10774 two-argument mpfr order N Bessel function FUNC on them and return
10775 the resulting value as a tree with type TYPE. The mpfr precision
10776 is set to the precision of TYPE. We assume that function FUNC
10777 returns zero if the result could be calculated exactly within the
10778 requested precision. */
10779 static tree
10780 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
10781 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
10782 const REAL_VALUE_TYPE *min, bool inclusive)
10783 {
10784 tree result = NULL_TREE;
10785
10786 STRIP_NOPS (arg1);
10787 STRIP_NOPS (arg2);
10788
10789 /* To proceed, MPFR must exactly represent the target floating point
10790 format, which only happens when the target base equals two. */
10791 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10792 && tree_fits_shwi_p (arg1)
10793 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
10794 {
10795 const HOST_WIDE_INT n = tree_to_shwi (arg1);
10796 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
10797
10798 if (n == (long)n
10799 && real_isfinite (ra)
10800 && (!min || real_compare (inclusive ? GE_EXPR : GT_EXPR, ra, min)))
10801 {
10802 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10803 const int prec = fmt->p;
10804 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
10805 int inexact;
10806 mpfr_t m;
10807
10808 mpfr_init2 (m, prec);
10809 mpfr_from_real (m, ra, GMP_RNDN);
10810 mpfr_clear_flags ();
10811 inexact = func (m, n, m, rnd);
10812 result = do_mpfr_ckconv (m, type, inexact);
10813 mpfr_clear (m);
10814 }
10815 }
10816
10817 return result;
10818 }
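
/* Sketch of callers: jn is defined for all real arguments, while yn
   requires a positive argument, so plausible uses are

     do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, false);
     do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn, &dconst0, false);

   with dconst0 (GCC's 0.0) as an exclusive lower bound for yn.  */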
10819
10820 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
10821 the value *(ARG_QUO) and return the remainder. The type is taken
10822 from the type of ARG0 and is used for setting the precision of the
10823 calculation and results. */
10824
10825 static tree
10826 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
10827 {
10828 tree const type = TREE_TYPE (arg0);
10829 tree result = NULL_TREE;
10830
10831 STRIP_NOPS (arg0);
10832 STRIP_NOPS (arg1);
10833
10834 /* To proceed, MPFR must exactly represent the target floating point
10835 format, which only happens when the target base equals two. */
10836 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10837 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
10838 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
10839 {
10840 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
10841 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
10842
10843 if (real_isfinite (ra0) && real_isfinite (ra1))
10844 {
10845 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10846 const int prec = fmt->p;
10847 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
10848 tree result_rem;
10849 long integer_quo;
10850 mpfr_t m0, m1;
10851
10852 mpfr_inits2 (prec, m0, m1, NULL);
10853 mpfr_from_real (m0, ra0, GMP_RNDN);
10854 mpfr_from_real (m1, ra1, GMP_RNDN);
10855 mpfr_clear_flags ();
10856 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
10857 /* Remquo is independent of the rounding mode, so pass
10858 inexact=0 to do_mpfr_ckconv(). */
10859 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
10860 mpfr_clears (m0, m1, NULL);
10861 if (result_rem)
10862 {
10863 /* MPFR calculates quo in the host's long so it may
10864 return more bits in quo than the target int can hold
10865 if sizeof(host long) > sizeof(target int). This can
10866 happen even for native compilers in LP64 mode. In
10867 these cases, reduce the quo value modulo the largest
10868 number that the target int can hold, leaving one
10869 bit for the sign. */
10870 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
10871 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
10872
10873 /* Dereference the quo pointer argument. */
10874 arg_quo = build_fold_indirect_ref (arg_quo);
10875 /* Proceed iff a valid pointer type was passed in. */
10876 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
10877 {
10878 /* Set the value. */
10879 tree result_quo
10880 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
10881 build_int_cst (TREE_TYPE (arg_quo),
10882 integer_quo));
10883 TREE_SIDE_EFFECTS (result_quo) = 1;
10884 /* Combine the quo assignment with the rem. */
10885 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
10886 result_quo, result_rem));
10887 }
10888 }
10889 }
10890 }
10891 return result;
10892 }
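
/* Worked example (illustrative): folding remquo (5.0, 3.0, &q)
   computes 5 = 2*3 + (-1) with the quotient rounded to nearest, so
   the call folds to a compound expression that stores 2 in *q and
   yields the remainder -1.0.  */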
10893
10894 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
10895 resulting value as a tree with type TYPE. The mpfr precision is
10896 set to the precision of TYPE. We assume that this mpfr function
10897 returns zero if the result could be calculated exactly within the
10898 requested precision. In addition, the integer pointer represented
10899 by ARG_SG will be dereferenced and set to the appropriate signgam
10900 (-1,1) value. */
10901
10902 static tree
10903 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
10904 {
10905 tree result = NULL_TREE;
10906
10907 STRIP_NOPS (arg);
10908
10909 /* To proceed, MPFR must exactly represent the target floating point
10910 format, which only happens when the target base equals two. Also
10911 verify ARG is a constant and that ARG_SG is an int pointer. */
10912 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10913 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
10914 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
10915 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
10916 {
10917 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
10918
10919 /* In addition to being finite (not NaN or Inf), the argument cannot
10920 be zero or a negative integer, where lgamma has poles. */
10921 if (real_isfinite (ra)
10922 && ra->cl != rvc_zero
10923 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
10924 {
10925 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10926 const int prec = fmt->p;
10927 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
10928 int inexact, sg;
10929 mpfr_t m;
10930 tree result_lg;
10931
10932 mpfr_init2 (m, prec);
10933 mpfr_from_real (m, ra, GMP_RNDN);
10934 mpfr_clear_flags ();
10935 inexact = mpfr_lgamma (m, &sg, m, rnd);
10936 result_lg = do_mpfr_ckconv (m, type, inexact);
10937 mpfr_clear (m);
10938 if (result_lg)
10939 {
10940 tree result_sg;
10941
10942 /* Dereference the arg_sg pointer argument. */
10943 arg_sg = build_fold_indirect_ref (arg_sg);
10944 /* Assign the signgam value into *arg_sg. */
10945 result_sg = fold_build2 (MODIFY_EXPR,
10946 TREE_TYPE (arg_sg), arg_sg,
10947 build_int_cst (TREE_TYPE (arg_sg), sg));
10948 TREE_SIDE_EFFECTS (result_sg) = 1;
10949 /* Combine the signgam assignment with the lgamma result. */
10950 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
10951 result_sg, result_lg));
10952 }
10953 }
10954 }
10955
10956 return result;
10957 }
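
/* Worked example (illustrative): folding lgamma_r (10.0, &sg) uses
   gamma(10) = 9! = 362880, which is positive, so the call folds to a
   compound expression that stores 1 in *sg and yields
   log (362880.0), approximately 12.8018.  */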
10958
10959 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
10960 function FUNC on it and return the resulting value as a tree with
10961 type TYPE. The mpfr precision is set to the precision of TYPE. We
10962 assume that function FUNC returns zero if the result could be
10963 calculated exactly within the requested precision. */
10964
10965 static tree
10966 do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
10967 {
10968 tree result = NULL_TREE;
10969
10970 STRIP_NOPS (arg);
10971
10972 /* To proceed, MPFR must exactly represent the target floating point
10973 format, which only happens when the target base equals two. */
10974 if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
10975 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
10976 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
10977 {
10978 const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
10979 const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
10980
10981 if (real_isfinite (re) && real_isfinite (im))
10982 {
10983 const struct real_format *const fmt =
10984 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
10985 const int prec = fmt->p;
10986 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
10987 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
10988 int inexact;
10989 mpc_t m;
10990
10991 mpc_init2 (m, prec);
10992 mpfr_from_real (mpc_realref (m), re, rnd);
10993 mpfr_from_real (mpc_imagref (m), im, rnd);
10994 mpfr_clear_flags ();
10995 inexact = func (m, m, crnd);
10996 result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
10997 mpc_clear (m);
10998 }
10999 }
11000
11001 return result;
11002 }
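
/* Sketch of a caller, folding csin on a COMPLEX_CST argument:

     do_mpc_arg1 (arg, type, mpc_sin);

   mpc_sin matches the required
   int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t) signature.  */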
11003
11004 /* If arguments ARG0 and ARG1 are COMPLEX_CSTs, call the two-argument
11005 mpc function FUNC on them and return the resulting value as a tree
11006 with type TYPE. The mpfr precision is set to the precision of
11007 TYPE. We assume that function FUNC returns zero if the result
11008 could be calculated exactly within the requested precision. If
11009 DO_NONFINITE is true, then fold expressions containing Inf or NaN
11010 in the arguments and/or results. */
11011
11012 tree
11013 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
11014 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
11015 {
11016 tree result = NULL_TREE;
11017
11018 STRIP_NOPS (arg0);
11019 STRIP_NOPS (arg1);
11020
11021 /* To proceed, MPFR must exactly represent the target floating point
11022 format, which only happens when the target base equals two. */
11023 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
11024 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
11025 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
11026 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
11027 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
11028 {
11029 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
11030 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
11031 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
11032 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
11033
11034 if (do_nonfinite
11035 || (real_isfinite (re0) && real_isfinite (im0)
11036 && real_isfinite (re1) && real_isfinite (im1)))
11037 {
11038 const struct real_format *const fmt =
11039 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
11040 const int prec = fmt->p;
11041 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
11042 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
11043 int inexact;
11044 mpc_t m0, m1;
11045
11046 mpc_init2 (m0, prec);
11047 mpc_init2 (m1, prec);
11048 mpfr_from_real (mpc_realref (m0), re0, rnd);
11049 mpfr_from_real (mpc_imagref (m0), im0, rnd);
11050 mpfr_from_real (mpc_realref (m1), re1, rnd);
11051 mpfr_from_real (mpc_imagref (m1), im1, rnd);
11052 mpfr_clear_flags ();
11053 inexact = func (m0, m0, m1, crnd);
11054 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
11055 mpc_clear (m0);
11056 mpc_clear (m1);
11057 }
11058 }
11059
11060 return result;
11061 }
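
/* Sketch of a caller, folding cpow on two COMPLEX_CST arguments:

     do_mpc_arg2 (arg0, arg1, type, 0, mpc_pow);

   Passing a nonzero DO_NONFINITE instead (e.g. when folding
   initializers) also folds arguments or results containing Inf or
   NaN.  */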
11062
11063 /* A wrapper function for builtin folding that prevents warnings for
11064 "statement without effect" and the like, caused by removing the
11065 call node before the warning is generated. */
11066
11067 tree
11068 fold_call_stmt (gcall *stmt, bool ignore)
11069 {
11070 tree ret = NULL_TREE;
11071 tree fndecl = gimple_call_fndecl (stmt);
11072 location_t loc = gimple_location (stmt);
11073 if (fndecl
11074 && TREE_CODE (fndecl) == FUNCTION_DECL
11075 && DECL_BUILT_IN (fndecl)
11076 && !gimple_call_va_arg_pack_p (stmt))
11077 {
11078 int nargs = gimple_call_num_args (stmt);
11079 tree *args = (nargs > 0
11080 ? gimple_call_arg_ptr (stmt, 0)
11081 : &error_mark_node);
11082
11083 if (avoid_folding_inline_builtin (fndecl))
11084 return NULL_TREE;
11085 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11086 {
11087 return targetm.fold_builtin (fndecl, nargs, args, ignore);
11088 }
11089 else
11090 {
11091 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
11092 if (ret)
11093 {
11094 /* Propagate location information from original call to
11095 expansion of builtin. Otherwise things like
11096 maybe_emit_chk_warning, that operate on the expansion
11097 of a builtin, will use the wrong location information. */
11098 if (gimple_has_location (stmt))
11099 {
11100 tree realret = ret;
11101 if (TREE_CODE (ret) == NOP_EXPR)
11102 realret = TREE_OPERAND (ret, 0);
11103 if (CAN_HAVE_LOCATION_P (realret)
11104 && !EXPR_HAS_LOCATION (realret))
11105 SET_EXPR_LOCATION (realret, loc);
11106 return realret;
11107 }
11108 return ret;
11109 }
11110 }
11111 }
11112 return NULL_TREE;
11113 }
11114
11115 /* Look up the function in builtin_decl that corresponds to DECL
11116 and set ASMSPEC as its user assembler name. DECL must be a
11117 function decl that declares a builtin. */
11118
11119 void
11120 set_builtin_user_assembler_name (tree decl, const char *asmspec)
11121 {
11122 tree builtin;
11123 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
11124 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
11125 && asmspec != 0);
11126
11127 builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
11128 set_user_assembler_name (builtin, asmspec);
11129 switch (DECL_FUNCTION_CODE (decl))
11130 {
11131 case BUILT_IN_MEMCPY:
11132 init_block_move_fn (asmspec);
11133 memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
11134 break;
11135 case BUILT_IN_MEMSET:
11136 init_block_clear_fn (asmspec);
11137 memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
11138 break;
11139 case BUILT_IN_MEMMOVE:
11140 memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
11141 break;
11142 case BUILT_IN_MEMCMP:
11143 memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
11144 break;
11145 case BUILT_IN_ABORT:
11146 abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
11147 break;
11148 case BUILT_IN_FFS:
11149 if (INT_TYPE_SIZE < BITS_PER_WORD)
11150 {
11151 set_user_assembler_libfunc ("ffs", asmspec);
11152 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
11153 MODE_INT, 0), "ffs");
11154 }
11155 break;
11156 default:
11157 break;
11158 }
11159 }
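
/* For example (hypothetical user code), a translation unit containing

     void *memcpy (void *, const void *, __SIZE_TYPE__)
       __asm__ ("xmemcpy");

   reaches the BUILT_IN_MEMCPY case above, so both block-move
   expansion and memcpy libcalls reference the assembler name
   "xmemcpy" instead of "memcpy".  */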
11160
11161 /* Return true if DECL is a builtin that expands to a constant or similarly
11162 simple code. */
11163 bool
11164 is_simple_builtin (tree decl)
11165 {
11166 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
11167 switch (DECL_FUNCTION_CODE (decl))
11168 {
11169 /* Builtins that expand to constants. */
11170 case BUILT_IN_CONSTANT_P:
11171 case BUILT_IN_EXPECT:
11172 case BUILT_IN_OBJECT_SIZE:
11173 case BUILT_IN_UNREACHABLE:
11174 /* Simple register moves or loads from stack. */
11175 case BUILT_IN_ASSUME_ALIGNED:
11176 case BUILT_IN_RETURN_ADDRESS:
11177 case BUILT_IN_EXTRACT_RETURN_ADDR:
11178 case BUILT_IN_FROB_RETURN_ADDR:
11179 case BUILT_IN_RETURN:
11180 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
11181 case BUILT_IN_FRAME_ADDRESS:
11182 case BUILT_IN_VA_END:
11183 case BUILT_IN_STACK_SAVE:
11184 case BUILT_IN_STACK_RESTORE:
11185 /* Exception state returns or moves registers around. */
11186 case BUILT_IN_EH_FILTER:
11187 case BUILT_IN_EH_POINTER:
11188 case BUILT_IN_EH_COPY_VALUES:
11189 return true;
11190
11191 default:
11192 return false;
11193 }
11194
11195 return false;
11196 }
11197
11198 /* Return true if DECL is a builtin that is not expensive, i.e., one that
11199 is most probably expanded inline into reasonably simple code. This is a
11200 superset of is_simple_builtin. */
11201 bool
11202 is_inexpensive_builtin (tree decl)
11203 {
11204 if (!decl)
11205 return false;
11206 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
11207 return true;
11208 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
11209 switch (DECL_FUNCTION_CODE (decl))
11210 {
11211 case BUILT_IN_ABS:
11212 case BUILT_IN_ALLOCA:
11213 case BUILT_IN_ALLOCA_WITH_ALIGN:
11214 case BUILT_IN_BSWAP16:
11215 case BUILT_IN_BSWAP32:
11216 case BUILT_IN_BSWAP64:
11217 case BUILT_IN_CLZ:
11218 case BUILT_IN_CLZIMAX:
11219 case BUILT_IN_CLZL:
11220 case BUILT_IN_CLZLL:
11221 case BUILT_IN_CTZ:
11222 case BUILT_IN_CTZIMAX:
11223 case BUILT_IN_CTZL:
11224 case BUILT_IN_CTZLL:
11225 case BUILT_IN_FFS:
11226 case BUILT_IN_FFSIMAX:
11227 case BUILT_IN_FFSL:
11228 case BUILT_IN_FFSLL:
11229 case BUILT_IN_IMAXABS:
11230 case BUILT_IN_FINITE:
11231 case BUILT_IN_FINITEF:
11232 case BUILT_IN_FINITEL:
11233 case BUILT_IN_FINITED32:
11234 case BUILT_IN_FINITED64:
11235 case BUILT_IN_FINITED128:
11236 case BUILT_IN_FPCLASSIFY:
11237 case BUILT_IN_ISFINITE:
11238 case BUILT_IN_ISINF_SIGN:
11239 case BUILT_IN_ISINF:
11240 case BUILT_IN_ISINFF:
11241 case BUILT_IN_ISINFL:
11242 case BUILT_IN_ISINFD32:
11243 case BUILT_IN_ISINFD64:
11244 case BUILT_IN_ISINFD128:
11245 case BUILT_IN_ISNAN:
11246 case BUILT_IN_ISNANF:
11247 case BUILT_IN_ISNANL:
11248 case BUILT_IN_ISNAND32:
11249 case BUILT_IN_ISNAND64:
11250 case BUILT_IN_ISNAND128:
11251 case BUILT_IN_ISNORMAL:
11252 case BUILT_IN_ISGREATER:
11253 case BUILT_IN_ISGREATEREQUAL:
11254 case BUILT_IN_ISLESS:
11255 case BUILT_IN_ISLESSEQUAL:
11256 case BUILT_IN_ISLESSGREATER:
11257 case BUILT_IN_ISUNORDERED:
11258 case BUILT_IN_VA_ARG_PACK:
11259 case BUILT_IN_VA_ARG_PACK_LEN:
11260 case BUILT_IN_VA_COPY:
11261 case BUILT_IN_TRAP:
11262 case BUILT_IN_SAVEREGS:
11263 case BUILT_IN_POPCOUNTL:
11264 case BUILT_IN_POPCOUNTLL:
11265 case BUILT_IN_POPCOUNTIMAX:
11266 case BUILT_IN_POPCOUNT:
11267 case BUILT_IN_PARITYL:
11268 case BUILT_IN_PARITYLL:
11269 case BUILT_IN_PARITYIMAX:
11270 case BUILT_IN_PARITY:
11271 case BUILT_IN_LABS:
11272 case BUILT_IN_LLABS:
11273 case BUILT_IN_PREFETCH:
11274 case BUILT_IN_ACC_ON_DEVICE:
11275 return true;
11276
11277 default:
11278 return is_simple_builtin (decl);
11279 }
11280
11281 return false;
11282 }