1 /* Expand builtin functions.
2 Copyright (C) 1988-2015 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "predict.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "rtl.h"
28 #include "alias.h"
29 #include "fold-const.h"
30 #include "stringpool.h"
31 #include "stor-layout.h"
32 #include "calls.h"
33 #include "varasm.h"
34 #include "tree-object-size.h"
35 #include "realmpfr.h"
36 #include "cfgrtl.h"
37 #include "internal-fn.h"
38 #include "flags.h"
39 #include "regs.h"
40 #include "except.h"
41 #include "insn-config.h"
42 #include "expmed.h"
43 #include "dojump.h"
44 #include "explow.h"
45 #include "emit-rtl.h"
46 #include "stmt.h"
47 #include "expr.h"
48 #include "insn-codes.h"
49 #include "optabs.h"
50 #include "libfuncs.h"
51 #include "recog.h"
52 #include "output.h"
53 #include "typeclass.h"
54 #include "tm_p.h"
55 #include "target.h"
56 #include "langhooks.h"
57 #include "tree-ssanames.h"
58 #include "tree-dfa.h"
59 #include "value-prof.h"
60 #include "diagnostic-core.h"
61 #include "builtins.h"
62 #include "asan.h"
63 #include "cilk.h"
64 #include "cgraph.h"
65 #include "tree-chkp.h"
66 #include "rtl-chkp.h"
67 #include "gomp-constants.h"
68
69
70 static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));
71
72 struct target_builtins default_target_builtins;
73 #if SWITCHABLE_TARGET
74 struct target_builtins *this_target_builtins = &default_target_builtins;
75 #endif
76
77 /* Define the names of the builtin function types and codes. */
78 const char *const built_in_class_names[BUILT_IN_LAST]
79 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
80
81 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
82 const char * built_in_names[(int) END_BUILTINS] =
83 {
84 #include "builtins.def"
85 };
86 #undef DEF_BUILTIN
87
88 /* Set up an array of builtin_info_type; make sure each element's decl is
89 initialized to NULL_TREE. */
90 builtin_info_type builtin_info[(int)END_BUILTINS];
91
92 /* Non-zero if __builtin_constant_p should be folded right away. */
93 bool force_folding_builtin_constant_p;
94
95 static rtx c_readstr (const char *, machine_mode);
96 static int target_char_cast (tree, char *);
97 static rtx get_memory_rtx (tree, tree);
98 static int apply_args_size (void);
99 static int apply_result_size (void);
100 static rtx result_vector (int, rtx);
101 static void expand_builtin_prefetch (tree);
102 static rtx expand_builtin_apply_args (void);
103 static rtx expand_builtin_apply_args_1 (void);
104 static rtx expand_builtin_apply (rtx, rtx, rtx);
105 static void expand_builtin_return (rtx);
106 static enum type_class type_to_class (tree);
107 static rtx expand_builtin_classify_type (tree);
108 static void expand_errno_check (tree, rtx);
109 static rtx expand_builtin_mathfn (tree, rtx, rtx);
110 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
111 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
112 static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
113 static rtx expand_builtin_interclass_mathfn (tree, rtx);
114 static rtx expand_builtin_sincos (tree);
115 static rtx expand_builtin_cexpi (tree, rtx);
116 static rtx expand_builtin_int_roundingfn (tree, rtx);
117 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
118 static rtx expand_builtin_next_arg (void);
119 static rtx expand_builtin_va_start (tree);
120 static rtx expand_builtin_va_end (tree);
121 static rtx expand_builtin_va_copy (tree);
122 static rtx expand_builtin_memcmp (tree, rtx, machine_mode);
123 static rtx expand_builtin_strcmp (tree, rtx);
124 static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
125 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, machine_mode);
126 static rtx expand_builtin_memcpy (tree, rtx);
127 static rtx expand_builtin_memcpy_with_bounds (tree, rtx);
128 static rtx expand_builtin_memcpy_args (tree, tree, tree, rtx, tree);
129 static rtx expand_builtin_mempcpy (tree, rtx, machine_mode);
130 static rtx expand_builtin_mempcpy_with_bounds (tree, rtx, machine_mode);
131 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
132 machine_mode, int, tree);
133 static rtx expand_builtin_strcpy (tree, rtx);
134 static rtx expand_builtin_strcpy_args (tree, tree, rtx);
135 static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
136 static rtx expand_builtin_strncpy (tree, rtx);
137 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, machine_mode);
138 static rtx expand_builtin_memset (tree, rtx, machine_mode);
139 static rtx expand_builtin_memset_with_bounds (tree, rtx, machine_mode);
140 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
141 static rtx expand_builtin_bzero (tree);
142 static rtx expand_builtin_strlen (tree, rtx, machine_mode);
143 static rtx expand_builtin_alloca (tree, bool);
144 static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
145 static rtx expand_builtin_frame_address (tree, tree);
146 static tree stabilize_va_list_loc (location_t, tree, int);
147 static rtx expand_builtin_expect (tree, rtx);
148 static tree fold_builtin_constant_p (tree);
149 static tree fold_builtin_classify_type (tree);
150 static tree fold_builtin_strlen (location_t, tree, tree);
151 static tree fold_builtin_inf (location_t, tree, int);
152 static tree fold_builtin_nan (tree, tree, int);
153 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
154 static bool validate_arg (const_tree, enum tree_code code);
155 static bool integer_valued_real_p (tree);
156 static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
157 static rtx expand_builtin_fabs (tree, rtx, rtx);
158 static rtx expand_builtin_signbit (tree, rtx);
159 static tree fold_builtin_sqrt (location_t, tree, tree);
160 static tree fold_builtin_cbrt (location_t, tree, tree);
161 static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
162 static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
163 static tree fold_builtin_cos (location_t, tree, tree, tree);
164 static tree fold_builtin_cosh (location_t, tree, tree, tree);
165 static tree fold_builtin_tan (tree, tree);
166 static tree fold_builtin_trunc (location_t, tree, tree);
167 static tree fold_builtin_floor (location_t, tree, tree);
168 static tree fold_builtin_ceil (location_t, tree, tree);
169 static tree fold_builtin_round (location_t, tree, tree);
170 static tree fold_builtin_int_roundingfn (location_t, tree, tree);
171 static tree fold_builtin_bitop (tree, tree);
172 static tree fold_builtin_strchr (location_t, tree, tree, tree);
173 static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
174 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
175 static tree fold_builtin_strcmp (location_t, tree, tree);
176 static tree fold_builtin_strncmp (location_t, tree, tree, tree);
177 static tree fold_builtin_signbit (location_t, tree, tree);
178 static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
179 static tree fold_builtin_isascii (location_t, tree);
180 static tree fold_builtin_toascii (location_t, tree);
181 static tree fold_builtin_isdigit (location_t, tree);
182 static tree fold_builtin_fabs (location_t, tree, tree);
183 static tree fold_builtin_abs (location_t, tree, tree);
184 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
185 enum tree_code);
186 static tree fold_builtin_0 (location_t, tree);
187 static tree fold_builtin_1 (location_t, tree, tree);
188 static tree fold_builtin_2 (location_t, tree, tree, tree);
189 static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
190 static tree fold_builtin_varargs (location_t, tree, tree*, int);
191
192 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
193 static tree fold_builtin_strstr (location_t, tree, tree, tree);
194 static tree fold_builtin_strrchr (location_t, tree, tree, tree);
195 static tree fold_builtin_strspn (location_t, tree, tree);
196 static tree fold_builtin_strcspn (location_t, tree, tree);
197
198 static rtx expand_builtin_object_size (tree);
199 static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
200 enum built_in_function);
201 static void maybe_emit_chk_warning (tree, enum built_in_function);
202 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
203 static void maybe_emit_free_warning (tree);
204 static tree fold_builtin_object_size (tree, tree);
205
206 unsigned HOST_WIDE_INT target_newline;
207 unsigned HOST_WIDE_INT target_percent;
208 static unsigned HOST_WIDE_INT target_c;
209 static unsigned HOST_WIDE_INT target_s;
210 char target_percent_c[3];
211 char target_percent_s[3];
212 char target_percent_s_newline[4];
213 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
214 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
215 static tree do_mpfr_arg2 (tree, tree, tree,
216 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
217 static tree do_mpfr_arg3 (tree, tree, tree, tree,
218 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
219 static tree do_mpfr_sincos (tree, tree, tree);
220 static tree do_mpfr_bessel_n (tree, tree, tree,
221 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
222 const REAL_VALUE_TYPE *, bool);
223 static tree do_mpfr_remquo (tree, tree, tree);
224 static tree do_mpfr_lgamma_r (tree, tree, tree);
225 static void expand_builtin_sync_synchronize (void);
226
227 /* Return true if NAME starts with __builtin_, __sync_ or __atomic_, or (with Cilk Plus enabled) names a Cilk runtime helper. */
228
229 static bool
230 is_builtin_name (const char *name)
231 {
232 if (strncmp (name, "__builtin_", 10) == 0)
233 return true;
234 if (strncmp (name, "__sync_", 7) == 0)
235 return true;
236 if (strncmp (name, "__atomic_", 9) == 0)
237 return true;
238 if (flag_cilkplus
239 && (!strcmp (name, "__cilkrts_detach")
240 || !strcmp (name, "__cilkrts_pop_frame")))
241 return true;
242 return false;
243 }
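/* An illustrative sketch of the matching behavior above (not part of
   the original sources):

     is_builtin_name ("__builtin_memcpy")      => true
     is_builtin_name ("__sync_fetch_and_add")  => true
     is_builtin_name ("__atomic_load_n")       => true
     is_builtin_name ("memcpy")                => false  */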
244
245
246 /* Return true if DECL is a function symbol representing a built-in. */
247
248 bool
249 is_builtin_fn (tree decl)
250 {
251 return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
252 }
253
254 /* Return true if NODE should be considered for inline expansion regardless
255 of the optimization level. This means whenever a function is invoked with
256 its "internal" name, which normally contains the prefix "__builtin". */
257
258 static bool
259 called_as_built_in (tree node)
260 {
261 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
262 we want the name used to call the function, not the name it
263 will have. */
264 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
265 return is_builtin_name (name);
266 }
267
268 /* Compute values M and N such that M divides (address of EXP - N) and such
269 that N < M. If these numbers can be determined, store M in *ALIGNP and N in
270 *BITPOSP and return true. Otherwise return false and store BITS_PER_UNIT to
271 *ALIGNP and any bit-offset to *BITPOSP.
272
273 Note that the address (and thus the alignment) computed here is based
274 on the address to which a symbol resolves, whereas DECL_ALIGN is based
275 on the address at which an object is actually located. These two
276 addresses are not always the same. For example, on ARM targets,
277 the address &foo of a Thumb function foo() has the lowest bit set,
278 whereas foo() itself starts on an even address.
279
280 If ADDR_P is true we are taking the address of the memory reference EXP
281 and thus cannot rely on the access taking place. */
282
283 static bool
284 get_object_alignment_2 (tree exp, unsigned int *alignp,
285 unsigned HOST_WIDE_INT *bitposp, bool addr_p)
286 {
287 HOST_WIDE_INT bitsize, bitpos;
288 tree offset;
289 machine_mode mode;
290 int unsignedp, volatilep;
291 unsigned int align = BITS_PER_UNIT;
292 bool known_alignment = false;
293
294 /* Get the innermost object and the constant (bitpos) and possibly
295 variable (offset) offset of the access. */
296 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
297 &mode, &unsignedp, &volatilep, true);
298
299 /* Extract alignment information from the innermost object and
300 possibly adjust bitpos and offset. */
301 if (TREE_CODE (exp) == FUNCTION_DECL)
302 {
303 /* Function addresses can encode extra information besides their
304 alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
305 allows the low bit to be used as a virtual bit, we know
306 that the address itself must be at least 2-byte aligned. */
307 if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
308 align = 2 * BITS_PER_UNIT;
309 }
310 else if (TREE_CODE (exp) == LABEL_DECL)
311 ;
312 else if (TREE_CODE (exp) == CONST_DECL)
313 {
314 /* The alignment of a CONST_DECL is determined by its initializer. */
315 exp = DECL_INITIAL (exp);
316 align = TYPE_ALIGN (TREE_TYPE (exp));
317 if (CONSTANT_CLASS_P (exp))
318 align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
319
320 known_alignment = true;
321 }
322 else if (DECL_P (exp))
323 {
324 align = DECL_ALIGN (exp);
325 known_alignment = true;
326 }
327 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
328 {
329 align = TYPE_ALIGN (TREE_TYPE (exp));
330 }
331 else if (TREE_CODE (exp) == INDIRECT_REF
332 || TREE_CODE (exp) == MEM_REF
333 || TREE_CODE (exp) == TARGET_MEM_REF)
334 {
335 tree addr = TREE_OPERAND (exp, 0);
336 unsigned ptr_align;
337 unsigned HOST_WIDE_INT ptr_bitpos;
338 unsigned HOST_WIDE_INT ptr_bitmask = ~0;
339
340 /* If the address is explicitly aligned, handle that. */
341 if (TREE_CODE (addr) == BIT_AND_EXPR
342 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
343 {
344 ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
345 ptr_bitmask *= BITS_PER_UNIT;
346 align = ptr_bitmask & -ptr_bitmask;
347 addr = TREE_OPERAND (addr, 0);
348 }
349
350 known_alignment
351 = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
352 align = MAX (ptr_align, align);
353
354 /* Re-apply explicit alignment to the bitpos. */
355 ptr_bitpos &= ptr_bitmask;
356
357 /* The alignment of the pointer operand in a TARGET_MEM_REF
358 has to take the variable offset parts into account. */
359 if (TREE_CODE (exp) == TARGET_MEM_REF)
360 {
361 if (TMR_INDEX (exp))
362 {
363 unsigned HOST_WIDE_INT step = 1;
364 if (TMR_STEP (exp))
365 step = TREE_INT_CST_LOW (TMR_STEP (exp));
366 align = MIN (align, (step & -step) * BITS_PER_UNIT);
367 }
368 if (TMR_INDEX2 (exp))
369 align = BITS_PER_UNIT;
370 known_alignment = false;
371 }
372
373 /* When EXP is an actual memory reference then we can use
374 TYPE_ALIGN of a pointer indirection to derive alignment.
375 Do so only if get_pointer_alignment_1 did not reveal absolute
376 alignment knowledge and if using that alignment would
377 improve the situation. */
378 if (!addr_p && !known_alignment
379 && TYPE_ALIGN (TREE_TYPE (exp)) > align)
380 align = TYPE_ALIGN (TREE_TYPE (exp));
381 else
382 {
383 /* Else adjust bitpos accordingly. */
384 bitpos += ptr_bitpos;
385 if (TREE_CODE (exp) == MEM_REF
386 || TREE_CODE (exp) == TARGET_MEM_REF)
387 bitpos += mem_ref_offset (exp).to_short_addr () * BITS_PER_UNIT;
388 }
389 }
390 else if (TREE_CODE (exp) == STRING_CST)
391 {
392 /* STRING_CSTs are the only constant objects we allow not to be
393 wrapped inside a CONST_DECL. */
394 align = TYPE_ALIGN (TREE_TYPE (exp));
395 if (CONSTANT_CLASS_P (exp))
396 align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
397
398 known_alignment = true;
399 }
400
401 /* If there is a non-constant offset part extract the maximum
402 alignment that can prevail. */
403 if (offset)
404 {
405 unsigned int trailing_zeros = tree_ctz (offset);
406 if (trailing_zeros < HOST_BITS_PER_INT)
407 {
408 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
409 if (inner)
410 align = MIN (align, inner);
411 }
412 }
413
414 *alignp = align;
415 *bitposp = bitpos & (*alignp - 1);
416 return known_alignment;
417 }
418
419 /* For a memory reference expression EXP compute values M and N such that M
420 divides (&EXP - N) and such that N < M. If these numbers can be determined,
421 store M in *ALIGNP and N in *BITPOSP and return true. Otherwise return false
422 and store BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP. */
423
424 bool
425 get_object_alignment_1 (tree exp, unsigned int *alignp,
426 unsigned HOST_WIDE_INT *bitposp)
427 {
428 return get_object_alignment_2 (exp, alignp, bitposp, false);
429 }
430
431 /* Return the alignment in bits of EXP, an object. */
432
433 unsigned int
434 get_object_alignment (tree exp)
435 {
436 unsigned HOST_WIDE_INT bitpos = 0;
437 unsigned int align;
438
439 get_object_alignment_1 (exp, &align, &bitpos);
440
441 /* align and bitpos now specify known low bits of the pointer.
442 ptr & (align - 1) == bitpos. */
443
444 if (bitpos != 0)
445 align = (bitpos & -bitpos);
446 return align;
447 }
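/* A worked example (illustrative only): if get_object_alignment_1
   computes align == 64 and bitpos == 16, the address satisfies
   ptr & 63 == 16, so the largest power of two known to divide it is
   16 == (bitpos & -bitpos), and get_object_alignment returns 16.
   With bitpos == 0 the full 64-bit alignment is returned.  */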
448
449 /* For a pointer valued expression EXP compute values M and N such that M
450 divides (EXP - N) and such that N < M. If these numbers can be determined,
451 store M in *ALIGNP and N in *BITPOSP and return true. Return false if
452 the results are just a conservative approximation.
453
454 If EXP is not a pointer, false is returned too. */
455
456 bool
457 get_pointer_alignment_1 (tree exp, unsigned int *alignp,
458 unsigned HOST_WIDE_INT *bitposp)
459 {
460 STRIP_NOPS (exp);
461
462 if (TREE_CODE (exp) == ADDR_EXPR)
463 return get_object_alignment_2 (TREE_OPERAND (exp, 0),
464 alignp, bitposp, true);
465 else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
466 {
467 unsigned int align;
468 unsigned HOST_WIDE_INT bitpos;
469 bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
470 &align, &bitpos);
471 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
472 bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
473 else
474 {
475 unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
476 if (trailing_zeros < HOST_BITS_PER_INT)
477 {
478 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
479 if (inner)
480 align = MIN (align, inner);
481 }
482 }
483 *alignp = align;
484 *bitposp = bitpos & (align - 1);
485 return res;
486 }
487 else if (TREE_CODE (exp) == SSA_NAME
488 && POINTER_TYPE_P (TREE_TYPE (exp)))
489 {
490 unsigned int ptr_align, ptr_misalign;
491 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
492
493 if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
494 {
495 *bitposp = ptr_misalign * BITS_PER_UNIT;
496 *alignp = ptr_align * BITS_PER_UNIT;
497 /* We cannot really tell whether this result is an approximation. */
498 return true;
499 }
500 else
501 {
502 *bitposp = 0;
503 *alignp = BITS_PER_UNIT;
504 return false;
505 }
506 }
507 else if (TREE_CODE (exp) == INTEGER_CST)
508 {
509 *alignp = BIGGEST_ALIGNMENT;
510 *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
511 & (BIGGEST_ALIGNMENT - 1));
512 return true;
513 }
514
515 *bitposp = 0;
516 *alignp = BITS_PER_UNIT;
517 return false;
518 }
519
520 /* Return the alignment in bits of EXP, a pointer valued expression.
521 The alignment returned is, by default, the alignment of the thing that
522 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
523
524 Otherwise, look at the expression to see if we can do better, i.e., if the
525 expression is actually pointing at an object whose alignment is tighter. */
526
527 unsigned int
528 get_pointer_alignment (tree exp)
529 {
530 unsigned HOST_WIDE_INT bitpos = 0;
531 unsigned int align;
532
533 get_pointer_alignment_1 (exp, &align, &bitpos);
534
535 /* align and bitpos now specify known low bits of the pointer.
536 ptr & (align - 1) == bitpos. */
537
538 if (bitpos != 0)
539 align = (bitpos & -bitpos);
540
541 return align;
542 }
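/* Typical use (a hypothetical caller, for illustration):

     unsigned int align = get_pointer_alignment (CALL_EXPR_ARG (exp, 0));
     if (align >= GET_MODE_ALIGNMENT (word_mode))
       ... a word-aligned access may be emitted ...

   The value returned is a known lower bound on the alignment, in
   bits.  */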
543
544 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
545 way, because the string could contain a zero byte in the middle.
546 TREE_STRING_LENGTH is the size of the character array, not the string.
547
548 ONLY_VALUE should be nonzero if the result is not going to be emitted
549 into the instruction stream and zero if it is going to be expanded.
550 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
551 is returned, otherwise NULL, since
552 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
553 evaluate the side-effects.
554
555 If ONLY_VALUE is two then we do not emit warnings about out-of-bound
556 accesses. Note that this implies the result is not going to be emitted
557 into the instruction stream.
558
559 The value returned is of type `ssizetype'.
560
561 Unfortunately, string_constant can't access the values of const char
562 arrays with initializers, so neither can we do so here. */
563
564 tree
565 c_strlen (tree src, int only_value)
566 {
567 tree offset_node;
568 HOST_WIDE_INT offset;
569 int max;
570 const char *ptr;
571 location_t loc;
572
573 STRIP_NOPS (src);
574 if (TREE_CODE (src) == COND_EXPR
575 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
576 {
577 tree len1, len2;
578
579 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
580 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
581 if (tree_int_cst_equal (len1, len2))
582 return len1;
583 }
584
585 if (TREE_CODE (src) == COMPOUND_EXPR
586 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
587 return c_strlen (TREE_OPERAND (src, 1), only_value);
588
589 loc = EXPR_LOC_OR_LOC (src, input_location);
590
591 src = string_constant (src, &offset_node);
592 if (src == 0)
593 return NULL_TREE;
594
595 max = TREE_STRING_LENGTH (src) - 1;
596 ptr = TREE_STRING_POINTER (src);
597
598 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
599 {
600 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
601 compute the offset to the following null if we don't know where to
602 start searching for it. */
603 int i;
604
605 for (i = 0; i < max; i++)
606 if (ptr[i] == 0)
607 return NULL_TREE;
608
609 /* We don't know the starting offset, but we do know that the string
610 has no internal zero bytes. We can assume that the offset falls
611 within the bounds of the string; otherwise, the programmer deserves
612 what he gets. Subtract the offset from the length of the string,
613 and return that. This would perhaps not be valid if we were dealing
614 with named arrays in addition to literal string constants. */
615
616 return size_diffop_loc (loc, size_int (max), offset_node);
617 }
618
619 /* We have a known offset into the string. Start searching there for
620 a null character if we can represent it as a single HOST_WIDE_INT. */
621 if (offset_node == 0)
622 offset = 0;
623 else if (! tree_fits_shwi_p (offset_node))
624 offset = -1;
625 else
626 offset = tree_to_shwi (offset_node);
627
628 /* If the offset is known to be out of bounds, warn, and call strlen at
629 runtime. */
630 if (offset < 0 || offset > max)
631 {
632 /* Suppress multiple warnings for propagated constant strings. */
633 if (only_value != 2
634 && !TREE_NO_WARNING (src))
635 {
636 warning_at (loc, 0, "offset outside bounds of constant string");
637 TREE_NO_WARNING (src) = 1;
638 }
639 return NULL_TREE;
640 }
641
642 /* Use strlen to search for the first zero byte. Since any strings
643 constructed with build_string will have nulls appended, we win even
644 if we get handed something like (char[4])"abcd".
645
646 Since OFFSET is our starting index into the string, no further
647 calculation is needed. */
648 return ssize_int (strlen (ptr + offset));
649 }
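/* Illustrative results (assuming the usual host strlen semantics):
   for the constant "hello" the function returns ssize_int (5); for
   "hello" with a constant offset of 2 it returns 3, the length of
   "llo"; and for "foo\0bar" with a non-constant offset it returns
   NULL_TREE, since the internal zero byte makes the length depend on
   where the search starts.  */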
650
651 /* Return a char pointer for a C string if it is a string constant
652 or a sum of a string constant and an integer constant. */
653
654 const char *
655 c_getstr (tree src)
656 {
657 tree offset_node;
658
659 src = string_constant (src, &offset_node);
660 if (src == 0)
661 return 0;
662
663 if (offset_node == 0)
664 return TREE_STRING_POINTER (src);
665 else if (!tree_fits_uhwi_p (offset_node)
666 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
667 return 0;
668
669 return TREE_STRING_POINTER (src) + tree_to_uhwi (offset_node);
670 }
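/* For example (a sketch): for the expression "hello" + 1 this
   returns a host pointer to "ello"; for a non-constant offset, or a
   constant offset beyond the end of the string, it returns 0.  */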
671
672 /* Return a constant integer corresponding to target reading
673 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
674
675 static rtx
676 c_readstr (const char *str, machine_mode mode)
677 {
678 HOST_WIDE_INT ch;
679 unsigned int i, j;
680 HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];
681
682 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
683 unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
684 / HOST_BITS_PER_WIDE_INT;
685
686 gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
687 for (i = 0; i < len; i++)
688 tmp[i] = 0;
689
690 ch = 1;
691 for (i = 0; i < GET_MODE_SIZE (mode); i++)
692 {
693 j = i;
694 if (WORDS_BIG_ENDIAN)
695 j = GET_MODE_SIZE (mode) - i - 1;
696 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
697 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
698 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
699 j *= BITS_PER_UNIT;
700
701 if (ch)
702 ch = (unsigned char) str[i];
703 tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
704 }
705
706 wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
707 return immed_wide_int_const (c, mode);
708 }
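/* A worked example (assuming a little-endian target with 8-bit
   units, for illustration): c_readstr ("abcd", SImode) places 'a'
   (0x61) in the least significant byte, yielding the constant
   0x64636261; on a big-endian target the same call yields
   0x61626364. Bytes past a terminating zero are read as zero.  */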
709
710 /* Cast a target constant CST to a target CHAR; if that value fits into
711 the host char type, return zero and store it in the variable pointed to
712 by P. Otherwise return 1. */
713
714 static int
715 target_char_cast (tree cst, char *p)
716 {
717 unsigned HOST_WIDE_INT val, hostval;
718
719 if (TREE_CODE (cst) != INTEGER_CST
720 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
721 return 1;
722
723 /* Do not care if it fits or not right here. */
724 val = TREE_INT_CST_LOW (cst);
725
726 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
727 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
728
729 hostval = val;
730 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
731 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
732
733 if (val != hostval)
734 return 1;
735
736 *p = hostval;
737 return 0;
738 }
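/* Usage sketch (a hypothetical caller): expanders typically bail
   out when the cast fails, e.g.

     char c;
     if (target_char_cast (CALL_EXPR_ARG (exp, 1), &c))
       return NULL_RTX;

   and otherwise use C as a host-side copy of the target character.  */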
739
740 /* Similar to save_expr, but assumes that arbitrary code is not executed
741 in between the multiple evaluations. In particular, we assume that a
742 non-addressable local variable will not be modified. */
743
744 static tree
745 builtin_save_expr (tree exp)
746 {
747 if (TREE_CODE (exp) == SSA_NAME
748 || (TREE_ADDRESSABLE (exp) == 0
749 && (TREE_CODE (exp) == PARM_DECL
750 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
751 return exp;
752
753 return save_expr (exp);
754 }
755
756 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
757 times to get the address of either a higher stack frame, or a return
758 address located within it (depending on FNDECL_CODE). */
759
760 static rtx
761 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
762 {
763 int i;
764
765 #ifdef INITIAL_FRAME_ADDRESS_RTX
766 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
767 #else
768 rtx tem;
769
770 /* For a zero count with __builtin_return_address, we don't care what
771 frame address we return, because target-specific definitions will
772 override us. Therefore frame pointer elimination is OK, and using
773 the soft frame pointer is OK.
774
775 For a nonzero count, or a zero count with __builtin_frame_address,
776 we require a stable offset from the current frame pointer to the
777 previous one, so we must use the hard frame pointer, and
778 we must disable frame pointer elimination. */
779 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
780 tem = frame_pointer_rtx;
781 else
782 {
783 tem = hard_frame_pointer_rtx;
784
785 /* Tell reload not to eliminate the frame pointer. */
786 crtl->accesses_prior_frames = 1;
787 }
788 #endif
789
790 /* Some machines need special handling before we can access
791 arbitrary frames. For example, on the SPARC, we must first flush
792 all register windows to the stack. */
793 #ifdef SETUP_FRAME_ADDRESSES
794 if (count > 0)
795 SETUP_FRAME_ADDRESSES ();
796 #endif
797
798 /* On the SPARC, the return address is not in the frame, it is in a
799 register. There is no way to access it off of the current frame
800 pointer, but it can be accessed off the previous frame pointer by
801 reading the value from the register window save area. */
802 if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
803 count--;
804
805 /* Scan back COUNT frames to the specified frame. */
806 for (i = 0; i < count; i++)
807 {
808 /* Assume the dynamic chain pointer is in the word that the
809 frame address points to, unless otherwise specified. */
810 #ifdef DYNAMIC_CHAIN_ADDRESS
811 tem = DYNAMIC_CHAIN_ADDRESS (tem);
812 #endif
813 tem = memory_address (Pmode, tem);
814 tem = gen_frame_mem (Pmode, tem);
815 tem = copy_to_reg (tem);
816 }
817
818 /* For __builtin_frame_address, return what we've got. But, on
819 the SPARC for example, we may have to add a bias. */
820 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
821 #ifdef FRAME_ADDR_RTX
822 return FRAME_ADDR_RTX (tem);
823 #else
824 return tem;
825 #endif
826
827 /* For __builtin_return_address, get the return address from that frame. */
828 #ifdef RETURN_ADDR_RTX
829 tem = RETURN_ADDR_RTX (count, tem);
830 #else
831 tem = memory_address (Pmode,
832 plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
833 tem = gen_frame_mem (Pmode, tem);
834 #endif
835 return tem;
836 }
837
838 /* Alias set used for setjmp buffer. */
839 static alias_set_type setjmp_alias_set = -1;
840
841 /* Construct the leading half of a __builtin_setjmp call. Control will
842 return to RECEIVER_LABEL. This is also called directly by the SJLJ
843 exception handling code. */
844
845 void
846 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
847 {
848 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
849 rtx stack_save;
850 rtx mem;
851
852 if (setjmp_alias_set == -1)
853 setjmp_alias_set = new_alias_set ();
854
855 buf_addr = convert_memory_address (Pmode, buf_addr);
856
857 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
858
859 /* We store the frame pointer and the address of receiver_label in
860 the buffer and use the rest of it for the stack save area, which
861 is machine-dependent. */
862
863 mem = gen_rtx_MEM (Pmode, buf_addr);
864 set_mem_alias_set (mem, setjmp_alias_set);
865 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
866
867 mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
868 GET_MODE_SIZE (Pmode)));
869 set_mem_alias_set (mem, setjmp_alias_set);
870
871 emit_move_insn (validize_mem (mem),
872 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
873
874 stack_save = gen_rtx_MEM (sa_mode,
875 plus_constant (Pmode, buf_addr,
876 2 * GET_MODE_SIZE (Pmode)));
877 set_mem_alias_set (stack_save, setjmp_alias_set);
878 emit_stack_save (SAVE_NONLOCAL, &stack_save);
879
880 /* If there is further processing to do, do it. */
881 if (targetm.have_builtin_setjmp_setup ())
882 emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));
883
884 /* We have a nonlocal label. */
885 cfun->has_nonlocal_label = 1;
886 }
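/* To summarize the layout established above, the buffer holds, in
   Pmode-word-sized slots (illustrative; the exact save-area size is
   machine-dependent):

     buf[0]          frame pointer (targetm.builtin_setjmp_frame_value)
     buf[1]          address of RECEIVER_LABEL
     buf[2] onward   the nonlocal stack save area (sa_mode)

   expand_builtin_longjmp below reads the slots back in this order.  */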
887
888 /* Construct the trailing part of a __builtin_setjmp call. This is
889 also called directly by the SJLJ exception handling code.
890 If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler. */
891
892 void
893 expand_builtin_setjmp_receiver (rtx receiver_label)
894 {
895 rtx chain;
896
897 /* Mark the FP as used when we get here, so we have to make sure it's
898 marked as used by this function. */
899 emit_use (hard_frame_pointer_rtx);
900
901 /* Mark the static chain as clobbered here so life information
902 doesn't get messed up for it. */
903 chain = targetm.calls.static_chain (current_function_decl, true);
904 if (chain && REG_P (chain))
905 emit_clobber (chain);
906
907 /* Now put in the code to restore the frame pointer, and argument
908 pointer, if needed. */
909 if (! targetm.have_nonlocal_goto ())
910 {
911 /* First adjust our frame pointer to its actual value. It was
912 previously set to the start of the virtual area corresponding to
913 the stacked variables when we branched here and now needs to be
914 adjusted to the actual hardware fp value.
915
916 Assignments to virtual registers are converted by
917 instantiate_virtual_regs into the corresponding assignment
918 to the underlying register (fp in this case) that makes
919 the original assignment true.
920 So the following insn will actually be decrementing fp by
921 STARTING_FRAME_OFFSET. */
922 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
923
924 /* Restoring the frame pointer also modifies the hard frame pointer.
925 Mark it used (so that the previous assignment remains live once
926 the frame pointer is eliminated) and clobbered (to represent the
927 implicit update from the assignment). */
928 emit_use (hard_frame_pointer_rtx);
929 emit_clobber (hard_frame_pointer_rtx);
930 }
931
932 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
933 if (fixed_regs[ARG_POINTER_REGNUM])
934 {
935 #ifdef ELIMINABLE_REGS
936 /* If the argument pointer can be eliminated in favor of the
937 frame pointer, we don't need to restore it. We assume here
938 that if such an elimination is present, it can always be used.
939 This is the case on all known machines; if we don't make this
940 assumption, we do unnecessary saving on many machines. */
941 size_t i;
942 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
943
944 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
945 if (elim_regs[i].from == ARG_POINTER_REGNUM
946 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
947 break;
948
949 if (i == ARRAY_SIZE (elim_regs))
950 #endif
951 {
952 /* Now restore our arg pointer from the address at which it
953 was saved in our stack frame. */
954 emit_move_insn (crtl->args.internal_arg_pointer,
955 copy_to_reg (get_arg_pointer_save_area ()));
956 }
957 }
958 #endif
959
960 if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
961 emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
962 else if (targetm.have_nonlocal_goto_receiver ())
963 emit_insn (targetm.gen_nonlocal_goto_receiver ());
964 else
965 { /* Nothing */ }
966
967 /* We must not allow the code we just generated to be reordered by
968 scheduling. Specifically, the update of the frame pointer must
969 happen immediately, not later. */
970 emit_insn (gen_blockage ());
971 }
972
973 /* __builtin_longjmp is passed a pointer to an array of five words (not
974 all will be used on all machines). It operates similarly to the C
975 library function of the same name, but is more efficient. Much of
976 the code below is copied from the handling of non-local gotos. */
977
978 static void
979 expand_builtin_longjmp (rtx buf_addr, rtx value)
980 {
981 rtx fp, lab, stack;
982 rtx_insn *insn, *last;
983 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
984
985 /* DRAP is needed for stack realign if longjmp is expanded to current
986 function */
987 if (SUPPORTS_STACK_ALIGNMENT)
988 crtl->need_drap = true;
989
990 if (setjmp_alias_set == -1)
991 setjmp_alias_set = new_alias_set ();
992
993 buf_addr = convert_memory_address (Pmode, buf_addr);
994
995 buf_addr = force_reg (Pmode, buf_addr);
996
997 /* We require that the user pass a second argument of 1, because
998 that is what builtin_setjmp will return. */
999 gcc_assert (value == const1_rtx);
1000
1001 last = get_last_insn ();
1002 if (targetm.have_builtin_longjmp ())
1003 emit_insn (targetm.gen_builtin_longjmp (buf_addr));
1004 else
1005 {
1006 fp = gen_rtx_MEM (Pmode, buf_addr);
1007 lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
1008 GET_MODE_SIZE (Pmode)));
1009
1010 stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
1011 2 * GET_MODE_SIZE (Pmode)));
1012 set_mem_alias_set (fp, setjmp_alias_set);
1013 set_mem_alias_set (lab, setjmp_alias_set);
1014 set_mem_alias_set (stack, setjmp_alias_set);
1015
1016 /* Pick up FP, label, and SP from the block and jump. This code is
1017 from expand_goto in stmt.c; see there for detailed comments. */
1018 if (targetm.have_nonlocal_goto ())
1019 /* We have to pass a value to the nonlocal_goto pattern that will
1020 get copied into the static_chain pointer, but it does not matter
1021 what that value is, because builtin_setjmp does not use it. */
1022 emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
1023 else
1024 {
1025 lab = copy_to_reg (lab);
1026
1027 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1028 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1029
1030 emit_move_insn (hard_frame_pointer_rtx, fp);
1031 emit_stack_restore (SAVE_NONLOCAL, stack);
1032
1033 emit_use (hard_frame_pointer_rtx);
1034 emit_use (stack_pointer_rtx);
1035 emit_indirect_jump (lab);
1036 }
1037 }
1038
1039 /* Search backwards and mark the jump insn as a non-local goto.
1040 Note that this precludes the use of __builtin_longjmp to a
1041 __builtin_setjmp target in the same function. However, we've
1042 already cautioned the user that these functions are for
1043 internal exception handling use only. */
1044 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1045 {
1046 gcc_assert (insn != last);
1047
1048 if (JUMP_P (insn))
1049 {
1050 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1051 break;
1052 }
1053 else if (CALL_P (insn))
1054 break;
1055 }
1056 }
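/* At the source level the pair expanded above is used roughly like
   this (illustrative; these builtins are documented as being for
   internal exception-handling use only):

     void *buf[5];
     if (__builtin_setjmp (buf) == 0)
       do_work ();                   /+ normal path +/
     else
       handle_unwind ();             /+ reached via the longjmp below +/
     ...
     __builtin_longjmp (buf, 1);     /+ second argument must be 1 +/  */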
1057
1058 static inline bool
1059 more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
1060 {
1061 return (iter->i < iter->n);
1062 }
1063
1064 /* This function validates the types of a function call argument list
1065 against a specified list of tree_codes. If the last specifier is a 0,
1066 that represents an ellipsis; otherwise the last specifier must be a
1067 VOID_TYPE. */
1068
1069 static bool
1070 validate_arglist (const_tree callexpr, ...)
1071 {
1072 enum tree_code code;
1073 bool res = false;
1074 va_list ap;
1075 const_call_expr_arg_iterator iter;
1076 const_tree arg;
1077
1078 va_start (ap, callexpr);
1079 init_const_call_expr_arg_iterator (callexpr, &iter);
1080
1081 do
1082 {
1083 code = (enum tree_code) va_arg (ap, int);
1084 switch (code)
1085 {
1086 case 0:
1087 /* This signifies an ellipsis; any further arguments are all ok. */
1088 res = true;
1089 goto end;
1090 case VOID_TYPE:
1091 /* This signifies an endlink; if no arguments remain, return
1092 true, otherwise return false. */
1093 res = !more_const_call_expr_args_p (&iter);
1094 goto end;
1095 default:
1096 /* If no parameters remain or the parameter's code does not
1097 match the specified code, return false. Otherwise continue
1098 checking any remaining arguments. */
1099 arg = next_const_call_expr_arg (&iter);
1100 if (!validate_arg (arg, code))
1101 goto end;
1102 break;
1103 }
1104 }
1105 while (1);
1106
1107 /* We need gotos here since we can only have one va_end in a
1108 function. */
1109 end: ;
1110 va_end (ap);
1111
1112 return res;
1113 }
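/* Example uses, following the conventions above: a fixed two-pointer
   signature is checked with

     validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)

   while a trailing 0 permits any further arguments, e.g.

     validate_arglist (exp, POINTER_TYPE, 0)

   as in expand_builtin_prefetch below.  */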
1114
1115 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
1116 and the address of the save area. */
1117
1118 static rtx
1119 expand_builtin_nonlocal_goto (tree exp)
1120 {
1121 tree t_label, t_save_area;
1122 rtx r_label, r_save_area, r_fp, r_sp;
1123 rtx_insn *insn;
1124
1125 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
1126 return NULL_RTX;
1127
1128 t_label = CALL_EXPR_ARG (exp, 0);
1129 t_save_area = CALL_EXPR_ARG (exp, 1);
1130
1131 r_label = expand_normal (t_label);
1132 r_label = convert_memory_address (Pmode, r_label);
1133 r_save_area = expand_normal (t_save_area);
1134 r_save_area = convert_memory_address (Pmode, r_save_area);
1135 /* Copy the address of the save location to a register just in case it was
1136 based on the frame pointer. */
1137 r_save_area = copy_to_reg (r_save_area);
1138 r_fp = gen_rtx_MEM (Pmode, r_save_area);
1139 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
1140 plus_constant (Pmode, r_save_area,
1141 GET_MODE_SIZE (Pmode)));
1142
1143 crtl->has_nonlocal_goto = 1;
1144
1145 /* ??? We no longer need to pass the static chain value, afaik. */
1146 if (targetm.have_nonlocal_goto ())
1147 emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
1148 else
1149 {
1150 r_label = copy_to_reg (r_label);
1151
1152 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1153 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1154
1155 /* Restore frame pointer for containing function. */
1156 emit_move_insn (hard_frame_pointer_rtx, r_fp);
1157 emit_stack_restore (SAVE_NONLOCAL, r_sp);
1158
1159 /* USE of hard_frame_pointer_rtx added for consistency;
1160 not clear if really needed. */
1161 emit_use (hard_frame_pointer_rtx);
1162 emit_use (stack_pointer_rtx);
1163
1164 /* If the architecture is using a GP register, we must
1165 conservatively assume that the target function makes use of it.
1166 The prologue of functions with nonlocal gotos must therefore
1167 initialize the GP register to the appropriate value, and we
1168 must then make sure that this value is live at the point
1169 of the jump. (Note that this doesn't necessarily apply
1170 to targets with a nonlocal_goto pattern; they are free
1171 to implement it in their own way. Note also that this is
1172 a no-op if the GP register is a global invariant.) */
1173 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
1174 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
1175 emit_use (pic_offset_table_rtx);
1176
1177 emit_indirect_jump (r_label);
1178 }
1179
1180 /* Search backwards to the jump insn and mark it as a
1181 non-local goto. */
1182 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1183 {
1184 if (JUMP_P (insn))
1185 {
1186 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1187 break;
1188 }
1189 else if (CALL_P (insn))
1190 break;
1191 }
1192
1193 return const0_rtx;
1194 }
1195
1196 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1197 (not all will be used on all machines) that was passed to __builtin_setjmp.
1198 It updates the stack pointer in that block to the current value. This is
1199 also called directly by the SJLJ exception handling code. */
1200
1201 void
1202 expand_builtin_update_setjmp_buf (rtx buf_addr)
1203 {
1204 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1205 rtx stack_save
1206 = gen_rtx_MEM (sa_mode,
1207 memory_address
1208 (sa_mode,
1209 plus_constant (Pmode, buf_addr,
1210 2 * GET_MODE_SIZE (Pmode))));
1211
1212 emit_stack_save (SAVE_NONLOCAL, &stack_save);
1213 }
1214
1215 /* Expand a call to __builtin_prefetch. For a target that does not support
1216 data prefetch, evaluate the memory address argument in case it has side
1217 effects. */
1218
1219 static void
1220 expand_builtin_prefetch (tree exp)
1221 {
1222 tree arg0, arg1, arg2;
1223 int nargs;
1224 rtx op0, op1, op2;
1225
1226 if (!validate_arglist (exp, POINTER_TYPE, 0))
1227 return;
1228
1229 arg0 = CALL_EXPR_ARG (exp, 0);
1230
1231 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1232 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1233 locality). */
1234 nargs = call_expr_nargs (exp);
1235 if (nargs > 1)
1236 arg1 = CALL_EXPR_ARG (exp, 1);
1237 else
1238 arg1 = integer_zero_node;
1239 if (nargs > 2)
1240 arg2 = CALL_EXPR_ARG (exp, 2);
1241 else
1242 arg2 = integer_three_node;
1243
1244 /* Argument 0 is an address. */
1245 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1246
1247 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1248 if (TREE_CODE (arg1) != INTEGER_CST)
1249 {
1250 error ("second argument to %<__builtin_prefetch%> must be a constant");
1251 arg1 = integer_zero_node;
1252 }
1253 op1 = expand_normal (arg1);
1254 /* Argument 1 must be either zero or one. */
1255 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1256 {
1257 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1258 " using zero");
1259 op1 = const0_rtx;
1260 }
1261
1262 /* Argument 2 (locality) must be a compile-time constant int. */
1263 if (TREE_CODE (arg2) != INTEGER_CST)
1264 {
1265 error ("third argument to %<__builtin_prefetch%> must be a constant");
1266 arg2 = integer_zero_node;
1267 }
1268 op2 = expand_normal (arg2);
1269 /* Argument 2 must be 0, 1, 2, or 3. */
1270 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1271 {
1272 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1273 op2 = const0_rtx;
1274 }
1275
1276 if (targetm.have_prefetch ())
1277 {
1278 struct expand_operand ops[3];
1279
1280 create_address_operand (&ops[0], op0);
1281 create_integer_operand (&ops[1], INTVAL (op1));
1282 create_integer_operand (&ops[2], INTVAL (op2));
1283 if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
1284 return;
1285 }
1286
1287 /* Don't do anything with direct references to volatile memory, but
1288 generate code to handle other side effects. */
1289 if (!MEM_P (op0) && side_effects_p (op0))
1290 emit_insn (op0);
1291 }
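/* The source forms this expands (for reference; the optional second
   and third arguments must be integer constants):

     __builtin_prefetch (p);           read prefetch, locality 3
     __builtin_prefetch (p, 1, 0);     prefetch for write, no locality  */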
1292
1293 /* Get a MEM rtx for expression EXP which is the address of an operand
1294 to be used in a string instruction (cmpstrsi, movmemsi, ...). LEN is
1295 the maximum length of the block of memory that might be accessed or
1296 NULL if unknown. */
1297
1298 static rtx
1299 get_memory_rtx (tree exp, tree len)
1300 {
1301 tree orig_exp = exp;
1302 rtx addr, mem;
1303
1304 /* When EXP is not a resolved SAVE_EXPR, MEM_ATTRS can still be derived
1305 from its expression; for expr->a.b only <variable>.a.b is recorded. */
1306 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1307 exp = TREE_OPERAND (exp, 0);
1308
1309 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1310 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1311
1312 /* Get an expression we can use to find the attributes to assign to MEM.
1313 First remove any nops. */
1314 while (CONVERT_EXPR_P (exp)
1315 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1316 exp = TREE_OPERAND (exp, 0);
1317
1318 /* Build a MEM_REF representing the whole accessed area as a byte blob
1319 (as builtin stringops may alias with anything). */
1320 exp = fold_build2 (MEM_REF,
1321 build_array_type (char_type_node,
1322 build_range_type (sizetype,
1323 size_one_node, len)),
1324 exp, build_int_cst (ptr_type_node, 0));
1325
1326 /* If the MEM_REF has no acceptable address, try to get the base object
1327 from the original address we got, and build an all-aliasing
1328 unknown-sized access to that one. */
1329 if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
1330 set_mem_attributes (mem, exp, 0);
1331 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1332 && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
1333 0))))
1334 {
1335 exp = build_fold_addr_expr (exp);
1336 exp = fold_build2 (MEM_REF,
1337 build_array_type (char_type_node,
1338 build_range_type (sizetype,
1339 size_zero_node,
1340 NULL)),
1341 exp, build_int_cst (ptr_type_node, 0));
1342 set_mem_attributes (mem, exp, 0);
1343 }
1344 set_mem_alias_set (mem, 0);
1345 return mem;
1346 }
1347 \f
1348 /* Built-in functions to perform an untyped call and return. */
1349
1350 #define apply_args_mode \
1351 (this_target_builtins->x_apply_args_mode)
1352 #define apply_result_mode \
1353 (this_target_builtins->x_apply_result_mode)
1354
1355 /* Return the size required for the block returned by __builtin_apply_args,
1356 and initialize apply_args_mode. */
1357
1358 static int
1359 apply_args_size (void)
1360 {
1361 static int size = -1;
1362 int align;
1363 unsigned int regno;
1364 machine_mode mode;
1365
1366 /* The values computed by this function never change. */
1367 if (size < 0)
1368 {
1369 /* The first value is the incoming arg-pointer. */
1370 size = GET_MODE_SIZE (Pmode);
1371
1372 /* The second value is the structure value address unless this is
1373 passed as an "invisible" first argument. */
1374 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1375 size += GET_MODE_SIZE (Pmode);
1376
1377 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1378 if (FUNCTION_ARG_REGNO_P (regno))
1379 {
1380 mode = targetm.calls.get_raw_arg_mode (regno);
1381
1382 gcc_assert (mode != VOIDmode);
1383
1384 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1385 if (size % align != 0)
1386 size = CEIL (size, align) * align;
1387 size += GET_MODE_SIZE (mode);
1388 apply_args_mode[regno] = mode;
1389 }
1390 else
1391 {
1392 apply_args_mode[regno] = VOIDmode;
1393 }
1394 }
1395 return size;
1396 }
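/* The resulting block layout (illustrative; padding between slots
   depends on each mode's alignment):

     offset 0                   incoming arg pointer (Pmode)
     +GET_MODE_SIZE (Pmode)     structure value address, unless it is
                                passed as an "invisible" first argument
     then, suitably aligned     one slot per argument register, in
                                register-number order.  */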
1397
1398 /* Return the size required for the block returned by __builtin_apply,
1399 and initialize apply_result_mode. */
1400
1401 static int
1402 apply_result_size (void)
1403 {
1404 static int size = -1;
1405 int align, regno;
1406 machine_mode mode;
1407
1408 /* The values computed by this function never change. */
1409 if (size < 0)
1410 {
1411 size = 0;
1412
1413 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1414 if (targetm.calls.function_value_regno_p (regno))
1415 {
1416 mode = targetm.calls.get_raw_result_mode (regno);
1417
1418 gcc_assert (mode != VOIDmode);
1419
1420 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1421 if (size % align != 0)
1422 size = CEIL (size, align) * align;
1423 size += GET_MODE_SIZE (mode);
1424 apply_result_mode[regno] = mode;
1425 }
1426 else
1427 apply_result_mode[regno] = VOIDmode;
1428
1429 /* Allow targets that use untyped_call and untyped_return to override
1430 the size so that machine-specific information can be stored here. */
1431 #ifdef APPLY_RESULT_SIZE
1432 size = APPLY_RESULT_SIZE;
1433 #endif
1434 }
1435 return size;
1436 }
1437
1438 /* Create a vector describing the result block RESULT. If SAVEP is true,
1439 the result block is used to save the values; otherwise it is used to
1440 restore the values. */
1441
1442 static rtx
1443 result_vector (int savep, rtx result)
1444 {
1445 int regno, size, align, nelts;
1446 machine_mode mode;
1447 rtx reg, mem;
1448 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1449
1450 size = nelts = 0;
1451 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1452 if ((mode = apply_result_mode[regno]) != VOIDmode)
1453 {
1454 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1455 if (size % align != 0)
1456 size = CEIL (size, align) * align;
1457 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1458 mem = adjust_address (result, mode, size);
1459 savevec[nelts++] = (savep
1460 ? gen_rtx_SET (mem, reg)
1461 : gen_rtx_SET (reg, mem));
1462 size += GET_MODE_SIZE (mode);
1463 }
1464 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1465 }
1466
1467 /* Save the state required to perform an untyped call with the same
1468 arguments as were passed to the current function. */
1469
1470 static rtx
1471 expand_builtin_apply_args_1 (void)
1472 {
1473 rtx registers, tem;
1474 int size, align, regno;
1475 machine_mode mode;
1476 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1477
1478 /* Create a block where the arg-pointer, structure value address,
1479 and argument registers can be saved. */
1480 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1481
1482 /* Walk past the arg-pointer and structure value address. */
1483 size = GET_MODE_SIZE (Pmode);
1484 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1485 size += GET_MODE_SIZE (Pmode);
1486
1487 /* Save each register used in calling a function to the block. */
1488 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1489 if ((mode = apply_args_mode[regno]) != VOIDmode)
1490 {
1491 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1492 if (size % align != 0)
1493 size = CEIL (size, align) * align;
1494
1495 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1496
1497 emit_move_insn (adjust_address (registers, mode, size), tem);
1498 size += GET_MODE_SIZE (mode);
1499 }
1500
1501 /* Save the arg pointer to the block. */
1502 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1503 /* We need the pointer as the caller actually passed the arguments to
1504 us, not as we might have pretended they were passed. Make sure it's
1505 a valid operand, as emit_move_insn isn't expected to handle a PLUS. */
1506 if (STACK_GROWS_DOWNWARD)
1507 tem
1508 = force_operand (plus_constant (Pmode, tem,
1509 crtl->args.pretend_args_size),
1510 NULL_RTX);
1511 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1512
1513 size = GET_MODE_SIZE (Pmode);
1514
1515 /* Save the structure value address unless this is passed as an
1516 "invisible" first argument. */
1517 if (struct_incoming_value)
1518 {
1519 emit_move_insn (adjust_address (registers, Pmode, size),
1520 copy_to_reg (struct_incoming_value));
1521 size += GET_MODE_SIZE (Pmode);
1522 }
1523
1524 /* Return the address of the block. */
1525 return copy_addr_to_reg (XEXP (registers, 0));
1526 }
1527
1528 /* __builtin_apply_args returns block of memory allocated on
1529 the stack into which is stored the arg pointer, structure
1530 value address, static chain, and all the registers that might
1531 possibly be used in performing a function call. The code is
1532 moved to the start of the function so the incoming values are
1533 saved. */
1534
1535 static rtx
1536 expand_builtin_apply_args (void)
1537 {
1538 /* Don't do __builtin_apply_args more than once in a function.
1539 Save the result of the first call and reuse it. */
1540 if (apply_args_value != 0)
1541 return apply_args_value;
1542 {
1543 /* When this function is called, it means that registers must be
1544 saved on entry to this function. So we migrate the
1545 call to the first insn of this function. */
1546 rtx temp;
1547
1548 start_sequence ();
1549 temp = expand_builtin_apply_args_1 ();
1550 rtx_insn *seq = get_insns ();
1551 end_sequence ();
1552
1553 apply_args_value = temp;
1554
1555 /* Put the insns after the NOTE that starts the function.
1556 If this is inside a start_sequence, make the outer-level insn
1557 chain current, so the code is placed at the start of the
1558 function. If internal_arg_pointer is a non-virtual pseudo,
1559 it needs to be placed after the function that initializes
1560 that pseudo. */
1561 push_topmost_sequence ();
1562 if (REG_P (crtl->args.internal_arg_pointer)
1563 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1564 emit_insn_before (seq, parm_birth_insn);
1565 else
1566 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1567 pop_topmost_sequence ();
1568 return temp;
1569 }
1570 }
1571
1572 /* Perform an untyped call and save the state required to perform an
1573 untyped return of whatever value was returned by the given function. */
1574
1575 static rtx
1576 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1577 {
1578 int size, align, regno;
1579 machine_mode mode;
1580 rtx incoming_args, result, reg, dest, src;
1581 rtx_call_insn *call_insn;
1582 rtx old_stack_level = 0;
1583 rtx call_fusage = 0;
1584 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1585
1586 arguments = convert_memory_address (Pmode, arguments);
1587
1588 /* Create a block where the return registers can be saved. */
1589 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1590
1591 /* Fetch the arg pointer from the ARGUMENTS block. */
1592 incoming_args = gen_reg_rtx (Pmode);
1593 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1594 if (!STACK_GROWS_DOWNWARD)
1595 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1596 incoming_args, 0, OPTAB_LIB_WIDEN);
1597
1598 /* Push a new argument block and copy the arguments. Do not allow
1599 the (potential) memcpy call below to interfere with our stack
1600 manipulations. */
1601 do_pending_stack_adjust ();
1602 NO_DEFER_POP;
1603
1604 /* Save the stack with nonlocal if available. */
1605 if (targetm.have_save_stack_nonlocal ())
1606 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1607 else
1608 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1609
1610 /* Allocate a block of memory onto the stack and copy the memory
1611 arguments to the outgoing arguments address. We can pass TRUE
1612 as the 4th argument because we just saved the stack pointer
1613 and will restore it right after the call. */
1614 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);
1615
1616 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1617 may have already set current_function_calls_alloca to true.
1618 current_function_calls_alloca won't be set if argsize is zero,
1619 so we have to guarantee need_drap is true here. */
1620 if (SUPPORTS_STACK_ALIGNMENT)
1621 crtl->need_drap = true;
1622
1623 dest = virtual_outgoing_args_rtx;
1624 if (!STACK_GROWS_DOWNWARD)
1625 {
1626 if (CONST_INT_P (argsize))
1627 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1628 else
1629 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1630 }
1631 dest = gen_rtx_MEM (BLKmode, dest);
1632 set_mem_align (dest, PARM_BOUNDARY);
1633 src = gen_rtx_MEM (BLKmode, incoming_args);
1634 set_mem_align (src, PARM_BOUNDARY);
1635 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1636
1637 /* Refer to the argument block. */
1638 apply_args_size ();
1639 arguments = gen_rtx_MEM (BLKmode, arguments);
1640 set_mem_align (arguments, PARM_BOUNDARY);
1641
1642 /* Walk past the arg-pointer and structure value address. */
1643 size = GET_MODE_SIZE (Pmode);
1644 if (struct_value)
1645 size += GET_MODE_SIZE (Pmode);
1646
1647 /* Restore each of the registers previously saved. Make USE insns
1648 for each of these registers for use in making the call. */
1649 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1650 if ((mode = apply_args_mode[regno]) != VOIDmode)
1651 {
1652 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1653 if (size % align != 0)
1654 size = CEIL (size, align) * align;
1655 reg = gen_rtx_REG (mode, regno);
1656 emit_move_insn (reg, adjust_address (arguments, mode, size));
1657 use_reg (&call_fusage, reg);
1658 size += GET_MODE_SIZE (mode);
1659 }
1660
1661 /* Restore the structure value address unless this is passed as an
1662 "invisible" first argument. */
1663 size = GET_MODE_SIZE (Pmode);
1664 if (struct_value)
1665 {
1666 rtx value = gen_reg_rtx (Pmode);
1667 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1668 emit_move_insn (struct_value, value);
1669 if (REG_P (struct_value))
1670 use_reg (&call_fusage, struct_value);
1671 size += GET_MODE_SIZE (Pmode);
1672 }
1673
1674 /* All arguments and registers used for the call are set up by now! */
1675 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1676
1677   /* Ensure the address is valid.  A SYMBOL_REF is already valid, and we
1678      don't want to load it into a register as an optimization, because
1679      prepare_call_address already did that if it should be done.  */
1680 if (GET_CODE (function) != SYMBOL_REF)
1681 function = memory_address (FUNCTION_MODE, function);
1682
1683 /* Generate the actual call instruction and save the return value. */
1684 if (targetm.have_untyped_call ())
1685 {
1686 rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
1687 emit_call_insn (targetm.gen_untyped_call (mem, result,
1688 result_vector (1, result)));
1689 }
1690 else
1691 #ifdef HAVE_call_value
1692 if (HAVE_call_value)
1693 {
1694 rtx valreg = 0;
1695
1696 /* Locate the unique return register. It is not possible to
1697 express a call that sets more than one return register using
1698 call_value; use untyped_call for that. In fact, untyped_call
1699 only needs to save the return registers in the given block. */
1700 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1701 if ((mode = apply_result_mode[regno]) != VOIDmode)
1702 {
1703 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1704
1705 valreg = gen_rtx_REG (mode, regno);
1706 }
1707
1708 emit_call_insn (GEN_CALL_VALUE (valreg,
1709 gen_rtx_MEM (FUNCTION_MODE, function),
1710 const0_rtx, NULL_RTX, const0_rtx));
1711
1712 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1713 }
1714 else
1715 #endif
1716 gcc_unreachable ();
1717
1718 /* Find the CALL insn we just emitted, and attach the register usage
1719 information. */
1720 call_insn = last_call_insn ();
1721 add_function_usage_to (call_insn, call_fusage);
1722
1723 /* Restore the stack. */
1724 if (targetm.have_save_stack_nonlocal ())
1725 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1726 else
1727 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1728 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1729
1730 OK_DEFER_POP;
1731
1732 /* Return the address of the result block. */
1733 result = copy_addr_to_reg (XEXP (result, 0));
1734 return convert_memory_address (ptr_mode, result);
1735 }
1736
1737 /* Perform an untyped return. */
1738
1739 static void
1740 expand_builtin_return (rtx result)
1741 {
1742 int size, align, regno;
1743 machine_mode mode;
1744 rtx reg;
1745 rtx_insn *call_fusage = 0;
1746
1747 result = convert_memory_address (Pmode, result);
1748
1749 apply_result_size ();
1750 result = gen_rtx_MEM (BLKmode, result);
1751
1752 if (targetm.have_untyped_return ())
1753 {
1754 rtx vector = result_vector (0, result);
1755 emit_jump_insn (targetm.gen_untyped_return (result, vector));
1756 emit_barrier ();
1757 return;
1758 }
1759
1760 /* Restore the return value and note that each value is used. */
1761 size = 0;
1762 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1763 if ((mode = apply_result_mode[regno]) != VOIDmode)
1764 {
1765 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1766 if (size % align != 0)
1767 size = CEIL (size, align) * align;
1768 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1769 emit_move_insn (reg, adjust_address (result, mode, size));
1770
1771 push_to_sequence (call_fusage);
1772 emit_use (reg);
1773 call_fusage = get_insns ();
1774 end_sequence ();
1775 size += GET_MODE_SIZE (mode);
1776 }
1777
1778 /* Put the USE insns before the return. */
1779 emit_insn (call_fusage);
1780
1781   /* Return whatever values were restored by jumping directly to the end
1782 of the function. */
1783 expand_naked_return ();
1784 }
1785
1786 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1787
1788 static enum type_class
1789 type_to_class (tree type)
1790 {
1791 switch (TREE_CODE (type))
1792 {
1793 case VOID_TYPE: return void_type_class;
1794 case INTEGER_TYPE: return integer_type_class;
1795 case ENUMERAL_TYPE: return enumeral_type_class;
1796 case BOOLEAN_TYPE: return boolean_type_class;
1797 case POINTER_TYPE: return pointer_type_class;
1798 case REFERENCE_TYPE: return reference_type_class;
1799 case OFFSET_TYPE: return offset_type_class;
1800 case REAL_TYPE: return real_type_class;
1801 case COMPLEX_TYPE: return complex_type_class;
1802 case FUNCTION_TYPE: return function_type_class;
1803 case METHOD_TYPE: return method_type_class;
1804 case RECORD_TYPE: return record_type_class;
1805 case UNION_TYPE:
1806 case QUAL_UNION_TYPE: return union_type_class;
1807 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1808 ? string_type_class : array_type_class);
1809 case LANG_TYPE: return lang_type_class;
1810 default: return no_type_class;
1811 }
1812 }
1813
1814 /* Expand a call EXP to __builtin_classify_type. */
1815
1816 static rtx
1817 expand_builtin_classify_type (tree exp)
1818 {
1819 if (call_expr_nargs (exp))
1820 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1821 return GEN_INT (no_type_class);
1822 }
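
/* Illustrative only: because only the type of the argument is inspected,
   this folds calls such as

     __builtin_classify_type (1.0)         to real_type_class
     __builtin_classify_type ((int *) 0)   to pointer_type_class

   using the type_class values laid out in typeclass.h.  */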
1823
1824 /* This helper macro, meant to be used in mathfn_built_in below,
1825 determines which among a set of three builtin math functions is
1826 appropriate for a given type mode. The `F' and `L' cases are
1827 automatically generated from the `double' case. */
1828 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1829 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1830 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1831 fcodel = BUILT_IN_MATHFN##L ; break;
1832 /* Similar to above, but appends _R after any F/L suffix. */
1833 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1834 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1835 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1836 fcodel = BUILT_IN_MATHFN##L_R ; break;
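
/* For example, CASE_MATHFN (BUILT_IN_SQRT) expands to:

     case BUILT_IN_SQRT: case BUILT_IN_SQRTF: case BUILT_IN_SQRTL:
       fcode = BUILT_IN_SQRT; fcodef = BUILT_IN_SQRTF;
       fcodel = BUILT_IN_SQRTL; break;  */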
1837
1838 /* Return the mathematical function equivalent to FN but operating directly on TYPE,
1839 if available. If IMPLICIT is true use the implicit builtin declaration,
1840 otherwise use the explicit declaration. If we can't do the conversion,
1841 return zero. */
1842
1843 static tree
1844 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit_p)
1845 {
1846 enum built_in_function fcode, fcodef, fcodel, fcode2;
1847
1848 switch (fn)
1849 {
1850 CASE_MATHFN (BUILT_IN_ACOS)
1851 CASE_MATHFN (BUILT_IN_ACOSH)
1852 CASE_MATHFN (BUILT_IN_ASIN)
1853 CASE_MATHFN (BUILT_IN_ASINH)
1854 CASE_MATHFN (BUILT_IN_ATAN)
1855 CASE_MATHFN (BUILT_IN_ATAN2)
1856 CASE_MATHFN (BUILT_IN_ATANH)
1857 CASE_MATHFN (BUILT_IN_CBRT)
1858 CASE_MATHFN (BUILT_IN_CEIL)
1859 CASE_MATHFN (BUILT_IN_CEXPI)
1860 CASE_MATHFN (BUILT_IN_COPYSIGN)
1861 CASE_MATHFN (BUILT_IN_COS)
1862 CASE_MATHFN (BUILT_IN_COSH)
1863 CASE_MATHFN (BUILT_IN_DREM)
1864 CASE_MATHFN (BUILT_IN_ERF)
1865 CASE_MATHFN (BUILT_IN_ERFC)
1866 CASE_MATHFN (BUILT_IN_EXP)
1867 CASE_MATHFN (BUILT_IN_EXP10)
1868 CASE_MATHFN (BUILT_IN_EXP2)
1869 CASE_MATHFN (BUILT_IN_EXPM1)
1870 CASE_MATHFN (BUILT_IN_FABS)
1871 CASE_MATHFN (BUILT_IN_FDIM)
1872 CASE_MATHFN (BUILT_IN_FLOOR)
1873 CASE_MATHFN (BUILT_IN_FMA)
1874 CASE_MATHFN (BUILT_IN_FMAX)
1875 CASE_MATHFN (BUILT_IN_FMIN)
1876 CASE_MATHFN (BUILT_IN_FMOD)
1877 CASE_MATHFN (BUILT_IN_FREXP)
1878 CASE_MATHFN (BUILT_IN_GAMMA)
1879 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1880 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1881 CASE_MATHFN (BUILT_IN_HYPOT)
1882 CASE_MATHFN (BUILT_IN_ILOGB)
1883 CASE_MATHFN (BUILT_IN_ICEIL)
1884 CASE_MATHFN (BUILT_IN_IFLOOR)
1885 CASE_MATHFN (BUILT_IN_INF)
1886 CASE_MATHFN (BUILT_IN_IRINT)
1887 CASE_MATHFN (BUILT_IN_IROUND)
1888 CASE_MATHFN (BUILT_IN_ISINF)
1889 CASE_MATHFN (BUILT_IN_J0)
1890 CASE_MATHFN (BUILT_IN_J1)
1891 CASE_MATHFN (BUILT_IN_JN)
1892 CASE_MATHFN (BUILT_IN_LCEIL)
1893 CASE_MATHFN (BUILT_IN_LDEXP)
1894 CASE_MATHFN (BUILT_IN_LFLOOR)
1895 CASE_MATHFN (BUILT_IN_LGAMMA)
1896 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1897 CASE_MATHFN (BUILT_IN_LLCEIL)
1898 CASE_MATHFN (BUILT_IN_LLFLOOR)
1899 CASE_MATHFN (BUILT_IN_LLRINT)
1900 CASE_MATHFN (BUILT_IN_LLROUND)
1901 CASE_MATHFN (BUILT_IN_LOG)
1902 CASE_MATHFN (BUILT_IN_LOG10)
1903 CASE_MATHFN (BUILT_IN_LOG1P)
1904 CASE_MATHFN (BUILT_IN_LOG2)
1905 CASE_MATHFN (BUILT_IN_LOGB)
1906 CASE_MATHFN (BUILT_IN_LRINT)
1907 CASE_MATHFN (BUILT_IN_LROUND)
1908 CASE_MATHFN (BUILT_IN_MODF)
1909 CASE_MATHFN (BUILT_IN_NAN)
1910 CASE_MATHFN (BUILT_IN_NANS)
1911 CASE_MATHFN (BUILT_IN_NEARBYINT)
1912 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1913 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1914 CASE_MATHFN (BUILT_IN_POW)
1915 CASE_MATHFN (BUILT_IN_POWI)
1916 CASE_MATHFN (BUILT_IN_POW10)
1917 CASE_MATHFN (BUILT_IN_REMAINDER)
1918 CASE_MATHFN (BUILT_IN_REMQUO)
1919 CASE_MATHFN (BUILT_IN_RINT)
1920 CASE_MATHFN (BUILT_IN_ROUND)
1921 CASE_MATHFN (BUILT_IN_SCALB)
1922 CASE_MATHFN (BUILT_IN_SCALBLN)
1923 CASE_MATHFN (BUILT_IN_SCALBN)
1924 CASE_MATHFN (BUILT_IN_SIGNBIT)
1925 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1926 CASE_MATHFN (BUILT_IN_SIN)
1927 CASE_MATHFN (BUILT_IN_SINCOS)
1928 CASE_MATHFN (BUILT_IN_SINH)
1929 CASE_MATHFN (BUILT_IN_SQRT)
1930 CASE_MATHFN (BUILT_IN_TAN)
1931 CASE_MATHFN (BUILT_IN_TANH)
1932 CASE_MATHFN (BUILT_IN_TGAMMA)
1933 CASE_MATHFN (BUILT_IN_TRUNC)
1934 CASE_MATHFN (BUILT_IN_Y0)
1935 CASE_MATHFN (BUILT_IN_Y1)
1936 CASE_MATHFN (BUILT_IN_YN)
1937
1938 default:
1939 return NULL_TREE;
1940 }
1941
1942 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1943 fcode2 = fcode;
1944 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1945 fcode2 = fcodef;
1946 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1947 fcode2 = fcodel;
1948 else
1949 return NULL_TREE;
1950
1951 if (implicit_p && !builtin_decl_implicit_p (fcode2))
1952 return NULL_TREE;
1953
1954 return builtin_decl_explicit (fcode2);
1955 }
1956
1957 /* Like mathfn_built_in_1(), but always use the implicit builtin declarations.  */
1958
1959 tree
1960 mathfn_built_in (tree type, enum built_in_function fn)
1961 {
1962 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1963 }
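
/* Illustrative only: mathfn_built_in (float_type_node, BUILT_IN_SIN)
   returns the decl for sinf, or NULL_TREE if that builtin is not
   implicitly available for the target.  */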
1964
1965 /* If errno must be maintained, expand the RTL to check if the result,
1966 TARGET, of a built-in function call, EXP, is NaN, and if so set
1967 errno to EDOM. */
1968
1969 static void
1970 expand_errno_check (tree exp, rtx target)
1971 {
1972 rtx_code_label *lab = gen_label_rtx ();
1973
1974 /* Test the result; if it is NaN, set errno=EDOM because
1975 the argument was not in the domain. */
1976 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
1977 NULL_RTX, NULL, lab,
1978 /* The jump is very likely. */
1979 REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));
1980
1981 #ifdef TARGET_EDOM
1982 /* If this built-in doesn't throw an exception, set errno directly. */
1983 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1984 {
1985 #ifdef GEN_ERRNO_RTX
1986 rtx errno_rtx = GEN_ERRNO_RTX;
1987 #else
1988 rtx errno_rtx
1989 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1990 #endif
1991 emit_move_insn (errno_rtx,
1992 gen_int_mode (TARGET_EDOM, GET_MODE (errno_rtx)));
1993 emit_label (lab);
1994 return;
1995 }
1996 #endif
1997
1998 /* Make sure the library call isn't expanded as a tail call. */
1999 CALL_EXPR_TAILCALL (exp) = 0;
2000
2001 /* We can't set errno=EDOM directly; let the library call do it.
2002 Pop the arguments right away in case the call gets deleted. */
2003 NO_DEFER_POP;
2004 expand_call (exp, target, 0);
2005 OK_DEFER_POP;
2006 emit_label (lab);
2007 }
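
/* In C terms, the self-comparison above amounts to

     if (result != result)   // true only for a NaN
       errno = EDOM;

   relying on the IEEE rule that a NaN compares unequal to itself;
   the EQ jump skips the errno store for any non-NaN result.  */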
2008
2009 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
2010 Return NULL_RTX if a normal call should be emitted rather than expanding
2011 the function in-line. EXP is the expression that is a call to the builtin
2012 function; if convenient, the result should be placed in TARGET.
2013 SUBTARGET may be used as the target for computing one of EXP's operands. */
2014
2015 static rtx
2016 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
2017 {
2018 optab builtin_optab;
2019 rtx op0;
2020 rtx_insn *insns;
2021 tree fndecl = get_callee_fndecl (exp);
2022 machine_mode mode;
2023 bool errno_set = false;
2024 bool try_widening = false;
2025 tree arg;
2026
2027 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2028 return NULL_RTX;
2029
2030 arg = CALL_EXPR_ARG (exp, 0);
2031
2032 switch (DECL_FUNCTION_CODE (fndecl))
2033 {
2034 CASE_FLT_FN (BUILT_IN_SQRT):
2035 errno_set = ! tree_expr_nonnegative_p (arg);
2036 try_widening = true;
2037 builtin_optab = sqrt_optab;
2038 break;
2039 CASE_FLT_FN (BUILT_IN_EXP):
2040 errno_set = true; builtin_optab = exp_optab; break;
2041 CASE_FLT_FN (BUILT_IN_EXP10):
2042 CASE_FLT_FN (BUILT_IN_POW10):
2043 errno_set = true; builtin_optab = exp10_optab; break;
2044 CASE_FLT_FN (BUILT_IN_EXP2):
2045 errno_set = true; builtin_optab = exp2_optab; break;
2046 CASE_FLT_FN (BUILT_IN_EXPM1):
2047 errno_set = true; builtin_optab = expm1_optab; break;
2048 CASE_FLT_FN (BUILT_IN_LOGB):
2049 errno_set = true; builtin_optab = logb_optab; break;
2050 CASE_FLT_FN (BUILT_IN_LOG):
2051 errno_set = true; builtin_optab = log_optab; break;
2052 CASE_FLT_FN (BUILT_IN_LOG10):
2053 errno_set = true; builtin_optab = log10_optab; break;
2054 CASE_FLT_FN (BUILT_IN_LOG2):
2055 errno_set = true; builtin_optab = log2_optab; break;
2056 CASE_FLT_FN (BUILT_IN_LOG1P):
2057 errno_set = true; builtin_optab = log1p_optab; break;
2058 CASE_FLT_FN (BUILT_IN_ASIN):
2059 builtin_optab = asin_optab; break;
2060 CASE_FLT_FN (BUILT_IN_ACOS):
2061 builtin_optab = acos_optab; break;
2062 CASE_FLT_FN (BUILT_IN_TAN):
2063 builtin_optab = tan_optab; break;
2064 CASE_FLT_FN (BUILT_IN_ATAN):
2065 builtin_optab = atan_optab; break;
2066 CASE_FLT_FN (BUILT_IN_FLOOR):
2067 builtin_optab = floor_optab; break;
2068 CASE_FLT_FN (BUILT_IN_CEIL):
2069 builtin_optab = ceil_optab; break;
2070 CASE_FLT_FN (BUILT_IN_TRUNC):
2071 builtin_optab = btrunc_optab; break;
2072 CASE_FLT_FN (BUILT_IN_ROUND):
2073 builtin_optab = round_optab; break;
2074 CASE_FLT_FN (BUILT_IN_NEARBYINT):
2075 builtin_optab = nearbyint_optab;
2076 if (flag_trapping_math)
2077 break;
2078 /* Else fallthrough and expand as rint. */
2079 CASE_FLT_FN (BUILT_IN_RINT):
2080 builtin_optab = rint_optab; break;
2081 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
2082 builtin_optab = significand_optab; break;
2083 default:
2084 gcc_unreachable ();
2085 }
2086
2087 /* Make a suitable register to place result in. */
2088 mode = TYPE_MODE (TREE_TYPE (exp));
2089
2090 if (! flag_errno_math || ! HONOR_NANS (mode))
2091 errno_set = false;
2092
2093 /* Before working hard, check whether the instruction is available, but try
2094 to widen the mode for specific operations. */
2095 if ((optab_handler (builtin_optab, mode) != CODE_FOR_nothing
2096 || (try_widening && !excess_precision_type (TREE_TYPE (exp))))
2097 && (!errno_set || !optimize_insn_for_size_p ()))
2098 {
2099 rtx result = gen_reg_rtx (mode);
2100
2101 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2102 need to expand the argument again. This way, we will not perform
2103 	 side-effects more than once.  */
2104 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2105
2106 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2107
2108 start_sequence ();
2109
2110 /* Compute into RESULT.
2111 Set RESULT to wherever the result comes back. */
2112 result = expand_unop (mode, builtin_optab, op0, result, 0);
2113
2114 if (result != 0)
2115 {
2116 if (errno_set)
2117 expand_errno_check (exp, result);
2118
2119 /* Output the entire sequence. */
2120 insns = get_insns ();
2121 end_sequence ();
2122 emit_insn (insns);
2123 return result;
2124 }
2125
2126 /* If we were unable to expand via the builtin, stop the sequence
2127 (without outputting the insns) and call to the library function
2128 with the stabilized argument list. */
2129 end_sequence ();
2130 }
2131
2132 return expand_call (exp, target, target == const0_rtx);
2133 }
2134
2135 /* Expand a call to the builtin binary math functions (pow and atan2).
2136 Return NULL_RTX if a normal call should be emitted rather than expanding the
2137 function in-line. EXP is the expression that is a call to the builtin
2138 function; if convenient, the result should be placed in TARGET.
2139 SUBTARGET may be used as the target for computing one of EXP's
2140 operands. */
2141
2142 static rtx
2143 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2144 {
2145 optab builtin_optab;
2146 rtx op0, op1, result;
2147 rtx_insn *insns;
2148 int op1_type = REAL_TYPE;
2149 tree fndecl = get_callee_fndecl (exp);
2150 tree arg0, arg1;
2151 machine_mode mode;
2152 bool errno_set = true;
2153
2154 switch (DECL_FUNCTION_CODE (fndecl))
2155 {
2156 CASE_FLT_FN (BUILT_IN_SCALBN):
2157 CASE_FLT_FN (BUILT_IN_SCALBLN):
2158 CASE_FLT_FN (BUILT_IN_LDEXP):
2159 op1_type = INTEGER_TYPE;
2160 default:
2161 break;
2162 }
2163
2164 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2165 return NULL_RTX;
2166
2167 arg0 = CALL_EXPR_ARG (exp, 0);
2168 arg1 = CALL_EXPR_ARG (exp, 1);
2169
2170 switch (DECL_FUNCTION_CODE (fndecl))
2171 {
2172 CASE_FLT_FN (BUILT_IN_POW):
2173 builtin_optab = pow_optab; break;
2174 CASE_FLT_FN (BUILT_IN_ATAN2):
2175 builtin_optab = atan2_optab; break;
2176 CASE_FLT_FN (BUILT_IN_SCALB):
2177 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2178 return 0;
2179 builtin_optab = scalb_optab; break;
2180 CASE_FLT_FN (BUILT_IN_SCALBN):
2181 CASE_FLT_FN (BUILT_IN_SCALBLN):
2182 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2183 return 0;
2184 /* Fall through... */
2185 CASE_FLT_FN (BUILT_IN_LDEXP):
2186 builtin_optab = ldexp_optab; break;
2187 CASE_FLT_FN (BUILT_IN_FMOD):
2188 builtin_optab = fmod_optab; break;
2189 CASE_FLT_FN (BUILT_IN_REMAINDER):
2190 CASE_FLT_FN (BUILT_IN_DREM):
2191 builtin_optab = remainder_optab; break;
2192 default:
2193 gcc_unreachable ();
2194 }
2195
2196 /* Make a suitable register to place result in. */
2197 mode = TYPE_MODE (TREE_TYPE (exp));
2198
2199 /* Before working hard, check whether the instruction is available. */
2200 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2201 return NULL_RTX;
2202
2203 result = gen_reg_rtx (mode);
2204
2205 if (! flag_errno_math || ! HONOR_NANS (mode))
2206 errno_set = false;
2207
2208 if (errno_set && optimize_insn_for_size_p ())
2209 return 0;
2210
2211 /* Always stabilize the argument list. */
2212 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2213 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2214
2215 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2216 op1 = expand_normal (arg1);
2217
2218 start_sequence ();
2219
2220 /* Compute into RESULT.
2221 Set RESULT to wherever the result comes back. */
2222 result = expand_binop (mode, builtin_optab, op0, op1,
2223 result, 0, OPTAB_DIRECT);
2224
2225 /* If we were unable to expand via the builtin, stop the sequence
2226 (without outputting the insns) and call to the library function
2227 with the stabilized argument list. */
2228 if (result == 0)
2229 {
2230 end_sequence ();
2231 return expand_call (exp, target, target == const0_rtx);
2232 }
2233
2234 if (errno_set)
2235 expand_errno_check (exp, result);
2236
2237 /* Output the entire sequence. */
2238 insns = get_insns ();
2239 end_sequence ();
2240 emit_insn (insns);
2241
2242 return result;
2243 }
2244
2245 /* Expand a call to the builtin trinary math functions (fma).
2246 Return NULL_RTX if a normal call should be emitted rather than expanding the
2247 function in-line. EXP is the expression that is a call to the builtin
2248 function; if convenient, the result should be placed in TARGET.
2249 SUBTARGET may be used as the target for computing one of EXP's
2250 operands. */
2251
2252 static rtx
2253 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2254 {
2255 optab builtin_optab;
2256 rtx op0, op1, op2, result;
2257 rtx_insn *insns;
2258 tree fndecl = get_callee_fndecl (exp);
2259 tree arg0, arg1, arg2;
2260 machine_mode mode;
2261
2262 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2263 return NULL_RTX;
2264
2265 arg0 = CALL_EXPR_ARG (exp, 0);
2266 arg1 = CALL_EXPR_ARG (exp, 1);
2267 arg2 = CALL_EXPR_ARG (exp, 2);
2268
2269 switch (DECL_FUNCTION_CODE (fndecl))
2270 {
2271 CASE_FLT_FN (BUILT_IN_FMA):
2272 builtin_optab = fma_optab; break;
2273 default:
2274 gcc_unreachable ();
2275 }
2276
2277 /* Make a suitable register to place result in. */
2278 mode = TYPE_MODE (TREE_TYPE (exp));
2279
2280 /* Before working hard, check whether the instruction is available. */
2281 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2282 return NULL_RTX;
2283
2284 result = gen_reg_rtx (mode);
2285
2286 /* Always stabilize the argument list. */
2287 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2288 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2289 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2290
2291 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2292 op1 = expand_normal (arg1);
2293 op2 = expand_normal (arg2);
2294
2295 start_sequence ();
2296
2297 /* Compute into RESULT.
2298 Set RESULT to wherever the result comes back. */
2299 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2300 result, 0);
2301
2302 /* If we were unable to expand via the builtin, stop the sequence
2303 (without outputting the insns) and call to the library function
2304 with the stabilized argument list. */
2305 if (result == 0)
2306 {
2307 end_sequence ();
2308 return expand_call (exp, target, target == const0_rtx);
2309 }
2310
2311 /* Output the entire sequence. */
2312 insns = get_insns ();
2313 end_sequence ();
2314 emit_insn (insns);
2315
2316 return result;
2317 }
2318
2319 /* Expand a call to the builtin sin and cos math functions.
2320 Return NULL_RTX if a normal call should be emitted rather than expanding the
2321 function in-line. EXP is the expression that is a call to the builtin
2322 function; if convenient, the result should be placed in TARGET.
2323 SUBTARGET may be used as the target for computing one of EXP's
2324 operands. */
2325
2326 static rtx
2327 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2328 {
2329 optab builtin_optab;
2330 rtx op0;
2331 rtx_insn *insns;
2332 tree fndecl = get_callee_fndecl (exp);
2333 machine_mode mode;
2334 tree arg;
2335
2336 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2337 return NULL_RTX;
2338
2339 arg = CALL_EXPR_ARG (exp, 0);
2340
2341 switch (DECL_FUNCTION_CODE (fndecl))
2342 {
2343 CASE_FLT_FN (BUILT_IN_SIN):
2344 CASE_FLT_FN (BUILT_IN_COS):
2345 builtin_optab = sincos_optab; break;
2346 default:
2347 gcc_unreachable ();
2348 }
2349
2350 /* Make a suitable register to place result in. */
2351 mode = TYPE_MODE (TREE_TYPE (exp));
2352
2353 /* Check if sincos insn is available, otherwise fallback
2354 to sin or cos insn. */
2355 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2356 switch (DECL_FUNCTION_CODE (fndecl))
2357 {
2358 CASE_FLT_FN (BUILT_IN_SIN):
2359 builtin_optab = sin_optab; break;
2360 CASE_FLT_FN (BUILT_IN_COS):
2361 builtin_optab = cos_optab; break;
2362 default:
2363 gcc_unreachable ();
2364 }
2365
2366 /* Before working hard, check whether the instruction is available. */
2367 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2368 {
2369 rtx result = gen_reg_rtx (mode);
2370
2371 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2372 need to expand the argument again. This way, we will not perform
2373 	 side-effects more than once.  */
2374 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2375
2376 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2377
2378 start_sequence ();
2379
2380 /* Compute into RESULT.
2381 Set RESULT to wherever the result comes back. */
2382 if (builtin_optab == sincos_optab)
2383 {
2384 int ok;
2385
2386 switch (DECL_FUNCTION_CODE (fndecl))
2387 {
2388 CASE_FLT_FN (BUILT_IN_SIN):
2389 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2390 break;
2391 CASE_FLT_FN (BUILT_IN_COS):
2392 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2393 break;
2394 default:
2395 gcc_unreachable ();
2396 }
2397 gcc_assert (ok);
2398 }
2399 else
2400 result = expand_unop (mode, builtin_optab, op0, result, 0);
2401
2402 if (result != 0)
2403 {
2404 /* Output the entire sequence. */
2405 insns = get_insns ();
2406 end_sequence ();
2407 emit_insn (insns);
2408 return result;
2409 }
2410
2411 /* If we were unable to expand via the builtin, stop the sequence
2412 (without outputting the insns) and call to the library function
2413 with the stabilized argument list. */
2414 end_sequence ();
2415 }
2416
2417 return expand_call (exp, target, target == const0_rtx);
2418 }
2419
2420 /* Given an interclass math builtin decl FNDECL and its argument ARG
2421 return an RTL instruction code that implements the functionality.
2422 If that isn't possible or available return CODE_FOR_nothing. */
2423
2424 static enum insn_code
2425 interclass_mathfn_icode (tree arg, tree fndecl)
2426 {
2427 bool errno_set = false;
2428 optab builtin_optab = unknown_optab;
2429 machine_mode mode;
2430
2431 switch (DECL_FUNCTION_CODE (fndecl))
2432 {
2433 CASE_FLT_FN (BUILT_IN_ILOGB):
2434 errno_set = true; builtin_optab = ilogb_optab; break;
2435 CASE_FLT_FN (BUILT_IN_ISINF):
2436 builtin_optab = isinf_optab; break;
2437 case BUILT_IN_ISNORMAL:
2438 case BUILT_IN_ISFINITE:
2439 CASE_FLT_FN (BUILT_IN_FINITE):
2440 case BUILT_IN_FINITED32:
2441 case BUILT_IN_FINITED64:
2442 case BUILT_IN_FINITED128:
2443 case BUILT_IN_ISINFD32:
2444 case BUILT_IN_ISINFD64:
2445 case BUILT_IN_ISINFD128:
2446 /* These builtins have no optabs (yet). */
2447 break;
2448 default:
2449 gcc_unreachable ();
2450 }
2451
2452 /* There's no easy way to detect the case we need to set EDOM. */
2453 if (flag_errno_math && errno_set)
2454 return CODE_FOR_nothing;
2455
2456 /* Optab mode depends on the mode of the input argument. */
2457 mode = TYPE_MODE (TREE_TYPE (arg));
2458
2459 if (builtin_optab)
2460 return optab_handler (builtin_optab, mode);
2461 return CODE_FOR_nothing;
2462 }
2463
2464 /* Expand a call to one of the builtin math functions that operate on
2465 floating point argument and output an integer result (ilogb, isinf,
2466 isnan, etc).
2467 Return 0 if a normal call should be emitted rather than expanding the
2468 function in-line. EXP is the expression that is a call to the builtin
2469 function; if convenient, the result should be placed in TARGET. */
2470
2471 static rtx
2472 expand_builtin_interclass_mathfn (tree exp, rtx target)
2473 {
2474 enum insn_code icode = CODE_FOR_nothing;
2475 rtx op0;
2476 tree fndecl = get_callee_fndecl (exp);
2477 machine_mode mode;
2478 tree arg;
2479
2480 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2481 return NULL_RTX;
2482
2483 arg = CALL_EXPR_ARG (exp, 0);
2484 icode = interclass_mathfn_icode (arg, fndecl);
2485 mode = TYPE_MODE (TREE_TYPE (arg));
2486
2487 if (icode != CODE_FOR_nothing)
2488 {
2489 struct expand_operand ops[1];
2490 rtx_insn *last = get_last_insn ();
2491 tree orig_arg = arg;
2492
2493 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2494 need to expand the argument again. This way, we will not perform
2495 	 side-effects more than once.  */
2496 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2497
2498 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2499
2500 if (mode != GET_MODE (op0))
2501 op0 = convert_to_mode (mode, op0, 0);
2502
2503 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2504 if (maybe_legitimize_operands (icode, 0, 1, ops)
2505 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2506 return ops[0].value;
2507
2508 delete_insns_since (last);
2509 CALL_EXPR_ARG (exp, 0) = orig_arg;
2510 }
2511
2512 return NULL_RTX;
2513 }
2514
2515 /* Expand a call to the builtin sincos math function.
2516 Return NULL_RTX if a normal call should be emitted rather than expanding the
2517 function in-line. EXP is the expression that is a call to the builtin
2518 function. */
2519
2520 static rtx
2521 expand_builtin_sincos (tree exp)
2522 {
2523 rtx op0, op1, op2, target1, target2;
2524 machine_mode mode;
2525 tree arg, sinp, cosp;
2526 int result;
2527 location_t loc = EXPR_LOCATION (exp);
2528 tree alias_type, alias_off;
2529
2530 if (!validate_arglist (exp, REAL_TYPE,
2531 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2532 return NULL_RTX;
2533
2534 arg = CALL_EXPR_ARG (exp, 0);
2535 sinp = CALL_EXPR_ARG (exp, 1);
2536 cosp = CALL_EXPR_ARG (exp, 2);
2537
2538 /* Make a suitable register to place result in. */
2539 mode = TYPE_MODE (TREE_TYPE (arg));
2540
2541 /* Check if sincos insn is available, otherwise emit the call. */
2542 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2543 return NULL_RTX;
2544
2545 target1 = gen_reg_rtx (mode);
2546 target2 = gen_reg_rtx (mode);
2547
2548 op0 = expand_normal (arg);
2549 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2550 alias_off = build_int_cst (alias_type, 0);
2551 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2552 sinp, alias_off));
2553 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2554 cosp, alias_off));
2555
2556 /* Compute into target1 and target2.
2557 Set TARGET to wherever the result comes back. */
2558 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2559 gcc_assert (result);
2560
2561 /* Move target1 and target2 to the memory locations indicated
2562 by op1 and op2. */
2563 emit_move_insn (op1, target1);
2564 emit_move_insn (op2, target2);
2565
2566 return const0_rtx;
2567 }
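
/* Illustrative only: this expands user code such as

     double s, c;
     sincos (x, &s, &c);

   into a single sincos instruction when the target provides one, then
   stores the two results through the given pointers.  */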
2568
2569 /* Expand a call to the internal cexpi builtin to the sincos math function.
2570 EXP is the expression that is a call to the builtin function; if convenient,
2571 the result should be placed in TARGET. */
2572
2573 static rtx
2574 expand_builtin_cexpi (tree exp, rtx target)
2575 {
2576 tree fndecl = get_callee_fndecl (exp);
2577 tree arg, type;
2578 machine_mode mode;
2579 rtx op0, op1, op2;
2580 location_t loc = EXPR_LOCATION (exp);
2581
2582 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2583 return NULL_RTX;
2584
2585 arg = CALL_EXPR_ARG (exp, 0);
2586 type = TREE_TYPE (arg);
2587 mode = TYPE_MODE (TREE_TYPE (arg));
2588
2589   /* Try expanding via a sincos optab; fall back to emitting a libcall
2590      to sincos or cexp.  We are sure to have sincos or cexp because cexpi
2591      is only generated from sincos or cexp, or when one of them is available.  */
2592 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2593 {
2594 op1 = gen_reg_rtx (mode);
2595 op2 = gen_reg_rtx (mode);
2596
2597 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2598
2599 /* Compute into op1 and op2. */
2600 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2601 }
2602 else if (targetm.libc_has_function (function_sincos))
2603 {
2604 tree call, fn = NULL_TREE;
2605 tree top1, top2;
2606 rtx op1a, op2a;
2607
2608 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2609 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2610 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2611 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2612 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2613 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2614 else
2615 gcc_unreachable ();
2616
2617 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2618 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2619 op1a = copy_addr_to_reg (XEXP (op1, 0));
2620 op2a = copy_addr_to_reg (XEXP (op2, 0));
2621 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2622 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2623
2624 /* Make sure not to fold the sincos call again. */
2625 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2626 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2627 call, 3, arg, top1, top2));
2628 }
2629 else
2630 {
2631 tree call, fn = NULL_TREE, narg;
2632 tree ctype = build_complex_type (type);
2633
2634 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2635 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2636 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2637 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2638 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2639 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2640 else
2641 gcc_unreachable ();
2642
2643       /* If we don't have a decl for cexp, create one.  This is the
2644 	 friendliest fallback when the user calls __builtin_cexpi on a
2645 	 target without full C99 function support.  */
2646 if (fn == NULL_TREE)
2647 {
2648 tree fntype;
2649 const char *name = NULL;
2650
2651 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2652 name = "cexpf";
2653 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2654 name = "cexp";
2655 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2656 name = "cexpl";
2657
2658 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2659 fn = build_fn_decl (name, fntype);
2660 }
2661
2662 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2663 build_real (type, dconst0), arg);
2664
2665 /* Make sure not to fold the cexp call again. */
2666 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2667 return expand_expr (build_call_nary (ctype, call, 1, narg),
2668 target, VOIDmode, EXPAND_NORMAL);
2669 }
2670
2671 /* Now build the proper return type. */
2672 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2673 make_tree (TREE_TYPE (arg), op2),
2674 make_tree (TREE_TYPE (arg), op1)),
2675 target, VOIDmode, EXPAND_NORMAL);
2676 }
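
/* For reference: the expansions above implement the identity
   __builtin_cexpi (x) == cexp (I * x) == cos (x) + I * sin (x),
   which is why sincos and cexp are interchangeable fallbacks.  */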
2677
2678 /* Conveniently construct a function call expression. FNDECL names the
2679 function to be called, N is the number of arguments, and the "..."
2680    parameters are the argument expressions.  Unlike build_call_expr,
2681    this doesn't fold the call, so it will always return a CALL_EXPR.  */
2682
2683 static tree
2684 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2685 {
2686 va_list ap;
2687 tree fntype = TREE_TYPE (fndecl);
2688 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2689
2690 va_start (ap, n);
2691 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2692 va_end (ap);
2693 SET_EXPR_LOCATION (fn, loc);
2694 return fn;
2695 }
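
/* Illustrative use, mirroring calls later in this file:

     tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
     tree call = build_call_nofold_loc (loc, fn, 3, dest, src, len);

   yields an unfolded CALL_EXPR to memcpy with the three arguments.  */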
2696
2697 /* Expand a call to one of the builtin rounding functions gcc defines
2698 as an extension (lfloor and lceil). As these are gcc extensions we
2699 do not need to worry about setting errno to EDOM.
2700 If expanding via optab fails, lower expression to (int)(floor(x)).
2701 EXP is the expression that is a call to the builtin function;
2702 if convenient, the result should be placed in TARGET. */
2703
2704 static rtx
2705 expand_builtin_int_roundingfn (tree exp, rtx target)
2706 {
2707 convert_optab builtin_optab;
2708 rtx op0, tmp;
2709 rtx_insn *insns;
2710 tree fndecl = get_callee_fndecl (exp);
2711 enum built_in_function fallback_fn;
2712 tree fallback_fndecl;
2713 machine_mode mode;
2714 tree arg;
2715
2716 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2717 gcc_unreachable ();
2718
2719 arg = CALL_EXPR_ARG (exp, 0);
2720
2721 switch (DECL_FUNCTION_CODE (fndecl))
2722 {
2723 CASE_FLT_FN (BUILT_IN_ICEIL):
2724 CASE_FLT_FN (BUILT_IN_LCEIL):
2725 CASE_FLT_FN (BUILT_IN_LLCEIL):
2726 builtin_optab = lceil_optab;
2727 fallback_fn = BUILT_IN_CEIL;
2728 break;
2729
2730 CASE_FLT_FN (BUILT_IN_IFLOOR):
2731 CASE_FLT_FN (BUILT_IN_LFLOOR):
2732 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2733 builtin_optab = lfloor_optab;
2734 fallback_fn = BUILT_IN_FLOOR;
2735 break;
2736
2737 default:
2738 gcc_unreachable ();
2739 }
2740
2741 /* Make a suitable register to place result in. */
2742 mode = TYPE_MODE (TREE_TYPE (exp));
2743
2744 target = gen_reg_rtx (mode);
2745
2746 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2747 need to expand the argument again. This way, we will not perform
2748 	 side-effects more than once.  */
2749 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2750
2751 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2752
2753 start_sequence ();
2754
2755 /* Compute into TARGET. */
2756 if (expand_sfix_optab (target, op0, builtin_optab))
2757 {
2758 /* Output the entire sequence. */
2759 insns = get_insns ();
2760 end_sequence ();
2761 emit_insn (insns);
2762 return target;
2763 }
2764
2765 /* If we were unable to expand via the builtin, stop the sequence
2766 (without outputting the insns). */
2767 end_sequence ();
2768
2769 /* Fall back to floating point rounding optab. */
2770 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2771
2772 /* For non-C99 targets we may end up without a fallback fndecl here
2773 if the user called __builtin_lfloor directly. In this case emit
2774 a call to the floor/ceil variants nevertheless. This should result
2775      in the best user experience for targets lacking full C99 support.  */
2776 if (fallback_fndecl == NULL_TREE)
2777 {
2778 tree fntype;
2779 const char *name = NULL;
2780
2781 switch (DECL_FUNCTION_CODE (fndecl))
2782 {
2783 case BUILT_IN_ICEIL:
2784 case BUILT_IN_LCEIL:
2785 case BUILT_IN_LLCEIL:
2786 name = "ceil";
2787 break;
2788 case BUILT_IN_ICEILF:
2789 case BUILT_IN_LCEILF:
2790 case BUILT_IN_LLCEILF:
2791 name = "ceilf";
2792 break;
2793 case BUILT_IN_ICEILL:
2794 case BUILT_IN_LCEILL:
2795 case BUILT_IN_LLCEILL:
2796 name = "ceill";
2797 break;
2798 case BUILT_IN_IFLOOR:
2799 case BUILT_IN_LFLOOR:
2800 case BUILT_IN_LLFLOOR:
2801 name = "floor";
2802 break;
2803 case BUILT_IN_IFLOORF:
2804 case BUILT_IN_LFLOORF:
2805 case BUILT_IN_LLFLOORF:
2806 name = "floorf";
2807 break;
2808 case BUILT_IN_IFLOORL:
2809 case BUILT_IN_LFLOORL:
2810 case BUILT_IN_LLFLOORL:
2811 name = "floorl";
2812 break;
2813 default:
2814 gcc_unreachable ();
2815 }
2816
2817 fntype = build_function_type_list (TREE_TYPE (arg),
2818 TREE_TYPE (arg), NULL_TREE);
2819 fallback_fndecl = build_fn_decl (name, fntype);
2820 }
2821
2822 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2823
2824 tmp = expand_normal (exp);
2825 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2826
2827   /* Truncate the result of the floating point rounding function to
2828      integer via expand_fix ().  */
2829 target = gen_reg_rtx (mode);
2830 expand_fix (target, tmp, 0);
2831
2832 return target;
2833 }
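
/* Illustrative only: on a target without an lceil/lfloor instruction
   and without a C99 fallback decl, a call such as

     long l = __builtin_lfloor (x);

   is lowered by the code above to roughly l = (long) floor (x),
   i.e. a call to floor followed by expand_fix.  */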
2834
2835 /* Expand a call to one of the builtin math functions doing integer
2836 conversion (lrint).
2837 Return 0 if a normal call should be emitted rather than expanding the
2838 function in-line. EXP is the expression that is a call to the builtin
2839 function; if convenient, the result should be placed in TARGET. */
2840
2841 static rtx
2842 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2843 {
2844 convert_optab builtin_optab;
2845 rtx op0;
2846 rtx_insn *insns;
2847 tree fndecl = get_callee_fndecl (exp);
2848 tree arg;
2849 machine_mode mode;
2850 enum built_in_function fallback_fn = BUILT_IN_NONE;
2851
2852 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2853 gcc_unreachable ();
2854
2855 arg = CALL_EXPR_ARG (exp, 0);
2856
2857 switch (DECL_FUNCTION_CODE (fndecl))
2858 {
2859 CASE_FLT_FN (BUILT_IN_IRINT):
2860 fallback_fn = BUILT_IN_LRINT;
2861 /* FALLTHRU */
2862 CASE_FLT_FN (BUILT_IN_LRINT):
2863 CASE_FLT_FN (BUILT_IN_LLRINT):
2864 builtin_optab = lrint_optab;
2865 break;
2866
2867 CASE_FLT_FN (BUILT_IN_IROUND):
2868 fallback_fn = BUILT_IN_LROUND;
2869 /* FALLTHRU */
2870 CASE_FLT_FN (BUILT_IN_LROUND):
2871 CASE_FLT_FN (BUILT_IN_LLROUND):
2872 builtin_optab = lround_optab;
2873 break;
2874
2875 default:
2876 gcc_unreachable ();
2877 }
2878
2879 /* There's no easy way to detect the case we need to set EDOM. */
2880 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2881 return NULL_RTX;
2882
2883 /* Make a suitable register to place result in. */
2884 mode = TYPE_MODE (TREE_TYPE (exp));
2885
2886   /* Expand in-line only when errno need not be maintained.  */
2887 if (!flag_errno_math)
2888 {
2889 rtx result = gen_reg_rtx (mode);
2890
2891 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2892 need to expand the argument again. This way, we will not perform
2893 	 side-effects more than once.  */
2894 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2895
2896 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2897
2898 start_sequence ();
2899
2900 if (expand_sfix_optab (result, op0, builtin_optab))
2901 {
2902 /* Output the entire sequence. */
2903 insns = get_insns ();
2904 end_sequence ();
2905 emit_insn (insns);
2906 return result;
2907 }
2908
2909 /* If we were unable to expand via the builtin, stop the sequence
2910 (without outputting the insns) and call to the library function
2911 with the stabilized argument list. */
2912 end_sequence ();
2913 }
2914
2915 if (fallback_fn != BUILT_IN_NONE)
2916 {
2917 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2918 targets, (int) round (x) should never be transformed into
2919 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2920 a call to lround in the hope that the target provides at least some
2921 C99 functions. This should result in the best user experience for
2922 	 targets lacking full C99 support.  */
2923 tree fallback_fndecl = mathfn_built_in_1 (TREE_TYPE (arg),
2924 fallback_fn, 0);
2925
2926 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2927 fallback_fndecl, 1, arg);
2928
2929 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2930 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2931 return convert_to_mode (mode, target, 0);
2932 }
2933
2934 return expand_call (exp, target, target == const0_rtx);
2935 }
2936
2937 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2938 a normal call should be emitted rather than expanding the function
2939 in-line. EXP is the expression that is a call to the builtin
2940 function; if convenient, the result should be placed in TARGET. */
2941
2942 static rtx
2943 expand_builtin_powi (tree exp, rtx target)
2944 {
2945 tree arg0, arg1;
2946 rtx op0, op1;
2947 machine_mode mode;
2948 machine_mode mode2;
2949
2950 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2951 return NULL_RTX;
2952
2953 arg0 = CALL_EXPR_ARG (exp, 0);
2954 arg1 = CALL_EXPR_ARG (exp, 1);
2955 mode = TYPE_MODE (TREE_TYPE (exp));
2956
2957 /* Emit a libcall to libgcc. */
2958
2959 /* Mode of the 2nd argument must match that of an int. */
2960 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2961
2962 if (target == NULL_RTX)
2963 target = gen_reg_rtx (mode);
2964
2965 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2966 if (GET_MODE (op0) != mode)
2967 op0 = convert_to_mode (mode, op0, 0);
2968 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2969 if (GET_MODE (op1) != mode2)
2970 op1 = convert_to_mode (mode2, op1, 0);
2971
2972 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2973 target, LCT_CONST, mode, 2,
2974 op0, mode, op1, mode2);
2975
2976 return target;
2977 }
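
/* Illustrative only: for double operands the libcall emitted above is
   the libgcc routine __powidf2, so __builtin_powi (x, n) becomes the
   equivalent of result = __powidf2 (x, n).  */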
2978
2979 /* Expand expression EXP which is a call to the strlen builtin. Return
2980    NULL_RTX if we failed; the caller should emit a normal call.  Otherwise
2981 try to get the result in TARGET, if convenient. */
2982
2983 static rtx
2984 expand_builtin_strlen (tree exp, rtx target,
2985 machine_mode target_mode)
2986 {
2987 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2988 return NULL_RTX;
2989 else
2990 {
2991 struct expand_operand ops[4];
2992 rtx pat;
2993 tree len;
2994 tree src = CALL_EXPR_ARG (exp, 0);
2995 rtx src_reg;
2996 rtx_insn *before_strlen;
2997 machine_mode insn_mode = target_mode;
2998 enum insn_code icode = CODE_FOR_nothing;
2999 unsigned int align;
3000
3001 /* If the length can be computed at compile-time, return it. */
3002 len = c_strlen (src, 0);
3003 if (len)
3004 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3005
3006       /* If the length can be computed at compile-time and is a constant
3007 integer, but there are side-effects in src, evaluate
3008 src for side-effects, then return len.
3009 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3010 can be optimized into: i++; x = 3; */
3011 len = c_strlen (src, 1);
3012 if (len && TREE_CODE (len) == INTEGER_CST)
3013 {
3014 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3015 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3016 }
3017
3018 align = get_pointer_alignment (src) / BITS_PER_UNIT;
3019
3020 /* If SRC is not a pointer type, don't do this operation inline. */
3021 if (align == 0)
3022 return NULL_RTX;
3023
3024 /* Bail out if we can't compute strlen in the right mode. */
3025 while (insn_mode != VOIDmode)
3026 {
3027 icode = optab_handler (strlen_optab, insn_mode);
3028 if (icode != CODE_FOR_nothing)
3029 break;
3030
3031 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3032 }
3033 if (insn_mode == VOIDmode)
3034 return NULL_RTX;
3035
3036 /* Make a place to hold the source address. We will not expand
3037 the actual source until we are sure that the expansion will
3038 not fail -- there are trees that cannot be expanded twice. */
3039 src_reg = gen_reg_rtx (Pmode);
3040
3041 /* Mark the beginning of the strlen sequence so we can emit the
3042 source operand later. */
3043 before_strlen = get_last_insn ();
3044
3045 create_output_operand (&ops[0], target, insn_mode);
3046 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3047 create_integer_operand (&ops[2], 0);
3048 create_integer_operand (&ops[3], align);
3049 if (!maybe_expand_insn (icode, 4, ops))
3050 return NULL_RTX;
3051
3052 /* Now that we are assured of success, expand the source. */
3053 start_sequence ();
3054 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3055 if (pat != src_reg)
3056 {
3057 #ifdef POINTERS_EXTEND_UNSIGNED
3058 if (GET_MODE (pat) != Pmode)
3059 pat = convert_to_mode (Pmode, pat,
3060 POINTERS_EXTEND_UNSIGNED);
3061 #endif
3062 emit_move_insn (src_reg, pat);
3063 }
3064 pat = get_insns ();
3065 end_sequence ();
3066
3067 if (before_strlen)
3068 emit_insn_after (pat, before_strlen);
3069 else
3070 emit_insn_before (pat, get_insns ());
3071
3072 /* Return the value in the proper mode for this function. */
3073 if (GET_MODE (ops[0].value) == target_mode)
3074 target = ops[0].value;
3075 else if (target != 0)
3076 convert_move (target, ops[0].value, 0);
3077 else
3078 target = convert_to_mode (target_mode, ops[0].value, 0);
3079
3080 return target;
3081 }
3082 }
3083
3084 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3085 bytes from constant string DATA + OFFSET and return it as target
3086 constant. */
3087
3088 static rtx
3089 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3090 machine_mode mode)
3091 {
3092 const char *str = (const char *) data;
3093
3094 gcc_assert (offset >= 0
3095 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3096 <= strlen (str) + 1));
3097
3098 return c_readstr (str + offset, mode);
3099 }
3100
3101 /* LEN specifies the length of the block for a memcpy/memset operation.
3102    Figure out its range and put it into MIN_SIZE/MAX_SIZE.
3103    In some cases we can make a very likely guess at the maximum size,
3104    which we then store into PROBABLE_MAX_SIZE.  */
3105
3106 static void
3107 determine_block_size (tree len, rtx len_rtx,
3108 unsigned HOST_WIDE_INT *min_size,
3109 unsigned HOST_WIDE_INT *max_size,
3110 unsigned HOST_WIDE_INT *probable_max_size)
3111 {
3112 if (CONST_INT_P (len_rtx))
3113 {
3114 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3115 return;
3116 }
3117 else
3118 {
3119 wide_int min, max;
3120 enum value_range_type range_type = VR_UNDEFINED;
3121
3122 /* Determine bounds from the type. */
3123 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3124 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3125 else
3126 *min_size = 0;
3127 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3128 *probable_max_size = *max_size
3129 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3130 else
3131 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3132
3133 if (TREE_CODE (len) == SSA_NAME)
3134 range_type = get_range_info (len, &min, &max);
3135 if (range_type == VR_RANGE)
3136 {
3137 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3138 *min_size = min.to_uhwi ();
3139 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3140 *probable_max_size = *max_size = max.to_uhwi ();
3141 }
3142 else if (range_type == VR_ANTI_RANGE)
3143 {
3144 	  /* An anti-range 0...N lets us determine that the minimal size is N+1.  */
3145 if (min == 0)
3146 {
3147 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
3148 *min_size = max.to_uhwi () + 1;
3149 }
3150 /* Code like
3151
3152 int n;
3153 if (n < 100)
3154 memcpy (a, b, n)
3155
3156 	     produces an anti-range allowing negative values of N.  We can
3157 	     still use this information to guess that N is not negative.
3158 	     */
3159 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3160 *probable_max_size = min.to_uhwi () - 1;
3161 }
3162 }
3163 gcc_checking_assert (*max_size <=
3164 (unsigned HOST_WIDE_INT)
3165 GET_MODE_MASK (GET_MODE (len_rtx)));
3166 }
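
/* Worked example for the anti-range case above (illustrative): with
   "int n; if (n < 100) memcpy (a, b, n);" the length converted to an
   unsigned type carries an anti-range excluding [100, huge], so MIN is
   100 and MAX does not fit below 1 << 30; the guess therefore becomes
   probable_max_size = 100 - 1 = 99.  */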
3167
3168 /* Helper function to do the actual work for expand_builtin_memcpy. */
3169
3170 static rtx
3171 expand_builtin_memcpy_args (tree dest, tree src, tree len, rtx target, tree exp)
3172 {
3173 const char *src_str;
3174 unsigned int src_align = get_pointer_alignment (src);
3175 unsigned int dest_align = get_pointer_alignment (dest);
3176 rtx dest_mem, src_mem, dest_addr, len_rtx;
3177 HOST_WIDE_INT expected_size = -1;
3178 unsigned int expected_align = 0;
3179 unsigned HOST_WIDE_INT min_size;
3180 unsigned HOST_WIDE_INT max_size;
3181 unsigned HOST_WIDE_INT probable_max_size;
3182
3183 /* If DEST is not a pointer type, call the normal function. */
3184 if (dest_align == 0)
3185 return NULL_RTX;
3186
3187   /* If SRC is not a pointer type, don't do this
3188 operation in-line. */
3189 if (src_align == 0)
3190 return NULL_RTX;
3191
3192 if (currently_expanding_gimple_stmt)
3193 stringop_block_profile (currently_expanding_gimple_stmt,
3194 &expected_align, &expected_size);
3195
3196 if (expected_align < dest_align)
3197 expected_align = dest_align;
3198 dest_mem = get_memory_rtx (dest, len);
3199 set_mem_align (dest_mem, dest_align);
3200 len_rtx = expand_normal (len);
3201 determine_block_size (len, len_rtx, &min_size, &max_size,
3202 &probable_max_size);
3203 src_str = c_getstr (src);
3204
3205 /* If SRC is a string constant and block move would be done
3206 by pieces, we can avoid loading the string from memory
3207      and instead store only the computed constants.  */
3208 if (src_str
3209 && CONST_INT_P (len_rtx)
3210 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3211 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3212 CONST_CAST (char *, src_str),
3213 dest_align, false))
3214 {
3215 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3216 builtin_memcpy_read_str,
3217 CONST_CAST (char *, src_str),
3218 dest_align, false, 0);
3219 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3220 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3221 return dest_mem;
3222 }
3223
3224 src_mem = get_memory_rtx (src, len);
3225 set_mem_align (src_mem, src_align);
3226
3227 /* Copy word part most expediently. */
3228 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3229 CALL_EXPR_TAILCALL (exp)
3230 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3231 expected_align, expected_size,
3232 min_size, max_size, probable_max_size);
3233
3234 if (dest_addr == 0)
3235 {
3236 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3237 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3238 }
3239
3240 return dest_addr;
3241 }
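
/* Illustrative only: the store_by_pieces path above turns

     char buf[4];
     memcpy (buf, "hi", 3);

   into direct stores of the constant bytes, so the source string is
   never read at run time.  */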
3242
3243 /* Expand a call EXP to the memcpy builtin.
3244 Return NULL_RTX if we failed, the caller should emit a normal call,
3245 otherwise try to get the result in TARGET, if convenient (and in
3246 mode MODE if that's convenient). */
3247
3248 static rtx
3249 expand_builtin_memcpy (tree exp, rtx target)
3250 {
3251 if (!validate_arglist (exp,
3252 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3253 return NULL_RTX;
3254 else
3255 {
3256 tree dest = CALL_EXPR_ARG (exp, 0);
3257 tree src = CALL_EXPR_ARG (exp, 1);
3258 tree len = CALL_EXPR_ARG (exp, 2);
3259 return expand_builtin_memcpy_args (dest, src, len, target, exp);
3260 }
3261 }
3262
3263 /* Expand an instrumented call EXP to the memcpy builtin.
3264 Return NULL_RTX if we failed, the caller should emit a normal call,
3265 otherwise try to get the result in TARGET, if convenient (and in
3266 mode MODE if that's convenient). */
3267
3268 static rtx
3269 expand_builtin_memcpy_with_bounds (tree exp, rtx target)
3270 {
3271 if (!validate_arglist (exp,
3272 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3273 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3274 INTEGER_TYPE, VOID_TYPE))
3275 return NULL_RTX;
3276 else
3277 {
3278 tree dest = CALL_EXPR_ARG (exp, 0);
3279 tree src = CALL_EXPR_ARG (exp, 2);
3280 tree len = CALL_EXPR_ARG (exp, 4);
3281 rtx res = expand_builtin_memcpy_args (dest, src, len, target, exp);
3282
3283 /* Return src bounds with the result. */
3284 if (res)
3285 {
3286 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3287 expand_normal (CALL_EXPR_ARG (exp, 1)));
3288 res = chkp_join_splitted_slot (res, bnd);
3289 }
3290 return res;
3291 }
3292 }
3293
3294 /* Expand a call EXP to the mempcpy builtin.
3295 Return NULL_RTX if we failed; the caller should emit a normal call,
3296 otherwise try to get the result in TARGET, if convenient (and in
3297 mode MODE if that's convenient). If ENDP is 0 return the
3298 destination pointer, if ENDP is 1 return the end pointer ala
3299 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3300 stpcpy. */
3301
3302 static rtx
3303 expand_builtin_mempcpy (tree exp, rtx target, machine_mode mode)
3304 {
3305 if (!validate_arglist (exp,
3306 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3307 return NULL_RTX;
3308 else
3309 {
3310 tree dest = CALL_EXPR_ARG (exp, 0);
3311 tree src = CALL_EXPR_ARG (exp, 1);
3312 tree len = CALL_EXPR_ARG (exp, 2);
3313 return expand_builtin_mempcpy_args (dest, src, len,
3314 target, mode, /*endp=*/ 1,
3315 exp);
3316 }
3317 }
3318
3319 /* Expand an instrumented call EXP to the mempcpy builtin.
3320 Return NULL_RTX if we failed, the caller should emit a normal call,
3321 otherwise try to get the result in TARGET, if convenient (and in
3322 mode MODE if that's convenient). */
3323
3324 static rtx
3325 expand_builtin_mempcpy_with_bounds (tree exp, rtx target, machine_mode mode)
3326 {
3327 if (!validate_arglist (exp,
3328 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3329 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3330 INTEGER_TYPE, VOID_TYPE))
3331 return NULL_RTX;
3332 else
3333 {
3334 tree dest = CALL_EXPR_ARG (exp, 0);
3335 tree src = CALL_EXPR_ARG (exp, 2);
3336 tree len = CALL_EXPR_ARG (exp, 4);
3337 rtx res = expand_builtin_mempcpy_args (dest, src, len, target,
3338 mode, 1, exp);
3339
3340 /* Return DEST bounds with the result. */
3341 if (res)
3342 {
3343 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3344 expand_normal (CALL_EXPR_ARG (exp, 1)));
3345 res = chkp_join_splitted_slot (res, bnd);
3346 }
3347 return res;
3348 }
3349 }
3350
3351 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3352 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3353 so that this can also be called without constructing an actual CALL_EXPR.
3354 The other arguments and return value are the same as for
3355 expand_builtin_mempcpy. */
3356
3357 static rtx
3358 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3359 rtx target, machine_mode mode, int endp,
3360 tree orig_exp)
3361 {
3362 tree fndecl = get_callee_fndecl (orig_exp);
3363
3364 /* If the return value is ignored, transform mempcpy into memcpy. */
3365 if (target == const0_rtx
3366 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP
3367 && builtin_decl_implicit_p (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP))
3368 {
3369 tree fn = builtin_decl_implicit (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP);
3370 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3371 dest, src, len);
3372 return expand_expr (result, target, mode, EXPAND_NORMAL);
3373 }
3374 else if (target == const0_rtx
3375 && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
3376 {
3377 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3378 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3379 dest, src, len);
3380 return expand_expr (result, target, mode, EXPAND_NORMAL);
3381 }
3382 else
3383 {
3384 const char *src_str;
3385 unsigned int src_align = get_pointer_alignment (src);
3386 unsigned int dest_align = get_pointer_alignment (dest);
3387 rtx dest_mem, src_mem, len_rtx;
3388
3389 /* If either SRC or DEST is not a pointer type, don't do this
3390 operation in-line. */
3391 if (dest_align == 0 || src_align == 0)
3392 return NULL_RTX;
3393
3394 /* If LEN is not constant, call the normal function. */
3395 if (! tree_fits_uhwi_p (len))
3396 return NULL_RTX;
3397
3398 len_rtx = expand_normal (len);
3399 src_str = c_getstr (src);
3400
3401 /* If SRC is a string constant and block move would be done
3402 by pieces, we can avoid loading the string from memory
3403 and only store the computed constants. */
3404 if (src_str
3405 && CONST_INT_P (len_rtx)
3406 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3407 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3408 CONST_CAST (char *, src_str),
3409 dest_align, false))
3410 {
3411 dest_mem = get_memory_rtx (dest, len);
3412 set_mem_align (dest_mem, dest_align);
3413 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3414 builtin_memcpy_read_str,
3415 CONST_CAST (char *, src_str),
3416 dest_align, false, endp);
3417 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3418 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3419 return dest_mem;
3420 }
3421
3422 if (CONST_INT_P (len_rtx)
3423 && can_move_by_pieces (INTVAL (len_rtx),
3424 MIN (dest_align, src_align)))
3425 {
3426 dest_mem = get_memory_rtx (dest, len);
3427 set_mem_align (dest_mem, dest_align);
3428 src_mem = get_memory_rtx (src, len);
3429 set_mem_align (src_mem, src_align);
3430 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3431 MIN (dest_align, src_align), endp);
3432 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3433 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3434 return dest_mem;
3435 }
3436
3437 return NULL_RTX;
3438 }
3439 }
3440
3441 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3442 we failed; the caller should emit a normal call, otherwise try to
3443 get the result in TARGET, if convenient. If ENDP is 0 return the
3444 destination pointer, if ENDP is 1 return the end pointer ala
3445 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3446 stpcpy. */
3447
3448 static rtx
3449 expand_movstr (tree dest, tree src, rtx target, int endp)
3450 {
3451 struct expand_operand ops[3];
3452 rtx dest_mem;
3453 rtx src_mem;
3454
3455 if (!targetm.have_movstr ())
3456 return NULL_RTX;
3457
3458 dest_mem = get_memory_rtx (dest, NULL);
3459 src_mem = get_memory_rtx (src, NULL);
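/* When the caller wants the destination pointer itself (ENDP == 0),
capture the destination address in a fresh register up front; the
movstr pattern's output operand only provides the end address. */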
3460 if (!endp)
3461 {
3462 target = force_reg (Pmode, XEXP (dest_mem, 0));
3463 dest_mem = replace_equiv_address (dest_mem, target);
3464 }
3465
3466 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3467 create_fixed_operand (&ops[1], dest_mem);
3468 create_fixed_operand (&ops[2], src_mem);
3469 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
3470 return NULL_RTX;
3471
3472 if (endp && target != const0_rtx)
3473 {
3474 target = ops[0].value;
3475 /* movstr is supposed to set end to the address of the NUL
3476 terminator. If the caller requested a mempcpy-like return value,
3477 adjust it. */
3478 if (endp == 1)
3479 {
3480 rtx tem = plus_constant (GET_MODE (target),
3481 gen_lowpart (GET_MODE (target), target), 1);
3482 emit_move_insn (target, force_operand (tem, NULL_RTX));
3483 }
3484 }
3485 return target;
3486 }
3487
3488 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3489 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3490 try to get the result in TARGET, if convenient (and in mode MODE if that's
3491 convenient). */
3492
3493 static rtx
3494 expand_builtin_strcpy (tree exp, rtx target)
3495 {
3496 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3497 {
3498 tree dest = CALL_EXPR_ARG (exp, 0);
3499 tree src = CALL_EXPR_ARG (exp, 1);
3500 return expand_builtin_strcpy_args (dest, src, target);
3501 }
3502 return NULL_RTX;
3503 }
3504
3505 /* Helper function to do the actual work for expand_builtin_strcpy. The
3506 arguments to the builtin_strcpy call DEST and SRC are broken out
3507 so that this can also be called without constructing an actual CALL_EXPR.
3508 The other arguments and return value are the same as for
3509 expand_builtin_strcpy. */
3510
3511 static rtx
3512 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3513 {
3514 return expand_movstr (dest, src, target, /*endp=*/0);
3515 }
3516
3517 /* Expand a call EXP to the stpcpy builtin.
3518 Return NULL_RTX if we failed; the caller should emit a normal call,
3519 otherwise try to get the result in TARGET, if convenient (and in
3520 mode MODE if that's convenient). */
3521
3522 static rtx
3523 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3524 {
3525 tree dst, src;
3526 location_t loc = EXPR_LOCATION (exp);
3527
3528 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3529 return NULL_RTX;
3530
3531 dst = CALL_EXPR_ARG (exp, 0);
3532 src = CALL_EXPR_ARG (exp, 1);
3533
3534 /* If the return value is ignored, transform stpcpy into strcpy. */
3535 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3536 {
3537 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3538 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3539 return expand_expr (result, target, mode, EXPAND_NORMAL);
3540 }
3541 else
3542 {
3543 tree len, lenp1;
3544 rtx ret;
3545
3546 /* Ensure we get an actual string whose length can be evaluated at
3547 compile-time, not an expression containing a string. This is
3548 because the latter will potentially produce pessimized code
3549 when used to produce the return value. */
3550 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3551 return expand_movstr (dst, src, target, /*endp=*/2);
3552
3553 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
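/* Copy LEN + 1 bytes, including the NUL, and request ENDP == 2: the
end pointer minus one, i.e. the address of the copied NUL, which is
exactly what stpcpy must return. */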
3554 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3555 target, mode, /*endp=*/2,
3556 exp);
3557
3558 if (ret)
3559 return ret;
3560
3561 if (TREE_CODE (len) == INTEGER_CST)
3562 {
3563 rtx len_rtx = expand_normal (len);
3564
3565 if (CONST_INT_P (len_rtx))
3566 {
3567 ret = expand_builtin_strcpy_args (dst, src, target);
3568
3569 if (ret)
3570 {
3571 if (! target)
3572 {
3573 if (mode != VOIDmode)
3574 target = gen_reg_rtx (mode);
3575 else
3576 target = gen_reg_rtx (GET_MODE (ret));
3577 }
3578 if (GET_MODE (target) != GET_MODE (ret))
3579 ret = gen_lowpart (GET_MODE (target), ret);
3580
3581 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3582 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3583 gcc_assert (ret);
3584
3585 return target;
3586 }
3587 }
3588 }
3589
3590 return expand_movstr (dst, src, target, /*endp=*/2);
3591 }
3592 }
3593
3594 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3595 bytes from constant string DATA + OFFSET and return it as target
3596 constant. */
3597
3598 rtx
3599 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3600 machine_mode mode)
3601 {
3602 const char *str = (const char *) data;
3603
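/* Offsets past the end of the source string read as zero, which
supplies the trailing NUL padding that strncpy requires. */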
3604 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3605 return const0_rtx;
3606
3607 return c_readstr (str + offset, mode);
3608 }
3609
3610 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3611 NULL_RTX if we failed; the caller should emit a normal call. */
3612
3613 static rtx
3614 expand_builtin_strncpy (tree exp, rtx target)
3615 {
3616 location_t loc = EXPR_LOCATION (exp);
3617
3618 if (validate_arglist (exp,
3619 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3620 {
3621 tree dest = CALL_EXPR_ARG (exp, 0);
3622 tree src = CALL_EXPR_ARG (exp, 1);
3623 tree len = CALL_EXPR_ARG (exp, 2);
3624 tree slen = c_strlen (src, 1);
3625
3626 /* We must be passed a constant LEN, and SRC's length must also be constant. */
3627 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
3628 return NULL_RTX;
3629
3630 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3631
3632 /* We're required to pad with trailing zeros if the requested
3633 len is greater than strlen(s2)+1. In that case try to
3634 use store_by_pieces; if that fails, punt. */
3635 if (tree_int_cst_lt (slen, len))
3636 {
3637 unsigned int dest_align = get_pointer_alignment (dest);
3638 const char *p = c_getstr (src);
3639 rtx dest_mem;
3640
3641 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
3642 || !can_store_by_pieces (tree_to_uhwi (len),
3643 builtin_strncpy_read_str,
3644 CONST_CAST (char *, p),
3645 dest_align, false))
3646 return NULL_RTX;
3647
3648 dest_mem = get_memory_rtx (dest, len);
3649 store_by_pieces (dest_mem, tree_to_uhwi (len),
3650 builtin_strncpy_read_str,
3651 CONST_CAST (char *, p), dest_align, false, 0);
3652 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3653 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3654 return dest_mem;
3655 }
3656 }
3657 return NULL_RTX;
3658 }
3659
3660 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3661 bytes from constant string DATA + OFFSET and return it as target
3662 constant. */
3663
3664 rtx
3665 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3666 machine_mode mode)
3667 {
3668 const char *c = (const char *) data;
3669 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3670
3671 memset (p, *c, GET_MODE_SIZE (mode));
3672
3673 return c_readstr (p, mode);
3674 }
3675
3676 /* Callback routine for store_by_pieces. Return the RTL of a register
3677 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3678 char value given in the RTL register data. For example, if mode is
3679 4 bytes wide, return the RTL for 0x01010101*data. */
3680
3681 static rtx
3682 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3683 machine_mode mode)
3684 {
3685 rtx target, coeff;
3686 size_t size;
3687 char *p;
3688
3689 size = GET_MODE_SIZE (mode);
3690 if (size == 1)
3691 return (rtx) data;
3692
3693 p = XALLOCAVEC (char, size);
3694 memset (p, 1, size);
3695 coeff = c_readstr (p, mode);
3696
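/* Multiplying the zero-extended byte by the 0x01...01 coefficient
replicates it into every byte of the word; e.g. in a 4-byte mode,
0xAB * 0x01010101 == 0xABABABAB. */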
3697 target = convert_to_mode (mode, (rtx) data, 1);
3698 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3699 return force_reg (mode, target);
3700 }
3701
3702 /* Expand expression EXP, which is a call to the memset builtin. Return
3703 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3704 try to get the result in TARGET, if convenient (and in mode MODE if that's
3705 convenient). */
3706
3707 static rtx
3708 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
3709 {
3710 if (!validate_arglist (exp,
3711 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3712 return NULL_RTX;
3713 else
3714 {
3715 tree dest = CALL_EXPR_ARG (exp, 0);
3716 tree val = CALL_EXPR_ARG (exp, 1);
3717 tree len = CALL_EXPR_ARG (exp, 2);
3718 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3719 }
3720 }
3721
3722 /* Expand expression EXP, which is an instrumented call to the memset builtin.
3723 Return NULL_RTX if we failed; the caller should emit a normal call, otherwise
3724 try to get the result in TARGET, if convenient (and in mode MODE if that's
3725 convenient). */
3726
3727 static rtx
3728 expand_builtin_memset_with_bounds (tree exp, rtx target, machine_mode mode)
3729 {
3730 if (!validate_arglist (exp,
3731 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3732 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3733 return NULL_RTX;
3734 else
3735 {
3736 tree dest = CALL_EXPR_ARG (exp, 0);
3737 tree val = CALL_EXPR_ARG (exp, 2);
3738 tree len = CALL_EXPR_ARG (exp, 3);
3739 rtx res = expand_builtin_memset_args (dest, val, len, target, mode, exp);
3740
3741 /* Return DEST bounds with the result. */
3742 if (res)
3743 {
3744 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3745 expand_normal (CALL_EXPR_ARG (exp, 1)));
3746 res = chkp_join_splitted_slot (res, bnd);
3747 }
3748 return res;
3749 }
3750 }
3751
3752 /* Helper function to do the actual work for expand_builtin_memset. The
3753 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3754 so that this can also be called without constructing an actual CALL_EXPR.
3755 The other arguments and return value are the same as for
3756 expand_builtin_memset. */
3757
3758 static rtx
3759 expand_builtin_memset_args (tree dest, tree val, tree len,
3760 rtx target, machine_mode mode, tree orig_exp)
3761 {
3762 tree fndecl, fn;
3763 enum built_in_function fcode;
3764 machine_mode val_mode;
3765 char c;
3766 unsigned int dest_align;
3767 rtx dest_mem, dest_addr, len_rtx;
3768 HOST_WIDE_INT expected_size = -1;
3769 unsigned int expected_align = 0;
3770 unsigned HOST_WIDE_INT min_size;
3771 unsigned HOST_WIDE_INT max_size;
3772 unsigned HOST_WIDE_INT probable_max_size;
3773
3774 dest_align = get_pointer_alignment (dest);
3775
3776 /* If DEST is not a pointer type, don't do this operation in-line. */
3777 if (dest_align == 0)
3778 return NULL_RTX;
3779
3780 if (currently_expanding_gimple_stmt)
3781 stringop_block_profile (currently_expanding_gimple_stmt,
3782 &expected_align, &expected_size);
3783
3784 if (expected_align < dest_align)
3785 expected_align = dest_align;
3786
3787 /* If the LEN parameter is zero, return DEST. */
3788 if (integer_zerop (len))
3789 {
3790 /* Evaluate and ignore VAL in case it has side-effects. */
3791 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3792 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3793 }
3794
3795 /* Stabilize the arguments in case we fail. */
3796 dest = builtin_save_expr (dest);
3797 val = builtin_save_expr (val);
3798 len = builtin_save_expr (len);
3799
3800 len_rtx = expand_normal (len);
3801 determine_block_size (len, len_rtx, &min_size, &max_size,
3802 &probable_max_size);
3803 dest_mem = get_memory_rtx (dest, len);
3804 val_mode = TYPE_MODE (unsigned_char_type_node);
3805
3806 if (TREE_CODE (val) != INTEGER_CST)
3807 {
3808 rtx val_rtx;
3809
3810 val_rtx = expand_normal (val);
3811 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
3812
3813 /* Assume that we can memset by pieces if we can store
3814 the coefficients by pieces (in the required modes).
3815 We can't pass builtin_memset_gen_str as that emits RTL. */
3816 c = 1;
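/* Probe can_store_by_pieces with a dummy nonzero byte: only the
store costs matter here, since the actual value is supplied by
builtin_memset_gen_str below. */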
3817 if (tree_fits_uhwi_p (len)
3818 && can_store_by_pieces (tree_to_uhwi (len),
3819 builtin_memset_read_str, &c, dest_align,
3820 true))
3821 {
3822 val_rtx = force_reg (val_mode, val_rtx);
3823 store_by_pieces (dest_mem, tree_to_uhwi (len),
3824 builtin_memset_gen_str, val_rtx, dest_align,
3825 true, 0);
3826 }
3827 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3828 dest_align, expected_align,
3829 expected_size, min_size, max_size,
3830 probable_max_size))
3831 goto do_libcall;
3832
3833 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3834 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3835 return dest_mem;
3836 }
3837
3838 if (target_char_cast (val, &c))
3839 goto do_libcall;
3840
3841 if (c)
3842 {
3843 if (tree_fits_uhwi_p (len)
3844 && can_store_by_pieces (tree_to_uhwi (len),
3845 builtin_memset_read_str, &c, dest_align,
3846 true))
3847 store_by_pieces (dest_mem, tree_to_uhwi (len),
3848 builtin_memset_read_str, &c, dest_align, true, 0);
3849 else if (!set_storage_via_setmem (dest_mem, len_rtx,
3850 gen_int_mode (c, val_mode),
3851 dest_align, expected_align,
3852 expected_size, min_size, max_size,
3853 probable_max_size))
3854 goto do_libcall;
3855
3856 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3857 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3858 return dest_mem;
3859 }
3860
3861 set_mem_align (dest_mem, dest_align);
3862 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3863 CALL_EXPR_TAILCALL (orig_exp)
3864 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3865 expected_align, expected_size,
3866 min_size, max_size,
3867 probable_max_size);
3868
3869 if (dest_addr == 0)
3870 {
3871 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3872 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3873 }
3874
3875 return dest_addr;
3876
3877 do_libcall:
3878 fndecl = get_callee_fndecl (orig_exp);
3879 fcode = DECL_FUNCTION_CODE (fndecl);
3880 if (fcode == BUILT_IN_MEMSET
3881 || fcode == BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP)
3882 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
3883 dest, val, len);
3884 else if (fcode == BUILT_IN_BZERO)
3885 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
3886 dest, len);
3887 else
3888 gcc_unreachable ();
3889 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3890 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3891 return expand_call (fn, target, target == const0_rtx);
3892 }
3893
3894 /* Expand expression EXP, which is a call to the bzero builtin. Return
3895 NULL_RTX if we failed; the caller should emit a normal call. */
3896
3897 static rtx
3898 expand_builtin_bzero (tree exp)
3899 {
3900 tree dest, size;
3901 location_t loc = EXPR_LOCATION (exp);
3902
3903 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3904 return NULL_RTX;
3905
3906 dest = CALL_EXPR_ARG (exp, 0);
3907 size = CALL_EXPR_ARG (exp, 1);
3908
3909 /* New argument list transforming bzero(ptr x, int y) to
3910 memset(ptr x, int 0, size_t y). This is done this way
3911 so that if it isn't expanded inline, we fall back to
3912 calling bzero instead of memset. */
3913
3914 return expand_builtin_memset_args (dest, integer_zero_node,
3915 fold_convert_loc (loc,
3916 size_type_node, size),
3917 const0_rtx, VOIDmode, exp);
3918 }
3919
3920 /* Expand expression EXP, which is a call to the memcmp built-in function.
3921 Return NULL_RTX if we failed and the caller should emit a normal call,
3922 otherwise try to get the result in TARGET, if convenient (and in mode
3923 MODE, if that's convenient). */
3924
3925 static rtx
3926 expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3927 ATTRIBUTE_UNUSED machine_mode mode)
3928 {
3929 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
3930
3931 if (!validate_arglist (exp,
3932 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3933 return NULL_RTX;
3934
3935 /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
3936 implementing memcmp because it will stop if it encounters two
3937 zero bytes. */
3938 #if defined HAVE_cmpmemsi
3939 {
3940 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3941 rtx result;
3942 rtx insn;
3943 tree arg1 = CALL_EXPR_ARG (exp, 0);
3944 tree arg2 = CALL_EXPR_ARG (exp, 1);
3945 tree len = CALL_EXPR_ARG (exp, 2);
3946
3947 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3948 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3949 machine_mode insn_mode;
3950
3951 if (HAVE_cmpmemsi)
3952 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
3953 else
3954 return NULL_RTX;
3955
3956 /* If either argument is not a pointer (its alignment is unknown), call the function. */
3957 if (arg1_align == 0 || arg2_align == 0)
3958 return NULL_RTX;
3959
3960 /* Make a place to write the result of the instruction. */
3961 result = target;
3962 if (! (result != 0
3963 && REG_P (result) && GET_MODE (result) == insn_mode
3964 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3965 result = gen_reg_rtx (insn_mode);
3966
3967 arg1_rtx = get_memory_rtx (arg1, len);
3968 arg2_rtx = get_memory_rtx (arg2, len);
3969 arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
3970
3971 /* Set MEM_SIZE as appropriate. */
3972 if (CONST_INT_P (arg3_rtx))
3973 {
3974 set_mem_size (arg1_rtx, INTVAL (arg3_rtx));
3975 set_mem_size (arg2_rtx, INTVAL (arg3_rtx));
3976 }
3977
3978 if (HAVE_cmpmemsi)
3979 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3980 GEN_INT (MIN (arg1_align, arg2_align)));
3981 else
3982 gcc_unreachable ();
3983
3984 if (insn)
3985 emit_insn (insn);
3986 else
3987 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
3988 TYPE_MODE (integer_type_node), 3,
3989 XEXP (arg1_rtx, 0), Pmode,
3990 XEXP (arg2_rtx, 0), Pmode,
3991 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
3992 TYPE_UNSIGNED (sizetype)),
3993 TYPE_MODE (sizetype));
3994
3995 /* Return the value in the proper mode for this function. */
3996 mode = TYPE_MODE (TREE_TYPE (exp));
3997 if (GET_MODE (result) == mode)
3998 return result;
3999 else if (target != 0)
4000 {
4001 convert_move (target, result, 0);
4002 return target;
4003 }
4004 else
4005 return convert_to_mode (mode, result, 0);
4006 }
4007 #endif /* HAVE_cmpmemsi. */
4008
4009 return NULL_RTX;
4010 }
4011
4012 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4013 if we failed; the caller should emit a normal call, otherwise try to get
4014 the result in TARGET, if convenient. */
4015
4016 static rtx
4017 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4018 {
4019 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4020 return NULL_RTX;
4021
4022 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
4023 if (direct_optab_handler (cmpstr_optab, SImode) != CODE_FOR_nothing
4024 || direct_optab_handler (cmpstrn_optab, SImode) != CODE_FOR_nothing)
4025 {
4026 rtx arg1_rtx, arg2_rtx;
4027 rtx result, insn = NULL_RTX;
4028 tree fndecl, fn;
4029 tree arg1 = CALL_EXPR_ARG (exp, 0);
4030 tree arg2 = CALL_EXPR_ARG (exp, 1);
4031
4032 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4033 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4034
4035 /* If either argument is not a pointer (its alignment is unknown), call the function. */
4036 if (arg1_align == 0 || arg2_align == 0)
4037 return NULL_RTX;
4038
4039 /* Stabilize the arguments in case gen_cmpstr(n)si fails. */
4040 arg1 = builtin_save_expr (arg1);
4041 arg2 = builtin_save_expr (arg2);
4042
4043 arg1_rtx = get_memory_rtx (arg1, NULL);
4044 arg2_rtx = get_memory_rtx (arg2, NULL);
4045
4046 #ifdef HAVE_cmpstrsi
4047 /* Try to call cmpstrsi. */
4048 if (HAVE_cmpstrsi)
4049 {
4050 machine_mode insn_mode
4051 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
4052
4053 /* Make a place to write the result of the instruction. */
4054 result = target;
4055 if (! (result != 0
4056 && REG_P (result) && GET_MODE (result) == insn_mode
4057 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4058 result = gen_reg_rtx (insn_mode);
4059
4060 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
4061 GEN_INT (MIN (arg1_align, arg2_align)));
4062 }
4063 #endif
4064 #ifdef HAVE_cmpstrnsi
4065 /* Try to determine at least one length and call cmpstrnsi. */
4066 if (!insn && HAVE_cmpstrnsi)
4067 {
4068 tree len;
4069 rtx arg3_rtx;
4070
4071 machine_mode insn_mode
4072 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4073 tree len1 = c_strlen (arg1, 1);
4074 tree len2 = c_strlen (arg2, 1);
4075
4076 if (len1)
4077 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4078 if (len2)
4079 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4080
4081 /* If we don't have a constant length for the first, use the length
4082 of the second, if we know it. We don't require a constant for
4083 this case; some cost analysis could be done if both are available
4084 but neither is constant. For now, assume they're equally cheap,
4085 unless one has side effects. If both strings have constant lengths,
4086 use the smaller. */
4087
4088 if (!len1)
4089 len = len2;
4090 else if (!len2)
4091 len = len1;
4092 else if (TREE_SIDE_EFFECTS (len1))
4093 len = len2;
4094 else if (TREE_SIDE_EFFECTS (len2))
4095 len = len1;
4096 else if (TREE_CODE (len1) != INTEGER_CST)
4097 len = len2;
4098 else if (TREE_CODE (len2) != INTEGER_CST)
4099 len = len1;
4100 else if (tree_int_cst_lt (len1, len2))
4101 len = len1;
4102 else
4103 len = len2;
4104
4105 /* If both arguments have side effects, we cannot optimize. */
4106 if (!len || TREE_SIDE_EFFECTS (len))
4107 goto do_libcall;
4108
4109 arg3_rtx = expand_normal (len);
4110
4111 /* Make a place to write the result of the instruction. */
4112 result = target;
4113 if (! (result != 0
4114 && REG_P (result) && GET_MODE (result) == insn_mode
4115 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4116 result = gen_reg_rtx (insn_mode);
4117
4118 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4119 GEN_INT (MIN (arg1_align, arg2_align)));
4120 }
4121 #endif
4122
4123 if (insn)
4124 {
4125 machine_mode mode;
4126 emit_insn (insn);
4127
4128 /* Return the value in the proper mode for this function. */
4129 mode = TYPE_MODE (TREE_TYPE (exp));
4130 if (GET_MODE (result) == mode)
4131 return result;
4132 if (target == 0)
4133 return convert_to_mode (mode, result, 0);
4134 convert_move (target, result, 0);
4135 return target;
4136 }
4137
4138 /* Expand the library call ourselves using a stabilized argument
4139 list to avoid evaluating the function's arguments twice. */
4140 #ifdef HAVE_cmpstrnsi
4141 do_libcall:
4142 #endif
4143 fndecl = get_callee_fndecl (exp);
4144 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4145 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4146 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4147 return expand_call (fn, target, target == const0_rtx);
4148 }
4149 #endif
4150 return NULL_RTX;
4151 }
4152
4153 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4154 NULL_RTX if we failed; the caller should emit a normal call, otherwise try to get
4155 the result in TARGET, if convenient. */
4156
4157 static rtx
4158 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4159 ATTRIBUTE_UNUSED machine_mode mode)
4160 {
4161 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
4162
4163 if (!validate_arglist (exp,
4164 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4165 return NULL_RTX;
4166
4167 /* If c_strlen can determine an expression for one of the string
4168 lengths, and it doesn't have side effects, then emit cmpstrnsi
4169 using length MIN(strlen(string)+1, arg3). */
4170 #ifdef HAVE_cmpstrnsi
4171 if (HAVE_cmpstrnsi)
4172 {
4173 tree len, len1, len2;
4174 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4175 rtx result, insn;
4176 tree fndecl, fn;
4177 tree arg1 = CALL_EXPR_ARG (exp, 0);
4178 tree arg2 = CALL_EXPR_ARG (exp, 1);
4179 tree arg3 = CALL_EXPR_ARG (exp, 2);
4180
4181 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4182 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4183 machine_mode insn_mode
4184 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4185
4186 len1 = c_strlen (arg1, 1);
4187 len2 = c_strlen (arg2, 1);
4188
4189 if (len1)
4190 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4191 if (len2)
4192 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4193
4194 /* If we don't have a constant length for the first, use the length
4195 of the second, if we know it. We don't require a constant for
4196 this case; some cost analysis could be done if both are available
4197 but neither is constant. For now, assume they're equally cheap,
4198 unless one has side effects. If both strings have constant lengths,
4199 use the smaller. */
4200
4201 if (!len1)
4202 len = len2;
4203 else if (!len2)
4204 len = len1;
4205 else if (TREE_SIDE_EFFECTS (len1))
4206 len = len2;
4207 else if (TREE_SIDE_EFFECTS (len2))
4208 len = len1;
4209 else if (TREE_CODE (len1) != INTEGER_CST)
4210 len = len2;
4211 else if (TREE_CODE (len2) != INTEGER_CST)
4212 len = len1;
4213 else if (tree_int_cst_lt (len1, len2))
4214 len = len1;
4215 else
4216 len = len2;
4217
4218 /* If both arguments have side effects, we cannot optimize. */
4219 if (!len || TREE_SIDE_EFFECTS (len))
4220 return NULL_RTX;
4221
4222 /* The actual new length parameter is MIN(len,arg3). */
4223 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
4224 fold_convert_loc (loc, TREE_TYPE (len), arg3));
4225
4226 /* If either argument is not a pointer (its alignment is unknown), call the function. */
4227 if (arg1_align == 0 || arg2_align == 0)
4228 return NULL_RTX;
4229
4230 /* Make a place to write the result of the instruction. */
4231 result = target;
4232 if (! (result != 0
4233 && REG_P (result) && GET_MODE (result) == insn_mode
4234 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4235 result = gen_reg_rtx (insn_mode);
4236
4237 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4238 arg1 = builtin_save_expr (arg1);
4239 arg2 = builtin_save_expr (arg2);
4240 len = builtin_save_expr (len);
4241
4242 arg1_rtx = get_memory_rtx (arg1, len);
4243 arg2_rtx = get_memory_rtx (arg2, len);
4244 arg3_rtx = expand_normal (len);
4245 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4246 GEN_INT (MIN (arg1_align, arg2_align)));
4247 if (insn)
4248 {
4249 emit_insn (insn);
4250
4251 /* Return the value in the proper mode for this function. */
4252 mode = TYPE_MODE (TREE_TYPE (exp));
4253 if (GET_MODE (result) == mode)
4254 return result;
4255 if (target == 0)
4256 return convert_to_mode (mode, result, 0);
4257 convert_move (target, result, 0);
4258 return target;
4259 }
4260
4261 /* Expand the library call ourselves using a stabilized argument
4262 list to avoid evaluating the function's arguments twice. */
4263 fndecl = get_callee_fndecl (exp);
4264 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
4265 arg1, arg2, len);
4266 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4267 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4268 return expand_call (fn, target, target == const0_rtx);
4269 }
4270 #endif
4271 return NULL_RTX;
4272 }
4273
4274 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4275 if that's convenient. */
4276
4277 rtx
4278 expand_builtin_saveregs (void)
4279 {
4280 rtx val;
4281 rtx_insn *seq;
4282
4283 /* Don't do __builtin_saveregs more than once in a function.
4284 Save the result of the first call and reuse it. */
4285 if (saveregs_value != 0)
4286 return saveregs_value;
4287
4288 /* When this function is called, it means that registers must be
4289 saved on entry to this function. So we migrate the call to the
4290 first insn of this function. */
4291
4292 start_sequence ();
4293
4294 /* Do whatever the machine needs done in this case. */
4295 val = targetm.calls.expand_builtin_saveregs ();
4296
4297 seq = get_insns ();
4298 end_sequence ();
4299
4300 saveregs_value = val;
4301
4302 /* Put the insns after the NOTE that starts the function. If this
4303 is inside a start_sequence, make the outer-level insn chain current, so
4304 the code is placed at the start of the function. */
4305 push_topmost_sequence ();
4306 emit_insn_after (seq, entry_of_function ());
4307 pop_topmost_sequence ();
4308
4309 return val;
4310 }
4311
4312 /* Expand a call to __builtin_next_arg. */
4313
4314 static rtx
4315 expand_builtin_next_arg (void)
4316 {
4317 /* Checking arguments is already done in fold_builtin_next_arg,
4318 which must be called before this function. */
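/* That is, compute internal_arg_pointer + arg_offset_rtx, in effect
the address at which the first anonymous argument is expected. */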
4319 return expand_binop (ptr_mode, add_optab,
4320 crtl->args.internal_arg_pointer,
4321 crtl->args.arg_offset_rtx,
4322 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4323 }
4324
4325 /* Make it easier for the backends by protecting the valist argument
4326 from multiple evaluations. */
4327
4328 static tree
4329 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4330 {
4331 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4332
4333 /* The current way of determining the type of valist is completely
4334 bogus. We should have the information on the va builtin instead. */
4335 if (!vatype)
4336 vatype = targetm.fn_abi_va_list (cfun->decl);
4337
4338 if (TREE_CODE (vatype) == ARRAY_TYPE)
4339 {
4340 if (TREE_SIDE_EFFECTS (valist))
4341 valist = save_expr (valist);
4342
4343 /* For this case, the backends will be expecting a pointer to
4344 vatype, but it's possible we've actually been given an array
4345 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4346 So fix it. */
4347 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4348 {
4349 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4350 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4351 }
4352 }
4353 else
4354 {
4355 tree pt = build_pointer_type (vatype);
4356
4357 if (! needs_lvalue)
4358 {
4359 if (! TREE_SIDE_EFFECTS (valist))
4360 return valist;
4361
4362 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4363 TREE_SIDE_EFFECTS (valist) = 1;
4364 }
4365
4366 if (TREE_SIDE_EFFECTS (valist))
4367 valist = save_expr (valist);
4368 valist = fold_build2_loc (loc, MEM_REF,
4369 vatype, valist, build_int_cst (pt, 0));
4370 }
4371
4372 return valist;
4373 }
4374
4375 /* The "standard" definition of va_list is void*. */
4376
4377 tree
4378 std_build_builtin_va_list (void)
4379 {
4380 return ptr_type_node;
4381 }
4382
4383 /* The "standard" abi va_list is va_list_type_node. */
4384
4385 tree
4386 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4387 {
4388 return va_list_type_node;
4389 }
4390
4391 /* The "standard" type of va_list is va_list_type_node. */
4392
4393 tree
4394 std_canonical_va_list_type (tree type)
4395 {
4396 tree wtype, htype;
4397
4398 if (INDIRECT_REF_P (type))
4399 type = TREE_TYPE (type);
4400 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE (type)))
4401 type = TREE_TYPE (type);
4402 wtype = va_list_type_node;
4403 htype = type;
4404 /* Handle structure va_list types. */
4405 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4406 htype = TREE_TYPE (htype);
4407 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4408 {
4409 /* If va_list is an array type, the argument may have decayed
4410 to a pointer type, e.g. by being passed to another function.
4411 In that case, unwrap both types so that we can compare the
4412 underlying records. */
4413 if (TREE_CODE (htype) == ARRAY_TYPE
4414 || POINTER_TYPE_P (htype))
4415 {
4416 wtype = TREE_TYPE (wtype);
4417 htype = TREE_TYPE (htype);
4418 }
4419 }
4420 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4421 return va_list_type_node;
4422
4423 return NULL_TREE;
4424 }
4425
4426 /* The "standard" implementation of va_start: just assign `nextarg' to
4427 the variable. */
4428
4429 void
4430 std_expand_builtin_va_start (tree valist, rtx nextarg)
4431 {
4432 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4433 convert_move (va_r, nextarg, 0);
4434
4435 /* We do not have any valid bounds for the pointer, so
4436 just store zero bounds for it. */
4437 if (chkp_function_instrumented_p (current_function_decl))
4438 chkp_expand_bounds_reset_for_mem (valist,
4439 make_tree (TREE_TYPE (valist),
4440 nextarg));
4441 }
4442
4443 /* Expand EXP, a call to __builtin_va_start. */
4444
4445 static rtx
4446 expand_builtin_va_start (tree exp)
4447 {
4448 rtx nextarg;
4449 tree valist;
4450 location_t loc = EXPR_LOCATION (exp);
4451
4452 if (call_expr_nargs (exp) < 2)
4453 {
4454 error_at (loc, "too few arguments to function %<va_start%>");
4455 return const0_rtx;
4456 }
4457
4458 if (fold_builtin_next_arg (exp, true))
4459 return const0_rtx;
4460
4461 nextarg = expand_builtin_next_arg ();
4462 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4463
4464 if (targetm.expand_builtin_va_start)
4465 targetm.expand_builtin_va_start (valist, nextarg);
4466 else
4467 std_expand_builtin_va_start (valist, nextarg);
4468
4469 return const0_rtx;
4470 }
4471
4472 /* Expand EXP, a call to __builtin_va_end. */
4473
4474 static rtx
4475 expand_builtin_va_end (tree exp)
4476 {
4477 tree valist = CALL_EXPR_ARG (exp, 0);
4478
4479 /* Evaluate for side effects, if needed. I hate macros that don't
4480 do that. */
4481 if (TREE_SIDE_EFFECTS (valist))
4482 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4483
4484 return const0_rtx;
4485 }
4486
4487 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4488 builtin rather than just as an assignment in stdarg.h because of the
4489 nastiness of array-type va_list types. */
4490
4491 static rtx
4492 expand_builtin_va_copy (tree exp)
4493 {
4494 tree dst, src, t;
4495 location_t loc = EXPR_LOCATION (exp);
4496
4497 dst = CALL_EXPR_ARG (exp, 0);
4498 src = CALL_EXPR_ARG (exp, 1);
4499
4500 dst = stabilize_va_list_loc (loc, dst, 1);
4501 src = stabilize_va_list_loc (loc, src, 0);
4502
4503 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4504
4505 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4506 {
4507 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4508 TREE_SIDE_EFFECTS (t) = 1;
4509 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4510 }
4511 else
4512 {
4513 rtx dstb, srcb, size;
4514
4515 /* Evaluate to pointers. */
4516 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4517 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4518 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4519 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4520
4521 dstb = convert_memory_address (Pmode, dstb);
4522 srcb = convert_memory_address (Pmode, srcb);
4523
4524 /* "Dereference" to BLKmode memories. */
4525 dstb = gen_rtx_MEM (BLKmode, dstb);
4526 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4527 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4528 srcb = gen_rtx_MEM (BLKmode, srcb);
4529 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4530 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4531
4532 /* Copy. */
4533 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4534 }
4535
4536 return const0_rtx;
4537 }
4538
4539 /* Expand a call to one of the builtin functions __builtin_frame_address or
4540 __builtin_return_address. */
4541
4542 static rtx
4543 expand_builtin_frame_address (tree fndecl, tree exp)
4544 {
4545 /* The argument must be a nonnegative integer constant.
4546 It counts the number of frames to scan up the stack.
4547 The value is either the frame pointer value or the return
4548 address saved in that frame. */
4549 if (call_expr_nargs (exp) == 0)
4550 /* Warning about missing arg was already issued. */
4551 return const0_rtx;
4552 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
4553 {
4554 error ("invalid argument to %qD", fndecl);
4555 return const0_rtx;
4556 }
4557 else
4558 {
4559 /* Number of frames to scan up the stack. */
4560 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
4561
4562 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
4563
4564 /* Some ports cannot access arbitrary stack frames. */
4565 if (tem == NULL)
4566 {
4567 warning (0, "unsupported argument to %qD", fndecl);
4568 return const0_rtx;
4569 }
4570
4571 if (count)
4572 {
4573 /* Warn since no effort is made to ensure that any frame
4574 beyond the current one exists or can be safely reached. */
4575 warning (OPT_Wframe_address, "calling %qD with "
4576 "a nonzero argument is unsafe", fndecl);
4577 }
4578
4579 /* For __builtin_frame_address, return what we've got. */
4580 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4581 return tem;
4582
4583 if (!REG_P (tem)
4584 && ! CONSTANT_P (tem))
4585 tem = copy_addr_to_reg (tem);
4586 return tem;
4587 }
4588 }
4589
4590 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4591 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4592 is the same as for allocate_dynamic_stack_space. */
4593
4594 static rtx
4595 expand_builtin_alloca (tree exp, bool cannot_accumulate)
4596 {
4597 rtx op0;
4598 rtx result;
4599 bool valid_arglist;
4600 unsigned int align;
4601 bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
4602 == BUILT_IN_ALLOCA_WITH_ALIGN);
4603
4604 valid_arglist
4605 = (alloca_with_align
4606 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4607 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4608
4609 if (!valid_arglist)
4610 return NULL_RTX;
4611
4612 /* Compute the argument. */
4613 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4614
4615 /* Compute the alignment. */
4616 align = (alloca_with_align
4617 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
4618 : BIGGEST_ALIGNMENT);
4619
4620 /* Allocate the desired space. */
4621 result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
4622 result = convert_memory_address (ptr_mode, result);
4623
4624 return result;
4625 }
4626
4627 /* Expand a call to bswap builtin in EXP.
4628 Return NULL_RTX if a normal call should be emitted rather than expanding the
4629 function in-line. If convenient, the result should be placed in TARGET.
4630 SUBTARGET may be used as the target for computing one of EXP's operands. */
4631
4632 static rtx
4633 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
4634 rtx subtarget)
4635 {
4636 tree arg;
4637 rtx op0;
4638
4639 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4640 return NULL_RTX;
4641
4642 arg = CALL_EXPR_ARG (exp, 0);
4643 op0 = expand_expr (arg,
4644 subtarget && GET_MODE (subtarget) == target_mode
4645 ? subtarget : NULL_RTX,
4646 target_mode, EXPAND_NORMAL);
4647 if (GET_MODE (op0) != target_mode)
4648 op0 = convert_to_mode (target_mode, op0, 1);
4649
4650 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
4651
4652 gcc_assert (target);
4653
4654 return convert_to_mode (target_mode, target, 1);
4655 }
4656
4657 /* Expand a call to a unary builtin in EXP.
4658 Return NULL_RTX if a normal call should be emitted rather than expanding the
4659 function in-line. If convenient, the result should be placed in TARGET.
4660 SUBTARGET may be used as the target for computing one of EXP's operands. */
4661
4662 static rtx
4663 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
4664 rtx subtarget, optab op_optab)
4665 {
4666 rtx op0;
4667
4668 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4669 return NULL_RTX;
4670
4671 /* Compute the argument. */
4672 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4673 (subtarget
4674 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4675 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
4676 VOIDmode, EXPAND_NORMAL);
4677 /* Compute op, into TARGET if possible.
4678 Set TARGET to wherever the result comes back. */
4679 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4680 op_optab, op0, target, op_optab != clrsb_optab);
4681 gcc_assert (target);
4682
4683 return convert_to_mode (target_mode, target, 0);
4684 }
4685
4686 /* Expand a call to __builtin_expect. We just return our argument
4687 as the builtin_expect semantics should have already been applied by
4688 the tree branch prediction pass. */
4689
4690 static rtx
4691 expand_builtin_expect (tree exp, rtx target)
4692 {
4693 tree arg;
4694
4695 if (call_expr_nargs (exp) < 2)
4696 return const0_rtx;
4697 arg = CALL_EXPR_ARG (exp, 0);
4698
4699 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4700 /* When guessing was done, the hints should already have been stripped away. */
4701 gcc_assert (!flag_guess_branch_prob
4702 || optimize == 0 || seen_error ());
4703 return target;
4704 }
4705
4706 /* Expand a call to __builtin_assume_aligned. We just return our first
4707 argument, as the builtin_assume_aligned semantics should have already
4708 been applied by CCP. */
4709
4710 static rtx
4711 expand_builtin_assume_aligned (tree exp, rtx target)
4712 {
4713 if (call_expr_nargs (exp) < 2)
4714 return const0_rtx;
4715 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4716 EXPAND_NORMAL);
4717 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4718 && (call_expr_nargs (exp) < 3
4719 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4720 return target;
4721 }
4722
4723 void
4724 expand_builtin_trap (void)
4725 {
4726 if (targetm.have_trap ())
4727 {
4728 rtx_insn *insn = emit_insn (targetm.gen_trap ());
4729 /* For trap insns when not accumulating outgoing args force
4730 REG_ARGS_SIZE note to prevent crossjumping of calls with
4731 different args sizes. */
4732 if (!ACCUMULATE_OUTGOING_ARGS)
4733 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4734 }
4735 else
4736 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
4737 emit_barrier ();
4738 }
4739
4740 /* Expand a call to __builtin_unreachable. We do nothing except emit
4741 a barrier saying that control flow will not pass here.
4742
4743 It is the responsibility of the program being compiled to ensure
4744 that control flow never reaches __builtin_unreachable. */
4745 static void
4746 expand_builtin_unreachable (void)
4747 {
4748 emit_barrier ();
4749 }
4750
4751 /* Expand EXP, a call to fabs, fabsf or fabsl.
4752 Return NULL_RTX if a normal call should be emitted rather than expanding
4753 the function inline. If convenient, the result should be placed
4754 in TARGET. SUBTARGET may be used as the target for computing
4755 the operand. */
4756
4757 static rtx
4758 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4759 {
4760 machine_mode mode;
4761 tree arg;
4762 rtx op0;
4763
4764 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4765 return NULL_RTX;
4766
4767 arg = CALL_EXPR_ARG (exp, 0);
4768 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4769 mode = TYPE_MODE (TREE_TYPE (arg));
4770 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4771 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4772 }
4773
4774 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4775 Return NULL if a normal call should be emitted rather than expanding the
4776 function inline. If convenient, the result should be placed in TARGET.
4777 SUBTARGET may be used as the target for computing the operand. */
4778
4779 static rtx
4780 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4781 {
4782 rtx op0, op1;
4783 tree arg;
4784
4785 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4786 return NULL_RTX;
4787
4788 arg = CALL_EXPR_ARG (exp, 0);
4789 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4790
4791 arg = CALL_EXPR_ARG (exp, 1);
4792 op1 = expand_normal (arg);
4793
4794 return expand_copysign (op0, op1, target);
4795 }
4796
4797 /* Expand a call to __builtin___clear_cache. */
4798
4799 static rtx
4800 expand_builtin___clear_cache (tree exp)
4801 {
4802 if (!targetm.code_for_clear_cache)
4803 {
4804 #ifdef CLEAR_INSN_CACHE
4805 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4806 does something. Just do the default expansion to a call to
4807 __clear_cache(). */
4808 return NULL_RTX;
4809 #else
4810 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4811 does nothing. There is no need to call it. Do nothing. */
4812 return const0_rtx;
4813 #endif /* CLEAR_INSN_CACHE */
4814 }
4815
4816 /* We have a "clear_cache" insn, and it will handle everything. */
4817 tree begin, end;
4818 rtx begin_rtx, end_rtx;
4819
4820 /* We must not expand to a library call. If we did, any
4821 fallback library function in libgcc that might contain a call to
4822 __builtin___clear_cache() would recurse infinitely. */
4823 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4824 {
4825 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
4826 return const0_rtx;
4827 }
4828
4829 if (targetm.have_clear_cache ())
4830 {
4831 struct expand_operand ops[2];
4832
4833 begin = CALL_EXPR_ARG (exp, 0);
4834 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
4835
4836 end = CALL_EXPR_ARG (exp, 1);
4837 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
4838
4839 create_address_operand (&ops[0], begin_rtx);
4840 create_address_operand (&ops[1], end_rtx);
4841 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
4842 return const0_rtx;
4843 }
4844 return const0_rtx;
4845 }
4846
4847 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
4848
4849 static rtx
4850 round_trampoline_addr (rtx tramp)
4851 {
4852 rtx temp, addend, mask;
4853
4854 /* If we don't need too much alignment, we'll have been guaranteed
4855 proper alignment by get_trampoline_type. */
4856 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
4857 return tramp;
4858
4859 /* Round address up to desired boundary. */
4860 temp = gen_reg_rtx (Pmode);
4861 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
4862 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
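/* This is the usual (addr + align - 1) & -align round-up sequence
with the alignment in bytes; e.g. for a 128-bit TRAMPOLINE_ALIGNMENT
it computes (tramp + 15) & -16. */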
4863
4864 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
4865 temp, 0, OPTAB_LIB_WIDEN);
4866 tramp = expand_simple_binop (Pmode, AND, temp, mask,
4867 temp, 0, OPTAB_LIB_WIDEN);
4868
4869 return tramp;
4870 }
4871
4872 static rtx
4873 expand_builtin_init_trampoline (tree exp, bool onstack)
4874 {
4875 tree t_tramp, t_func, t_chain;
4876 rtx m_tramp, r_tramp, r_chain, tmp;
4877
4878 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
4879 POINTER_TYPE, VOID_TYPE))
4880 return NULL_RTX;
4881
4882 t_tramp = CALL_EXPR_ARG (exp, 0);
4883 t_func = CALL_EXPR_ARG (exp, 1);
4884 t_chain = CALL_EXPR_ARG (exp, 2);
4885
4886 r_tramp = expand_normal (t_tramp);
4887 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
4888 MEM_NOTRAP_P (m_tramp) = 1;
4889
4890 /* If ONSTACK, the TRAMP argument should be the address of a field
4891 within the local function's FRAME decl. Either way, let's see if
4892 we can fill in the MEM_ATTRs for this memory. */
4893 if (TREE_CODE (t_tramp) == ADDR_EXPR)
4894 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
4895
4896 /* The creator of a heap trampoline is responsible for making sure the
4897 address is aligned to at least STACK_BOUNDARY. Normally malloc
4898 will ensure this anyhow. */
4899 tmp = round_trampoline_addr (r_tramp);
4900 if (tmp != r_tramp)
4901 {
4902 m_tramp = change_address (m_tramp, BLKmode, tmp);
4903 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
4904 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
4905 }
4906
4907 /* The FUNC argument should be the address of the nested function.
4908 Extract the actual function decl to pass to the hook. */
4909 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
4910 t_func = TREE_OPERAND (t_func, 0);
4911 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
4912
4913 r_chain = expand_normal (t_chain);
4914
4915 /* Generate insns to initialize the trampoline. */
4916 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
4917
4918 if (onstack)
4919 {
4920 trampolines_created = 1;
4921
4922 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
4923 "trampoline generated for nested function %qD", t_func);
4924 }
4925
4926 return const0_rtx;
4927 }
4928
4929 static rtx
4930 expand_builtin_adjust_trampoline (tree exp)
4931 {
4932 rtx tramp;
4933
4934 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4935 return NULL_RTX;
4936
4937 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4938 tramp = round_trampoline_addr (tramp);
4939 if (targetm.calls.trampoline_adjust_address)
4940 tramp = targetm.calls.trampoline_adjust_address (tramp);
4941
4942 return tramp;
4943 }
4944
4945 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
4946 function. The function first checks whether the back end provides
4947 an insn to implement signbit for the respective mode. If not, it
4948 checks whether the floating point format of the value is such that
4949 the sign bit can be extracted. If that is not the case, error out.
4950 EXP is the expression that is a call to the builtin function; if
4951 convenient, the result should be placed in TARGET. */
4952 static rtx
4953 expand_builtin_signbit (tree exp, rtx target)
4954 {
4955 const struct real_format *fmt;
4956 machine_mode fmode, imode, rmode;
4957 tree arg;
4958 int word, bitpos;
4959 enum insn_code icode;
4960 rtx temp;
4961 location_t loc = EXPR_LOCATION (exp);
4962
4963 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4964 return NULL_RTX;
4965
4966 arg = CALL_EXPR_ARG (exp, 0);
4967 fmode = TYPE_MODE (TREE_TYPE (arg));
4968 rmode = TYPE_MODE (TREE_TYPE (exp));
4969 fmt = REAL_MODE_FORMAT (fmode);
4970
4971 arg = builtin_save_expr (arg);
4972
4973 /* Expand the argument yielding a RTX expression. */
4974 temp = expand_normal (arg);
4975
4976 /* Check if the back end provides an insn that handles signbit for the
4977 argument's mode. */
4978 icode = optab_handler (signbit_optab, fmode);
4979 if (icode != CODE_FOR_nothing)
4980 {
4981 rtx_insn *last = get_last_insn ();
4982 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
4983 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
4984 return target;
4985 delete_insns_since (last);
4986 }
4987
4988 /* For floating point formats without a sign bit, implement signbit
4989 as "ARG < 0.0". */
4990 bitpos = fmt->signbit_ro;
4991 if (bitpos < 0)
4992 {
4993 /* But we can't do this if the format supports signed zero. */
4994 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
4995
4996 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
4997 build_real (TREE_TYPE (arg), dconst0));
4998 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4999 }
5000
5001 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5002 {
5003 imode = int_mode_for_mode (fmode);
5004 gcc_assert (imode != BLKmode);
5005 temp = gen_lowpart (imode, temp);
5006 }
5007 else
5008 {
5009 imode = word_mode;
5010 /* Handle targets with different FP word orders. */
5011 if (FLOAT_WORDS_BIG_ENDIAN)
5012 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5013 else
5014 word = bitpos / BITS_PER_WORD;
5015 temp = operand_subword_force (temp, word, fmode);
5016 bitpos = bitpos % BITS_PER_WORD;
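/* For example, an IEEE double (bitpos 63) on a 32-bit little-endian
target selects word 1 and bit 31 within that word. */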
5017 }
5018
5019 /* Force the intermediate word_mode (or narrower) result into a
5020 register. This avoids attempting to create paradoxical SUBREGs
5021 of floating point modes below. */
5022 temp = force_reg (imode, temp);
5023
5024 /* If the bitpos is within the "result mode" lowpart, the operation
5025 can be implemented with a single bitwise AND. Otherwise, we need
5026 a right shift and an AND. */
5027
5028 if (bitpos < GET_MODE_BITSIZE (rmode))
5029 {
5030 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
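/* E.g. for SFmode, whose sign bit is bit 31, a 32-bit RMODE gets
the mask 0x80000000. */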
5031
5032 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5033 temp = gen_lowpart (rmode, temp);
5034 temp = expand_binop (rmode, and_optab, temp,
5035 immed_wide_int_const (mask, rmode),
5036 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5037 }
5038 else
5039 {
5040 /* Perform a logical right shift to place the signbit in the least
5041 significant bit, then truncate the result to the desired mode
5042 and mask just this bit. */
5043 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
5044 temp = gen_lowpart (rmode, temp);
5045 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5046 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5047 }
5048
5049 return temp;
5050 }
5051
5052 /* Expand fork or exec calls. TARGET is the desired target of the
5053 call. EXP is the call. FN is the
5054 declaration (FUNCTION_DECL) of the actual function. IGNORE is nonzero if the
5055 value is to be ignored. */
5056
5057 static rtx
5058 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5059 {
5060 tree id, decl;
5061 tree call;
5062
5063 /* If we are not profiling, just call the function. */
5064 if (!profile_arc_flag)
5065 return NULL_RTX;
5066
5067 /* Otherwise call the wrapper. To the rest of the compiler this should be
5068 equivalent, so the generated code does not diverge, while the wrapper
5069 can run the extra code needed to keep the profiling data sane. */
5070
5071 switch (DECL_FUNCTION_CODE (fn))
5072 {
5073 case BUILT_IN_FORK:
5074 id = get_identifier ("__gcov_fork");
5075 break;
5076
5077 case BUILT_IN_EXECL:
5078 id = get_identifier ("__gcov_execl");
5079 break;
5080
5081 case BUILT_IN_EXECV:
5082 id = get_identifier ("__gcov_execv");
5083 break;
5084
5085 case BUILT_IN_EXECLP:
5086 id = get_identifier ("__gcov_execlp");
5087 break;
5088
5089 case BUILT_IN_EXECLE:
5090 id = get_identifier ("__gcov_execle");
5091 break;
5092
5093 case BUILT_IN_EXECVP:
5094 id = get_identifier ("__gcov_execvp");
5095 break;
5096
5097 case BUILT_IN_EXECVE:
5098 id = get_identifier ("__gcov_execve");
5099 break;
5100
5101 default:
5102 gcc_unreachable ();
5103 }
5104
5105 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5106 FUNCTION_DECL, id, TREE_TYPE (fn));
5107 DECL_EXTERNAL (decl) = 1;
5108 TREE_PUBLIC (decl) = 1;
5109 DECL_ARTIFICIAL (decl) = 1;
5110 TREE_NOTHROW (decl) = 1;
5111 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5112 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5113 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5114 return expand_call (call, target, ignore);
5115 }
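
/* For example (an illustration, not code from this file): when compiling
   with -fprofile-arcs, a call such as

     pid_t pid = fork ();

   is expanded as if it were

     pid_t pid = __gcov_fork ();

   where libgcov's wrapper flushes the profile data before calling the
   real fork, so parent and child keep consistent profiling counters.  */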
5116
5117
5118 \f
5119 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5120 the pointer in these functions is void*, the tree optimizers may remove
5121 casts. The mode computed in expand_builtin isn't reliable either, due
5122 to __sync_bool_compare_and_swap.
5123
5124 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5125 group of builtins. This gives us log2 of the mode size. */
5126
5127 static inline machine_mode
5128 get_builtin_sync_mode (int fcode_diff)
5129 {
5130 /* The size is not negotiable, so ask not to get BLKmode in return
5131 if the target indicates that a smaller size would be better. */
5132 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5133 }
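
/* Worked example (illustrative): for __sync_fetch_and_add_4, FCODE_DIFF is
   BUILT_IN_SYNC_FETCH_AND_ADD_4 - BUILT_IN_SYNC_FETCH_AND_ADD_1 == 2, so we
   request a mode of BITS_PER_UNIT << 2 == 32 bits, i.e. SImode on a typical
   target.  */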
5134
5135 /* Expand the memory expression LOC and return the appropriate memory operand
5136 for the builtin_sync operations. */
5137
5138 static rtx
5139 get_builtin_sync_mem (tree loc, machine_mode mode)
5140 {
5141 rtx addr, mem;
5142
5143 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5144 addr = convert_memory_address (Pmode, addr);
5145
5146 /* Note that we explicitly do not want any alias information for this
5147 memory, so that we kill all other live memories. Otherwise we don't
5148 satisfy the full barrier semantics of the intrinsic. */
5149 mem = validize_mem (gen_rtx_MEM (mode, addr));
5150
5151 /* The alignment needs to be at least that of the mode. */
5152 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5153 get_pointer_alignment (loc)));
5154 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5155 MEM_VOLATILE_P (mem) = 1;
5156
5157 return mem;
5158 }
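
/* Why the barrier alias set matters, illustratively:

     data = 1;
     __sync_lock_release (&lock);

   The store to DATA must not be moved past the release of LOCK.  Giving
   the lock's MEM the ALIAS_SET_MEMORY_BARRIER alias set and marking it
   volatile keeps the RTL optimizers from reordering other memory accesses
   across it.  */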
5159
5160 /* Make sure an argument is in the right mode.
5161 EXP is the tree argument.
5162 MODE is the mode it should be in. */
5163
5164 static rtx
5165 expand_expr_force_mode (tree exp, machine_mode mode)
5166 {
5167 rtx val;
5168 machine_mode old_mode;
5169
5170 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5171 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5172 of CONST_INTs, where we know the old_mode only from the call argument. */
5173
5174 old_mode = GET_MODE (val);
5175 if (old_mode == VOIDmode)
5176 old_mode = TYPE_MODE (TREE_TYPE (exp));
5177 val = convert_modes (mode, old_mode, val, 1);
5178 return val;
5179 }
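
/* For instance (illustrative): for __sync_fetch_and_add_1 (p, 1) the
   constant 1 expands to a VOIDmode CONST_INT, so OLD_MODE must be taken
   from the tree type; a variable argument may instead arrive promoted to
   a wider mode and is converted back down to MODE here.  */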
5180
5181
5182 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5183 EXP is the CALL_EXPR. CODE is the rtx code
5184 that corresponds to the arithmetic or logical operation from the name;
5185 an exception here is that NOT actually means NAND. TARGET is an optional
5186 place for us to store the results; AFTER is true if this is the
5187 xxx_and_fetch form, i.e. if the result of the operation is returned. */
5188
5189 static rtx
5190 expand_builtin_sync_operation (machine_mode mode, tree exp,
5191 enum rtx_code code, bool after,
5192 rtx target)
5193 {
5194 rtx val, mem;
5195 location_t loc = EXPR_LOCATION (exp);
5196
5197 if (code == NOT && warn_sync_nand)
5198 {
5199 tree fndecl = get_callee_fndecl (exp);
5200 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5201
5202 static bool warned_f_a_n, warned_n_a_f;
5203
5204 switch (fcode)
5205 {
5206 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5207 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5208 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5209 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5210 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5211 if (warned_f_a_n)
5212 break;
5213
5214 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5215 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5216 warned_f_a_n = true;
5217 break;
5218
5219 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5220 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5221 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5222 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5223 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5224 if (warned_n_a_f)
5225 break;
5226
5227 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5228 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5229 warned_n_a_f = true;
5230 break;
5231
5232 default:
5233 gcc_unreachable ();
5234 }
5235 }
5236
5237 /* Expand the operands. */
5238 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5239 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5240
5241 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
5242 after);
5243 }
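
/* The GCC 4.4 semantic change warned about above is, in C-like terms
   (an illustration):

     GCC < 4.4:   tmp = *p;  *p = ~tmp & val;    return tmp;
     GCC >= 4.4:  tmp = *p;  *p = ~(tmp & val);  return tmp;

   for __sync_fetch_and_nand (p, val), i.e. NAND now really means
   ~(old & val), which is why NOT stands for NAND here.  */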
5244
5245 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5246 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5247 true if this is the boolean form. TARGET is a place for us to store the
5248 results; this is NOT optional if IS_BOOL is true. */
5249
5250 static rtx
5251 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
5252 bool is_bool, rtx target)
5253 {
5254 rtx old_val, new_val, mem;
5255 rtx *pbool, *poval;
5256
5257 /* Expand the operands. */
5258 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5259 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5260 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5261
5262 pbool = poval = NULL;
5263 if (target != const0_rtx)
5264 {
5265 if (is_bool)
5266 pbool = &target;
5267 else
5268 poval = &target;
5269 }
5270 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5271 false, MEMMODEL_SYNC_SEQ_CST,
5272 MEMMODEL_SYNC_SEQ_CST))
5273 return NULL_RTX;
5274
5275 return target;
5276 }
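
/* The two user-visible forms handled here are, roughly (illustrative):

     bool ok  = __sync_bool_compare_and_swap (p, oldv, newv);
     T    old = __sync_val_compare_and_swap (p, oldv, newv);

   Both atomically perform "if (*p == oldv) *p = newv;" with full barrier
   semantics; they differ only in whether the success flag or the previous
   value is returned.  */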
5277
5278 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5279 general form is actually an atomic exchange, and some targets only
5280 support a reduced form with the second argument being a constant 1.
5281 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5282 the results. */
5283
5284 static rtx
5285 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
5286 rtx target)
5287 {
5288 rtx val, mem;
5289
5290 /* Expand the operands. */
5291 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5292 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5293
5294 return expand_sync_lock_test_and_set (target, mem, val);
5295 }
5296
5297 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5298
5299 static void
5300 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
5301 {
5302 rtx mem;
5303
5304 /* Expand the operands. */
5305 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5306
5307 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
5308 }
5309
5310 /* Given an integer representing an ``enum memmodel'', verify its
5311 correctness and return the memory model enum. */
5312
5313 static enum memmodel
5314 get_memmodel (tree exp)
5315 {
5316 rtx op;
5317 unsigned HOST_WIDE_INT val;
5318
5319 /* If the parameter is not a constant, it's a run-time value, so we'll just
5320 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5321 if (TREE_CODE (exp) != INTEGER_CST)
5322 return MEMMODEL_SEQ_CST;
5323
5324 op = expand_normal (exp);
5325
5326 val = INTVAL (op);
5327 if (targetm.memmodel_check)
5328 val = targetm.memmodel_check (val);
5329 else if (val & ~MEMMODEL_MASK)
5330 {
5331 warning (OPT_Winvalid_memory_model,
5332 "Unknown architecture specifier in memory model to builtin.");
5333 return MEMMODEL_SEQ_CST;
5334 }
5335
5336 /* We should never see a user-supplied SYNC memory model, so >= LAST works. */
5337 if (memmodel_base (val) >= MEMMODEL_LAST)
5338 {
5339 warning (OPT_Winvalid_memory_model,
5340 "invalid memory model argument to builtin");
5341 return MEMMODEL_SEQ_CST;
5342 }
5343
5344 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
5345 be conservative and promote consume to acquire. */
5346 if (val == MEMMODEL_CONSUME)
5347 val = MEMMODEL_ACQUIRE;
5348
5349 return (enum memmodel) val;
5350 }
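
/* Example (illustrative): __atomic_load_n (&x, __ATOMIC_CONSUME) arrives
   here with an INTEGER_CST model; because of the PR 59448 workaround the
   load is expanded as if __ATOMIC_ACQUIRE had been written.  A model
   computed at run time is conservatively treated as MEMMODEL_SEQ_CST.  */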
5351
5352 /* Expand the __atomic_exchange intrinsic:
5353 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5354 EXP is the CALL_EXPR.
5355 TARGET is an optional place for us to store the results. */
5356
5357 static rtx
5358 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
5359 {
5360 rtx val, mem;
5361 enum memmodel model;
5362
5363 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5364
5365 if (!flag_inline_atomics)
5366 return NULL_RTX;
5367
5368 /* Expand the operands. */
5369 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5370 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5371
5372 return expand_atomic_exchange (target, mem, val, model);
5373 }
5374
5375 /* Expand the __atomic_compare_exchange intrinsic:
5376 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5377 TYPE desired, BOOL weak,
5378 enum memmodel success,
5379 enum memmodel failure)
5380 EXP is the CALL_EXPR.
5381 TARGET is an optional place for us to store the results. */
5382
5383 static rtx
5384 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
5385 rtx target)
5386 {
5387 rtx expect, desired, mem, oldval;
5388 rtx_code_label *label;
5389 enum memmodel success, failure;
5390 tree weak;
5391 bool is_weak;
5392
5393 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5394 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5395
5396 if (failure > success)
5397 {
5398 warning (OPT_Winvalid_memory_model,
5399 "failure memory model cannot be stronger than success memory "
5400 "model for %<__atomic_compare_exchange%>");
5401 success = MEMMODEL_SEQ_CST;
5402 }
5403
5404 if (is_mm_release (failure) || is_mm_acq_rel (failure))
5405 {
5406 warning (OPT_Winvalid_memory_model,
5407 "invalid failure memory model for "
5408 "%<__atomic_compare_exchange%>");
5409 failure = MEMMODEL_SEQ_CST;
5410 success = MEMMODEL_SEQ_CST;
5411 }
5412
5413
5414 if (!flag_inline_atomics)
5415 return NULL_RTX;
5416
5417 /* Expand the operands. */
5418 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5419
5420 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5421 expect = convert_memory_address (Pmode, expect);
5422 expect = gen_rtx_MEM (mode, expect);
5423 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5424
5425 weak = CALL_EXPR_ARG (exp, 3);
5426 is_weak = false;
5427 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
5428 is_weak = true;
5429
5430 if (target == const0_rtx)
5431 target = NULL;
5432
5433 /* Lest the RTL backend create a race condition with an improper store
5434 to memory, always create a new pseudo for OLDVAL. */
5435 oldval = NULL;
5436
5437 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
5438 is_weak, success, failure))
5439 return NULL_RTX;
5440
5441 /* Conditionally store back to EXPECT, lest we create a race condition
5442 with an improper store to memory. */
5443 /* ??? With a rearrangement of atomics at the gimple level, we can handle
5444 the normal case where EXPECT is totally private, i.e. a register. At
5445 which point the store can be unconditional. */
5446 label = gen_label_rtx ();
5447 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
5448 GET_MODE (target), 1, label);
5449 emit_move_insn (expect, oldval);
5450 emit_label (label);
5451
5452 return target;
5453 }
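
/* What the emitted sequence implements is roughly (a sketch; atomic_cas
   here is a stand-in for the expanded compare-and-swap, not a real
   function):

     bool
     compare_exchange (T *obj, T *expect, T desired)
     {
       T old;
       bool ok = atomic_cas (obj, *expect, desired, &old);
       if (!ok)
         *expect = old;   /* the conditional store-back above */
       return ok;
     }
*/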
5454
5455 /* Expand the __atomic_load intrinsic:
5456 TYPE __atomic_load (TYPE *object, enum memmodel)
5457 EXP is the CALL_EXPR.
5458 TARGET is an optional place for us to store the results. */
5459
5460 static rtx
5461 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
5462 {
5463 rtx mem;
5464 enum memmodel model;
5465
5466 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5467 if (is_mm_release (model) || is_mm_acq_rel (model))
5468 {
5469 warning (OPT_Winvalid_memory_model,
5470 "invalid memory model for %<__atomic_load%>");
5471 model = MEMMODEL_SEQ_CST;
5472 }
5473
5474 if (!flag_inline_atomics)
5475 return NULL_RTX;
5476
5477 /* Expand the operand. */
5478 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5479
5480 return expand_atomic_load (target, mem, model);
5481 }
5482
5483
5484 /* Expand the __atomic_store intrinsic:
5485 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5486 EXP is the CALL_EXPR. A store produces no value, so there is
5487 no TARGET. */
5488
5489 static rtx
5490 expand_builtin_atomic_store (machine_mode mode, tree exp)
5491 {
5492 rtx mem, val;
5493 enum memmodel model;
5494
5495 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5496 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
5497 || is_mm_release (model)))
5498 {
5499 warning (OPT_Winvalid_memory_model,
5500 "invalid memory model for %<__atomic_store%>");
5501 model = MEMMODEL_SEQ_CST;
5502 }
5503
5504 if (!flag_inline_atomics)
5505 return NULL_RTX;
5506
5507 /* Expand the operands. */
5508 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5509 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5510
5511 return expand_atomic_store (mem, val, model, false);
5512 }
5513
5514 /* Expand the __atomic_fetch_XXX intrinsic:
5515 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5516 EXP is the CALL_EXPR.
5517 TARGET is an optional place for us to store the results.
5518 CODE is the operation: PLUS, MINUS, AND, XOR, or IOR (NOT means NAND).
5519 FETCH_AFTER is true if returning the result of the operation.
5520 FETCH_AFTER is false if returning the value before the operation.
5521 IGNORE is true if the result is not used.
5522 EXT_CALL is the correct builtin for an external call if this cannot be
5523 resolved to an instruction sequence. */
5524
5525 static rtx
5526 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
5527 enum rtx_code code, bool fetch_after,
5528 bool ignore, enum built_in_function ext_call)
5529 {
5530 rtx val, mem, ret;
5531 enum memmodel model;
5532 tree fndecl;
5533 tree addr;
5534
5535 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5536
5537 /* Expand the operands. */
5538 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5539 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5540
5541 /* Only try generating instructions if inlining is turned on. */
5542 if (flag_inline_atomics)
5543 {
5544 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
5545 if (ret)
5546 return ret;
5547 }
5548
5549 /* If there is no library routine to fall back on, give up. */
5550 if (ext_call == BUILT_IN_NONE)
5551 return NULL_RTX;
5552
5553 /* Change the call to the specified function. */
5554 fndecl = get_callee_fndecl (exp);
5555 addr = CALL_EXPR_FN (exp);
5556 STRIP_NOPS (addr);
5557
5558 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
5559 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
5560
5561 /* Expand the call here so we can emit trailing code. */
5562 ret = expand_call (exp, target, ignore);
5563
5564 /* Replace the original function just in case it matters. */
5565 TREE_OPERAND (addr, 0) = fndecl;
5566
5567 /* Then issue the arithmetic correction to return the right result. */
5568 if (!ignore)
5569 {
5570 if (code == NOT)
5571 {
5572 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
5573 OPTAB_LIB_WIDEN);
5574 ret = expand_simple_unop (mode, NOT, ret, target, true);
5575 }
5576 else
5577 ret = expand_simple_binop (mode, code, ret, val, target, true,
5578 OPTAB_LIB_WIDEN);
5579 }
5580 return ret;
5581 }
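
/* Example of the trailing correction (illustrative): if
   __atomic_add_fetch_4 cannot be inlined, the call is redirected to the
   library __atomic_fetch_add_4, which returns the *old* value, and the
   addition is replayed on the result:

     result = __atomic_fetch_add_4 (p, v, model) + v;

   For NAND the replay is ~(result & v), matching the NOT case above.  */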
5582
5583 /* Expand an atomic clear operation.
5584 void __atomic_clear (BOOL *obj, enum memmodel)
5585 EXP is the call expression. */
5586
5587 static rtx
5588 expand_builtin_atomic_clear (tree exp)
5589 {
5590 machine_mode mode;
5591 rtx mem, ret;
5592 enum memmodel model;
5593
5594 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5595 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5596 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5597
5598 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
5599 {
5600 warning (OPT_Winvalid_memory_model,
5601 "invalid memory model for %<__atomic_store%>");
5602 model = MEMMODEL_SEQ_CST;
5603 }
5604
5605 /* Try issuing an atomic store, allowing fallback to a __sync_lock_release
5606 pattern; failing both, emit a plain store below. The only way this can
5607 fail is if the bool type is larger than a word size. Unlikely, but
5608 handle it anyway for completeness. Assume a single threaded model since
5609 there is no atomic support in this case, and no barriers are required. */
5610 ret = expand_atomic_store (mem, const0_rtx, model, true);
5611 if (!ret)
5612 emit_move_insn (mem, const0_rtx);
5613 return const0_rtx;
5614 }
5615
5616 /* Expand an atomic test_and_set operation.
5617 bool __atomic_test_and_set (BOOL *obj, enum memmodel)
5618 EXP is the call expression. */
5619
5620 static rtx
5621 expand_builtin_atomic_test_and_set (tree exp, rtx target)
5622 {
5623 rtx mem;
5624 enum memmodel model;
5625 machine_mode mode;
5626
5627 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5628 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5629 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5630
5631 return expand_atomic_test_and_set (target, mem, model);
5632 }
5633
5634
5635 /* Return boolean_true_node if (optional) argument ARG1 of size ARG0 is always
5636 lock free on this architecture. If ARG1 is NULL, use the typical alignment for size ARG0. */
5637
5638 static tree
5639 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
5640 {
5641 int size;
5642 machine_mode mode;
5643 unsigned int mode_align, type_align;
5644
5645 if (TREE_CODE (arg0) != INTEGER_CST)
5646 return NULL_TREE;
5647
5648 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
5649 mode = mode_for_size (size, MODE_INT, 0);
5650 mode_align = GET_MODE_ALIGNMENT (mode);
5651
5652 if (TREE_CODE (arg1) == INTEGER_CST && INTVAL (expand_normal (arg1)) == 0)
5653 type_align = mode_align;
5654 else
5655 {
5656 tree ttype = TREE_TYPE (arg1);
5657
5658 /* This function is usually invoked and folded immediately by the front
5659 end before anything else has a chance to look at it. The pointer
5660 parameter at this point is usually cast to a void *, so check for that
5661 and look past the cast. */
5662 if (CONVERT_EXPR_P (arg1) && POINTER_TYPE_P (ttype)
5663 && VOID_TYPE_P (TREE_TYPE (ttype)))
5664 arg1 = TREE_OPERAND (arg1, 0);
5665
5666 ttype = TREE_TYPE (arg1);
5667 gcc_assert (POINTER_TYPE_P (ttype));
5668
5669 /* Get the underlying type of the object. */
5670 ttype = TREE_TYPE (ttype);
5671 type_align = TYPE_ALIGN (ttype);
5672 }
5673
5674 /* If the object has smaller alignment, the lock free routines cannot
5675 be used. */
5676 if (type_align < mode_align)
5677 return boolean_false_node;
5678
5679 /* Check if a compare_and_swap pattern exists for the mode which represents
5680 the required size. The pattern is not allowed to fail, so the existence
5681 of the pattern indicates support is present. */
5682 if (can_compare_and_swap_p (mode, true))
5683 return boolean_true_node;
5684 else
5685 return boolean_false_node;
5686 }
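
/* For instance (illustrative): on a target with a 32-bit compare-and-swap
   pattern, __atomic_always_lock_free (4, 0) folds to true: ARG1 is null,
   so the mode's own alignment is assumed and can_compare_and_swap_p
   (SImode, true) succeeds.  With a pointer to an underaligned 4-byte
   object it folds to false instead.  */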
5687
5688 /* Return const1_rtx if the parameters to call EXP represent an object which
5689 will always generate lock free instructions. The first argument represents
5690 the size of the object, and the second parameter is a pointer to the object
5691 itself. If NULL is passed for the object, then the result is based on
5692 typical alignment for an object of the specified size. Otherwise return
5693 const0_rtx. */
5694
5695 static rtx
5696 expand_builtin_atomic_always_lock_free (tree exp)
5697 {
5698 tree size;
5699 tree arg0 = CALL_EXPR_ARG (exp, 0);
5700 tree arg1 = CALL_EXPR_ARG (exp, 1);
5701
5702 if (TREE_CODE (arg0) != INTEGER_CST)
5703 {
5704 error ("non-constant argument 1 to __atomic_always_lock_free");
5705 return const0_rtx;
5706 }
5707
5708 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
5709 if (size == boolean_true_node)
5710 return const1_rtx;
5711 return const0_rtx;
5712 }
5713
5714 /* Return boolean_true_node if it can be determined that object ARG1 of size
5715 ARG0 is lock free on this architecture, and NULL_TREE otherwise. */
5716
5717 static tree
5718 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
5719 {
5720 if (!flag_inline_atomics)
5721 return NULL_TREE;
5722
5723 /* If it isn't always lock free, don't generate a result. */
5724 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
5725 return boolean_true_node;
5726
5727 return NULL_TREE;
5728 }
5729
5730 /* Return const1_rtx if the parameters to call EXP represent an object which
5731 is known at compile time to generate lock free instructions. The first
5732 argument represents the size of the object, and the second parameter is a
5733 pointer to the object itself. If NULL is passed for the object, then the
5734 result is based on typical alignment for an object of the specified size.
5735 Otherwise return NULL_RTX, so that a library call is emitted. */
5736
5737 static rtx
5738 expand_builtin_atomic_is_lock_free (tree exp)
5739 {
5740 tree size;
5741 tree arg0 = CALL_EXPR_ARG (exp, 0);
5742 tree arg1 = CALL_EXPR_ARG (exp, 1);
5743
5744 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5745 {
5746 error ("non-integer argument 1 to __atomic_is_lock_free");
5747 return NULL_RTX;
5748 }
5749
5750 if (!flag_inline_atomics)
5751 return NULL_RTX;
5752
5753 /* If the value is known at compile time, return the RTX for it. */
5754 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
5755 if (size == boolean_true_node)
5756 return const1_rtx;
5757
5758 return NULL_RTX;
5759 }
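
/* Unlike __atomic_always_lock_free, a "don't know" answer here is not
   final: returning NULL_RTX makes the caller emit a call to the runtime
   __atomic_is_lock_free, which can still answer true, e.g. based on the
   actual address of the object.  */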
5760
5761 /* Expand the __atomic_thread_fence intrinsic:
5762 void __atomic_thread_fence (enum memmodel)
5763 EXP is the CALL_EXPR. */
5764
5765 static void
5766 expand_builtin_atomic_thread_fence (tree exp)
5767 {
5768 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5769 expand_mem_thread_fence (model);
5770 }
5771
5772 /* Expand the __atomic_signal_fence intrinsic:
5773 void __atomic_signal_fence (enum memmodel)
5774 EXP is the CALL_EXPR. */
5775
5776 static void
5777 expand_builtin_atomic_signal_fence (tree exp)
5778 {
5779 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5780 expand_mem_signal_fence (model);
5781 }
5782
5783 /* Expand the __sync_synchronize intrinsic. */
5784
5785 static void
5786 expand_builtin_sync_synchronize (void)
5787 {
5788 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
5789 }
5790
5791 static rtx
5792 expand_builtin_thread_pointer (tree exp, rtx target)
5793 {
5794 enum insn_code icode;
5795 if (!validate_arglist (exp, VOID_TYPE))
5796 return const0_rtx;
5797 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
5798 if (icode != CODE_FOR_nothing)
5799 {
5800 struct expand_operand op;
5801 /* If the target is not suitable, create a new target. */
5802 if (target == NULL_RTX
5803 || !REG_P (target)
5804 || GET_MODE (target) != Pmode)
5805 target = gen_reg_rtx (Pmode);
5806 create_output_operand (&op, target, Pmode);
5807 expand_insn (icode, 1, &op);
5808 return target;
5809 }
5810 error ("__builtin_thread_pointer is not supported on this target");
5811 return const0_rtx;
5812 }
5813
5814 static void
5815 expand_builtin_set_thread_pointer (tree exp)
5816 {
5817 enum insn_code icode;
5818 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5819 return;
5820 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
5821 if (icode != CODE_FOR_nothing)
5822 {
5823 struct expand_operand op;
5824 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
5825 Pmode, EXPAND_NORMAL);
5826 create_input_operand (&op, val, Pmode);
5827 expand_insn (icode, 1, &op);
5828 return;
5829 }
5830 error ("__builtin_set_thread_pointer is not supported on this target");
5831 }
5832
5833 \f
5834 /* Emit code to restore the stack pointer to the value saved in VAR. */
5835
5836 static void
5837 expand_stack_restore (tree var)
5838 {
5839 rtx_insn *prev;
5840 rtx sa = expand_normal (var);
5841
5842 sa = convert_memory_address (Pmode, sa);
5843
5844 prev = get_last_insn ();
5845 emit_stack_restore (SAVE_BLOCK, sa);
5846
5847 record_new_stack_level ();
5848
5849 fixup_args_size_notes (prev, get_last_insn (), 0);
5850 }
5851
5852 /* Emit code to save the current value of the stack pointer. */
5853
5854 static rtx
5855 expand_stack_save (void)
5856 {
5857 rtx ret = NULL_RTX;
5858
5859 emit_stack_save (SAVE_BLOCK, &ret);
5860 return ret;
5861 }
5862
5863
5864 /* Expand OpenACC acc_on_device.
5865
5866 This has to happen late (that is, not in early folding; expand_builtin_*,
5867 rather than fold_builtin_*), as we have to act differently for host and
5868 acceleration device (ACCEL_COMPILER conditional). */
5869
5870 static rtx
5871 expand_builtin_acc_on_device (tree exp, rtx target)
5872 {
5873 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5874 return NULL_RTX;
5875
5876 tree arg = CALL_EXPR_ARG (exp, 0);
5877
5878 /* Return (arg == v1 || arg == v2) ? 1 : 0. */
5879 machine_mode v_mode = TYPE_MODE (TREE_TYPE (arg));
5880 rtx v = expand_normal (arg), v1, v2;
5881 #ifdef ACCEL_COMPILER
5882 v1 = GEN_INT (GOMP_DEVICE_NOT_HOST);
5883 v2 = GEN_INT (ACCEL_COMPILER_acc_device);
5884 #else
5885 v1 = GEN_INT (GOMP_DEVICE_NONE);
5886 v2 = GEN_INT (GOMP_DEVICE_HOST);
5887 #endif
5888 machine_mode target_mode = TYPE_MODE (integer_type_node);
5889 if (!target || !register_operand (target, target_mode))
5890 target = gen_reg_rtx (target_mode);
5891 emit_move_insn (target, const1_rtx);
5892 rtx_code_label *done_label = gen_label_rtx ();
5893 do_compare_rtx_and_jump (v, v1, EQ, false, v_mode, NULL_RTX,
5894 NULL, done_label, PROB_EVEN);
5895 do_compare_rtx_and_jump (v, v2, EQ, false, v_mode, NULL_RTX,
5896 NULL, done_label, PROB_EVEN);
5897 emit_move_insn (target, const0_rtx);
5898 emit_label (done_label);
5899
5900 return target;
5901 }
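
/* The emitted code computes, in effect (illustrative):

     target = 1;
     if (v == v1) goto done;
     if (v == v2) goto done;
     target = 0;
   done:

   where (v1, v2) is (GOMP_DEVICE_NOT_HOST, ACCEL_COMPILER_acc_device)
   when compiling for an offload device and (GOMP_DEVICE_NONE,
   GOMP_DEVICE_HOST) in the host compiler.  */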
5902
5903
5904 /* Expand an expression EXP that calls a built-in function,
5905 with result going to TARGET if that's convenient
5906 (and in mode MODE if that's convenient).
5907 SUBTARGET may be used as the target for computing one of EXP's operands.
5908 IGNORE is nonzero if the value is to be ignored. */
5909
5910 rtx
5911 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
5912 int ignore)
5913 {
5914 tree fndecl = get_callee_fndecl (exp);
5915 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5916 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5917 int flags;
5918
5919 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5920 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5921
5922 /* When ASan is enabled, we don't want to expand some memory/string
5923 builtins; we rely on libsanitizer's hooks instead. This allows us to
5924 avoid redundant checks and to be sure that a possible overflow will be
5925 detected by ASan. */
5926
5927 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
5928 return expand_call (exp, target, ignore);
5929
5930 /* When not optimizing, generate calls to library functions for a certain
5931 set of builtins. */
5932 if (!optimize
5933 && !called_as_built_in (fndecl)
5934 && fcode != BUILT_IN_FORK
5935 && fcode != BUILT_IN_EXECL
5936 && fcode != BUILT_IN_EXECV
5937 && fcode != BUILT_IN_EXECLP
5938 && fcode != BUILT_IN_EXECLE
5939 && fcode != BUILT_IN_EXECVP
5940 && fcode != BUILT_IN_EXECVE
5941 && fcode != BUILT_IN_ALLOCA
5942 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
5943 && fcode != BUILT_IN_FREE
5944 && fcode != BUILT_IN_CHKP_SET_PTR_BOUNDS
5945 && fcode != BUILT_IN_CHKP_INIT_PTR_BOUNDS
5946 && fcode != BUILT_IN_CHKP_NULL_PTR_BOUNDS
5947 && fcode != BUILT_IN_CHKP_COPY_PTR_BOUNDS
5948 && fcode != BUILT_IN_CHKP_NARROW_PTR_BOUNDS
5949 && fcode != BUILT_IN_CHKP_STORE_PTR_BOUNDS
5950 && fcode != BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
5951 && fcode != BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
5952 && fcode != BUILT_IN_CHKP_CHECK_PTR_BOUNDS
5953 && fcode != BUILT_IN_CHKP_GET_PTR_LBOUND
5954 && fcode != BUILT_IN_CHKP_GET_PTR_UBOUND
5955 && fcode != BUILT_IN_CHKP_BNDRET)
5956 return expand_call (exp, target, ignore);
5957
5958 /* The built-in function expanders test for target == const0_rtx
5959 to determine whether the function's result will be ignored. */
5960 if (ignore)
5961 target = const0_rtx;
5962
5963 /* If the result of a pure or const built-in function is ignored, and
5964 none of its arguments are volatile, we can avoid expanding the
5965 built-in call and just evaluate the arguments for side-effects. */
5966 if (target == const0_rtx
5967 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
5968 && !(flags & ECF_LOOPING_CONST_OR_PURE))
5969 {
5970 bool volatilep = false;
5971 tree arg;
5972 call_expr_arg_iterator iter;
5973
5974 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5975 if (TREE_THIS_VOLATILE (arg))
5976 {
5977 volatilep = true;
5978 break;
5979 }
5980
5981 if (! volatilep)
5982 {
5983 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5984 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5985 return const0_rtx;
5986 }
5987 }
5988
5989 /* expand_builtin_with_bounds is supposed to be used for
5990 instrumented builtin calls. */
5991 gcc_assert (!CALL_WITH_BOUNDS_P (exp));
5992
5993 switch (fcode)
5994 {
5995 CASE_FLT_FN (BUILT_IN_FABS):
5996 case BUILT_IN_FABSD32:
5997 case BUILT_IN_FABSD64:
5998 case BUILT_IN_FABSD128:
5999 target = expand_builtin_fabs (exp, target, subtarget);
6000 if (target)
6001 return target;
6002 break;
6003
6004 CASE_FLT_FN (BUILT_IN_COPYSIGN):
6005 target = expand_builtin_copysign (exp, target, subtarget);
6006 if (target)
6007 return target;
6008 break;
6009
6010 /* Just do a normal library call if we were unable to fold
6011 the values. */
6012 CASE_FLT_FN (BUILT_IN_CABS):
6013 break;
6014
6015 CASE_FLT_FN (BUILT_IN_EXP):
6016 CASE_FLT_FN (BUILT_IN_EXP10):
6017 CASE_FLT_FN (BUILT_IN_POW10):
6018 CASE_FLT_FN (BUILT_IN_EXP2):
6019 CASE_FLT_FN (BUILT_IN_EXPM1):
6020 CASE_FLT_FN (BUILT_IN_LOGB):
6021 CASE_FLT_FN (BUILT_IN_LOG):
6022 CASE_FLT_FN (BUILT_IN_LOG10):
6023 CASE_FLT_FN (BUILT_IN_LOG2):
6024 CASE_FLT_FN (BUILT_IN_LOG1P):
6025 CASE_FLT_FN (BUILT_IN_TAN):
6026 CASE_FLT_FN (BUILT_IN_ASIN):
6027 CASE_FLT_FN (BUILT_IN_ACOS):
6028 CASE_FLT_FN (BUILT_IN_ATAN):
6029 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
6030 /* Treat these like sqrt only if unsafe math optimizations are allowed,
6031 because of possible accuracy problems. */
6032 if (! flag_unsafe_math_optimizations)
6033 break;
6034 CASE_FLT_FN (BUILT_IN_SQRT):
6035 CASE_FLT_FN (BUILT_IN_FLOOR):
6036 CASE_FLT_FN (BUILT_IN_CEIL):
6037 CASE_FLT_FN (BUILT_IN_TRUNC):
6038 CASE_FLT_FN (BUILT_IN_ROUND):
6039 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6040 CASE_FLT_FN (BUILT_IN_RINT):
6041 target = expand_builtin_mathfn (exp, target, subtarget);
6042 if (target)
6043 return target;
6044 break;
6045
6046 CASE_FLT_FN (BUILT_IN_FMA):
6047 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
6048 if (target)
6049 return target;
6050 break;
6051
6052 CASE_FLT_FN (BUILT_IN_ILOGB):
6053 if (! flag_unsafe_math_optimizations)
6054 break;
6055 CASE_FLT_FN (BUILT_IN_ISINF):
6056 CASE_FLT_FN (BUILT_IN_FINITE):
6057 case BUILT_IN_ISFINITE:
6058 case BUILT_IN_ISNORMAL:
6059 target = expand_builtin_interclass_mathfn (exp, target);
6060 if (target)
6061 return target;
6062 break;
6063
6064 CASE_FLT_FN (BUILT_IN_ICEIL):
6065 CASE_FLT_FN (BUILT_IN_LCEIL):
6066 CASE_FLT_FN (BUILT_IN_LLCEIL):
6067 CASE_FLT_FN (BUILT_IN_LFLOOR):
6068 CASE_FLT_FN (BUILT_IN_IFLOOR):
6069 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6070 target = expand_builtin_int_roundingfn (exp, target);
6071 if (target)
6072 return target;
6073 break;
6074
6075 CASE_FLT_FN (BUILT_IN_IRINT):
6076 CASE_FLT_FN (BUILT_IN_LRINT):
6077 CASE_FLT_FN (BUILT_IN_LLRINT):
6078 CASE_FLT_FN (BUILT_IN_IROUND):
6079 CASE_FLT_FN (BUILT_IN_LROUND):
6080 CASE_FLT_FN (BUILT_IN_LLROUND):
6081 target = expand_builtin_int_roundingfn_2 (exp, target);
6082 if (target)
6083 return target;
6084 break;
6085
6086 CASE_FLT_FN (BUILT_IN_POWI):
6087 target = expand_builtin_powi (exp, target);
6088 if (target)
6089 return target;
6090 break;
6091
6092 CASE_FLT_FN (BUILT_IN_ATAN2):
6093 CASE_FLT_FN (BUILT_IN_LDEXP):
6094 CASE_FLT_FN (BUILT_IN_SCALB):
6095 CASE_FLT_FN (BUILT_IN_SCALBN):
6096 CASE_FLT_FN (BUILT_IN_SCALBLN):
6097 if (! flag_unsafe_math_optimizations)
6098 break;
6099
6100 CASE_FLT_FN (BUILT_IN_FMOD):
6101 CASE_FLT_FN (BUILT_IN_REMAINDER):
6102 CASE_FLT_FN (BUILT_IN_DREM):
6103 CASE_FLT_FN (BUILT_IN_POW):
6104 target = expand_builtin_mathfn_2 (exp, target, subtarget);
6105 if (target)
6106 return target;
6107 break;
6108
6109 CASE_FLT_FN (BUILT_IN_CEXPI):
6110 target = expand_builtin_cexpi (exp, target);
6111 gcc_assert (target);
6112 return target;
6113
6114 CASE_FLT_FN (BUILT_IN_SIN):
6115 CASE_FLT_FN (BUILT_IN_COS):
6116 if (! flag_unsafe_math_optimizations)
6117 break;
6118 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6119 if (target)
6120 return target;
6121 break;
6122
6123 CASE_FLT_FN (BUILT_IN_SINCOS):
6124 if (! flag_unsafe_math_optimizations)
6125 break;
6126 target = expand_builtin_sincos (exp);
6127 if (target)
6128 return target;
6129 break;
6130
6131 case BUILT_IN_APPLY_ARGS:
6132 return expand_builtin_apply_args ();
6133
6134 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6135 FUNCTION with a copy of the parameters described by
6136 ARGUMENTS, and ARGSIZE. It returns a block of memory
6137 allocated on the stack into which is stored all the registers
6138 that might possibly be used for returning the result of a
6139 function. ARGUMENTS is the value returned by
6140 __builtin_apply_args. ARGSIZE is the number of bytes of
6141 arguments that must be copied. ??? How should this value be
6142 computed? We'll also need a safe worst case value for varargs
6143 functions. */
6144 case BUILT_IN_APPLY:
6145 if (!validate_arglist (exp, POINTER_TYPE,
6146 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6147 && !validate_arglist (exp, REFERENCE_TYPE,
6148 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6149 return const0_rtx;
6150 else
6151 {
6152 rtx ops[3];
6153
6154 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6155 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6156 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6157
6158 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6159 }
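
    /* Together with __builtin_return below, these builtins let a wrapper
       forward its arguments and return value without knowing the callee's
       signature, roughly (illustrative; FN is some function pointer, and
       64 an arbitrary worst-case argument size):

         void *args = __builtin_apply_args ();
         void *ret = __builtin_apply ((void (*)()) fn, args, 64);
         __builtin_return (ret);
    */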
6160
6161 /* __builtin_return (RESULT) causes the function to return the
6162 value described by RESULT. RESULT is address of the block of
6163 memory returned by __builtin_apply. */
6164 case BUILT_IN_RETURN:
6165 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6166 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6167 return const0_rtx;
6168
6169 case BUILT_IN_SAVEREGS:
6170 return expand_builtin_saveregs ();
6171
6172 case BUILT_IN_VA_ARG_PACK:
6173 /* All valid uses of __builtin_va_arg_pack () are removed during
6174 inlining. */
6175 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6176 return const0_rtx;
6177
6178 case BUILT_IN_VA_ARG_PACK_LEN:
6179 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6180 inlining. */
6181 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6182 return const0_rtx;
6183
6184 /* Return the address of the first anonymous stack arg. */
6185 case BUILT_IN_NEXT_ARG:
6186 if (fold_builtin_next_arg (exp, false))
6187 return const0_rtx;
6188 return expand_builtin_next_arg ();
6189
6190 case BUILT_IN_CLEAR_CACHE:
6191 target = expand_builtin___clear_cache (exp);
6192 if (target)
6193 return target;
6194 break;
6195
6196 case BUILT_IN_CLASSIFY_TYPE:
6197 return expand_builtin_classify_type (exp);
6198
6199 case BUILT_IN_CONSTANT_P:
6200 return const0_rtx;
6201
6202 case BUILT_IN_FRAME_ADDRESS:
6203 case BUILT_IN_RETURN_ADDRESS:
6204 return expand_builtin_frame_address (fndecl, exp);
6205
6206 /* Returns the address of the area where the structure is returned.
6207 0 otherwise. */
6208 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6209 if (call_expr_nargs (exp) != 0
6210 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6211 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6212 return const0_rtx;
6213 else
6214 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6215
6216 case BUILT_IN_ALLOCA:
6217 case BUILT_IN_ALLOCA_WITH_ALIGN:
6218 /* If the allocation stems from the declaration of a variable-sized
6219 object, it cannot accumulate. */
6220 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
6221 if (target)
6222 return target;
6223 break;
6224
6225 case BUILT_IN_STACK_SAVE:
6226 return expand_stack_save ();
6227
6228 case BUILT_IN_STACK_RESTORE:
6229 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6230 return const0_rtx;
6231
6232 case BUILT_IN_BSWAP16:
6233 case BUILT_IN_BSWAP32:
6234 case BUILT_IN_BSWAP64:
6235 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
6236 if (target)
6237 return target;
6238 break;
6239
6240 CASE_INT_FN (BUILT_IN_FFS):
6241 target = expand_builtin_unop (target_mode, exp, target,
6242 subtarget, ffs_optab);
6243 if (target)
6244 return target;
6245 break;
6246
6247 CASE_INT_FN (BUILT_IN_CLZ):
6248 target = expand_builtin_unop (target_mode, exp, target,
6249 subtarget, clz_optab);
6250 if (target)
6251 return target;
6252 break;
6253
6254 CASE_INT_FN (BUILT_IN_CTZ):
6255 target = expand_builtin_unop (target_mode, exp, target,
6256 subtarget, ctz_optab);
6257 if (target)
6258 return target;
6259 break;
6260
6261 CASE_INT_FN (BUILT_IN_CLRSB):
6262 target = expand_builtin_unop (target_mode, exp, target,
6263 subtarget, clrsb_optab);
6264 if (target)
6265 return target;
6266 break;
6267
6268 CASE_INT_FN (BUILT_IN_POPCOUNT):
6269 target = expand_builtin_unop (target_mode, exp, target,
6270 subtarget, popcount_optab);
6271 if (target)
6272 return target;
6273 break;
6274
6275 CASE_INT_FN (BUILT_IN_PARITY):
6276 target = expand_builtin_unop (target_mode, exp, target,
6277 subtarget, parity_optab);
6278 if (target)
6279 return target;
6280 break;
6281
6282 case BUILT_IN_STRLEN:
6283 target = expand_builtin_strlen (exp, target, target_mode);
6284 if (target)
6285 return target;
6286 break;
6287
6288 case BUILT_IN_STRCPY:
6289 target = expand_builtin_strcpy (exp, target);
6290 if (target)
6291 return target;
6292 break;
6293
6294 case BUILT_IN_STRNCPY:
6295 target = expand_builtin_strncpy (exp, target);
6296 if (target)
6297 return target;
6298 break;
6299
6300 case BUILT_IN_STPCPY:
6301 target = expand_builtin_stpcpy (exp, target, mode);
6302 if (target)
6303 return target;
6304 break;
6305
6306 case BUILT_IN_MEMCPY:
6307 target = expand_builtin_memcpy (exp, target);
6308 if (target)
6309 return target;
6310 break;
6311
6312 case BUILT_IN_MEMPCPY:
6313 target = expand_builtin_mempcpy (exp, target, mode);
6314 if (target)
6315 return target;
6316 break;
6317
6318 case BUILT_IN_MEMSET:
6319 target = expand_builtin_memset (exp, target, mode);
6320 if (target)
6321 return target;
6322 break;
6323
6324 case BUILT_IN_BZERO:
6325 target = expand_builtin_bzero (exp);
6326 if (target)
6327 return target;
6328 break;
6329
6330 case BUILT_IN_STRCMP:
6331 target = expand_builtin_strcmp (exp, target);
6332 if (target)
6333 return target;
6334 break;
6335
6336 case BUILT_IN_STRNCMP:
6337 target = expand_builtin_strncmp (exp, target, mode);
6338 if (target)
6339 return target;
6340 break;
6341
6342 case BUILT_IN_BCMP:
6343 case BUILT_IN_MEMCMP:
6344 target = expand_builtin_memcmp (exp, target, mode);
6345 if (target)
6346 return target;
6347 break;
6348
6349 case BUILT_IN_SETJMP:
6350 /* This should have been lowered to the builtins below. */
6351 gcc_unreachable ();
6352
6353 case BUILT_IN_SETJMP_SETUP:
6354 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6355 and the receiver label. */
6356 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6357 {
6358 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6359 VOIDmode, EXPAND_NORMAL);
6360 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6361 rtx_insn *label_r = label_rtx (label);
6362
6363 /* This is copied from the handling of non-local gotos. */
6364 expand_builtin_setjmp_setup (buf_addr, label_r);
6365 nonlocal_goto_handler_labels
6366 = gen_rtx_INSN_LIST (VOIDmode, label_r,
6367 nonlocal_goto_handler_labels);
6368 /* ??? Do not let expand_label treat us as such since we would
6369 not want to be both on the list of non-local labels and on
6370 the list of forced labels. */
6371 FORCED_LABEL (label) = 0;
6372 return const0_rtx;
6373 }
6374 break;
6375
6376 case BUILT_IN_SETJMP_RECEIVER:
6377 /* __builtin_setjmp_receiver is passed the receiver label. */
6378 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6379 {
6380 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6381 rtx_insn *label_r = label_rtx (label);
6382
6383 expand_builtin_setjmp_receiver (label_r);
6384 return const0_rtx;
6385 }
6386 break;
6387
6388 /* __builtin_longjmp is passed a pointer to an array of five words.
6389 It's similar to the C library longjmp function but works with
6390 __builtin_setjmp above. */
6391 case BUILT_IN_LONGJMP:
6392 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6393 {
6394 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6395 VOIDmode, EXPAND_NORMAL);
6396 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6397
6398 if (value != const1_rtx)
6399 {
6400 error ("%<__builtin_longjmp%> second argument must be 1");
6401 return const0_rtx;
6402 }
6403
6404 expand_builtin_longjmp (buf_addr, value);
6405 return const0_rtx;
6406 }
6407 break;
6408
6409 case BUILT_IN_NONLOCAL_GOTO:
6410 target = expand_builtin_nonlocal_goto (exp);
6411 if (target)
6412 return target;
6413 break;
6414
6415 /* This updates the setjmp buffer that is its argument with the value
6416 of the current stack pointer. */
6417 case BUILT_IN_UPDATE_SETJMP_BUF:
6418 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6419 {
6420 rtx buf_addr
6421 = expand_normal (CALL_EXPR_ARG (exp, 0));
6422
6423 expand_builtin_update_setjmp_buf (buf_addr);
6424 return const0_rtx;
6425 }
6426 break;
6427
6428 case BUILT_IN_TRAP:
6429 expand_builtin_trap ();
6430 return const0_rtx;
6431
6432 case BUILT_IN_UNREACHABLE:
6433 expand_builtin_unreachable ();
6434 return const0_rtx;
6435
6436 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6437 case BUILT_IN_SIGNBITD32:
6438 case BUILT_IN_SIGNBITD64:
6439 case BUILT_IN_SIGNBITD128:
6440 target = expand_builtin_signbit (exp, target);
6441 if (target)
6442 return target;
6443 break;
6444
6445 /* Various hooks for the DWARF 2 __throw routine. */
6446 case BUILT_IN_UNWIND_INIT:
6447 expand_builtin_unwind_init ();
6448 return const0_rtx;
6449 case BUILT_IN_DWARF_CFA:
6450 return virtual_cfa_rtx;
6451 #ifdef DWARF2_UNWIND_INFO
6452 case BUILT_IN_DWARF_SP_COLUMN:
6453 return expand_builtin_dwarf_sp_column ();
6454 case BUILT_IN_INIT_DWARF_REG_SIZES:
6455 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6456 return const0_rtx;
6457 #endif
6458 case BUILT_IN_FROB_RETURN_ADDR:
6459 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6460 case BUILT_IN_EXTRACT_RETURN_ADDR:
6461 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6462 case BUILT_IN_EH_RETURN:
6463 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6464 CALL_EXPR_ARG (exp, 1));
6465 return const0_rtx;
6466 case BUILT_IN_EH_RETURN_DATA_REGNO:
6467 return expand_builtin_eh_return_data_regno (exp);
6468 case BUILT_IN_EXTEND_POINTER:
6469 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6470 case BUILT_IN_EH_POINTER:
6471 return expand_builtin_eh_pointer (exp);
6472 case BUILT_IN_EH_FILTER:
6473 return expand_builtin_eh_filter (exp);
6474 case BUILT_IN_EH_COPY_VALUES:
6475 return expand_builtin_eh_copy_values (exp);
6476
6477 case BUILT_IN_VA_START:
6478 return expand_builtin_va_start (exp);
6479 case BUILT_IN_VA_END:
6480 return expand_builtin_va_end (exp);
6481 case BUILT_IN_VA_COPY:
6482 return expand_builtin_va_copy (exp);
6483 case BUILT_IN_EXPECT:
6484 return expand_builtin_expect (exp, target);
6485 case BUILT_IN_ASSUME_ALIGNED:
6486 return expand_builtin_assume_aligned (exp, target);
6487 case BUILT_IN_PREFETCH:
6488 expand_builtin_prefetch (exp);
6489 return const0_rtx;
6490
6491 case BUILT_IN_INIT_TRAMPOLINE:
6492 return expand_builtin_init_trampoline (exp, true);
6493 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
6494 return expand_builtin_init_trampoline (exp, false);
6495 case BUILT_IN_ADJUST_TRAMPOLINE:
6496 return expand_builtin_adjust_trampoline (exp);
6497
6498 case BUILT_IN_FORK:
6499 case BUILT_IN_EXECL:
6500 case BUILT_IN_EXECV:
6501 case BUILT_IN_EXECLP:
6502 case BUILT_IN_EXECLE:
6503 case BUILT_IN_EXECVP:
6504 case BUILT_IN_EXECVE:
6505 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6506 if (target)
6507 return target;
6508 break;
6509
6510 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
6511 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
6512 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
6513 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
6514 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
6515 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
6516 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
6517 if (target)
6518 return target;
6519 break;
6520
6521 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
6522 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
6523 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
6524 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
6525 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
6526 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
6527 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
6528 if (target)
6529 return target;
6530 break;
6531
6532 case BUILT_IN_SYNC_FETCH_AND_OR_1:
6533 case BUILT_IN_SYNC_FETCH_AND_OR_2:
6534 case BUILT_IN_SYNC_FETCH_AND_OR_4:
6535 case BUILT_IN_SYNC_FETCH_AND_OR_8:
6536 case BUILT_IN_SYNC_FETCH_AND_OR_16:
6537 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
6538 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
6539 if (target)
6540 return target;
6541 break;
6542
6543 case BUILT_IN_SYNC_FETCH_AND_AND_1:
6544 case BUILT_IN_SYNC_FETCH_AND_AND_2:
6545 case BUILT_IN_SYNC_FETCH_AND_AND_4:
6546 case BUILT_IN_SYNC_FETCH_AND_AND_8:
6547 case BUILT_IN_SYNC_FETCH_AND_AND_16:
6548 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
6549 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
6550 if (target)
6551 return target;
6552 break;
6553
6554 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
6555 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
6556 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
6557 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
6558 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
6559 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
6560 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
6561 if (target)
6562 return target;
6563 break;
6564
6565 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6566 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6567 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6568 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6569 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6570 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
6571 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
6572 if (target)
6573 return target;
6574 break;
6575
6576 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
6577 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
6578 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
6579 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
6580 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
6581 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
6582 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
6583 if (target)
6584 return target;
6585 break;
6586
6587 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
6588 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
6589 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
6590 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
6591 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
6592 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
6593 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
6594 if (target)
6595 return target;
6596 break;
6597
6598 case BUILT_IN_SYNC_OR_AND_FETCH_1:
6599 case BUILT_IN_SYNC_OR_AND_FETCH_2:
6600 case BUILT_IN_SYNC_OR_AND_FETCH_4:
6601 case BUILT_IN_SYNC_OR_AND_FETCH_8:
6602 case BUILT_IN_SYNC_OR_AND_FETCH_16:
6603 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
6604 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
6605 if (target)
6606 return target;
6607 break;
6608
6609 case BUILT_IN_SYNC_AND_AND_FETCH_1:
6610 case BUILT_IN_SYNC_AND_AND_FETCH_2:
6611 case BUILT_IN_SYNC_AND_AND_FETCH_4:
6612 case BUILT_IN_SYNC_AND_AND_FETCH_8:
6613 case BUILT_IN_SYNC_AND_AND_FETCH_16:
6614 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
6615 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
6616 if (target)
6617 return target;
6618 break;
6619
6620 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
6621 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
6622 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
6623 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
6624 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
6625 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
6626 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
6627 if (target)
6628 return target;
6629 break;
6630
6631 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6632 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6633 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6634 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6635 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6636 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
6637 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
6638 if (target)
6639 return target;
6640 break;
6641
6642 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
6643 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
6644 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
6645 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
6646 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
6647 if (mode == VOIDmode)
6648 mode = TYPE_MODE (boolean_type_node);
6649 if (!target || !register_operand (target, mode))
6650 target = gen_reg_rtx (mode);
6651
6652 mode = get_builtin_sync_mode
6653 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
6654 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6655 if (target)
6656 return target;
6657 break;
6658
6659 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
6660 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
6661 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
6662 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
6663 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
6664 mode = get_builtin_sync_mode
6665 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
6666 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6667 if (target)
6668 return target;
6669 break;
6670
6671 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
6672 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
6673 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
6674 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
6675 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
6676 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
6677 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
6678 if (target)
6679 return target;
6680 break;
6681
6682 case BUILT_IN_SYNC_LOCK_RELEASE_1:
6683 case BUILT_IN_SYNC_LOCK_RELEASE_2:
6684 case BUILT_IN_SYNC_LOCK_RELEASE_4:
6685 case BUILT_IN_SYNC_LOCK_RELEASE_8:
6686 case BUILT_IN_SYNC_LOCK_RELEASE_16:
6687 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
6688 expand_builtin_sync_lock_release (mode, exp);
6689 return const0_rtx;
6690
6691 case BUILT_IN_SYNC_SYNCHRONIZE:
6692 expand_builtin_sync_synchronize ();
6693 return const0_rtx;
6694
6695 case BUILT_IN_ATOMIC_EXCHANGE_1:
6696 case BUILT_IN_ATOMIC_EXCHANGE_2:
6697 case BUILT_IN_ATOMIC_EXCHANGE_4:
6698 case BUILT_IN_ATOMIC_EXCHANGE_8:
6699 case BUILT_IN_ATOMIC_EXCHANGE_16:
6700 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
6701 target = expand_builtin_atomic_exchange (mode, exp, target);
6702 if (target)
6703 return target;
6704 break;
6705
6706 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
6707 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
6708 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
6709 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
6710 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
6711 {
6712 unsigned int nargs, z;
6713 vec<tree, va_gc> *vec;
6714
6715 mode =
6716 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
6717 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
6718 if (target)
6719 return target;
6720
6721 /* If this is turned into an external library call, the weak parameter
6722 must be dropped to match the expected parameter list. */
6723 nargs = call_expr_nargs (exp);
6724 vec_alloc (vec, nargs - 1);
6725 for (z = 0; z < 3; z++)
6726 vec->quick_push (CALL_EXPR_ARG (exp, z));
6727 /* Skip the boolean weak parameter. */
6728 for (z = 4; z < 6; z++)
6729 vec->quick_push (CALL_EXPR_ARG (exp, z));
6730 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
6731 break;
6732 }
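
/* I.e. (illustrative) the 6-argument builtin

     __atomic_compare_exchange_N (obj, expect, desired, weak, smodel, fmodel)

   falls back to the 5-argument libatomic routine

     __atomic_compare_exchange_N (obj, expect, desired, smodel, fmodel)

   with the boolean WEAK argument dropped, as arranged just above.  */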
6733
6734 case BUILT_IN_ATOMIC_LOAD_1:
6735 case BUILT_IN_ATOMIC_LOAD_2:
6736 case BUILT_IN_ATOMIC_LOAD_4:
6737 case BUILT_IN_ATOMIC_LOAD_8:
6738 case BUILT_IN_ATOMIC_LOAD_16:
6739 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
6740 target = expand_builtin_atomic_load (mode, exp, target);
6741 if (target)
6742 return target;
6743 break;
6744
6745 case BUILT_IN_ATOMIC_STORE_1:
6746 case BUILT_IN_ATOMIC_STORE_2:
6747 case BUILT_IN_ATOMIC_STORE_4:
6748 case BUILT_IN_ATOMIC_STORE_8:
6749 case BUILT_IN_ATOMIC_STORE_16:
6750 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
6751 target = expand_builtin_atomic_store (mode, exp);
6752 if (target)
6753 return const0_rtx;
6754 break;
6755
6756 case BUILT_IN_ATOMIC_ADD_FETCH_1:
6757 case BUILT_IN_ATOMIC_ADD_FETCH_2:
6758 case BUILT_IN_ATOMIC_ADD_FETCH_4:
6759 case BUILT_IN_ATOMIC_ADD_FETCH_8:
6760 case BUILT_IN_ATOMIC_ADD_FETCH_16:
6761 {
6762 enum built_in_function lib;
6763 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
6764 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
6765 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
6766 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
6767 ignore, lib);
6768 if (target)
6769 return target;
6770 break;
6771 }
6772 case BUILT_IN_ATOMIC_SUB_FETCH_1:
6773 case BUILT_IN_ATOMIC_SUB_FETCH_2:
6774 case BUILT_IN_ATOMIC_SUB_FETCH_4:
6775 case BUILT_IN_ATOMIC_SUB_FETCH_8:
6776 case BUILT_IN_ATOMIC_SUB_FETCH_16:
6777 {
6778 enum built_in_function lib;
6779 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
6780 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
6781 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
6782 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
6783 ignore, lib);
6784 if (target)
6785 return target;
6786 break;
6787 }
6788 case BUILT_IN_ATOMIC_AND_FETCH_1:
6789 case BUILT_IN_ATOMIC_AND_FETCH_2:
6790 case BUILT_IN_ATOMIC_AND_FETCH_4:
6791 case BUILT_IN_ATOMIC_AND_FETCH_8:
6792 case BUILT_IN_ATOMIC_AND_FETCH_16:
6793 {
6794 enum built_in_function lib;
6795 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
6796 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
6797 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
6798 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
6799 ignore, lib);
6800 if (target)
6801 return target;
6802 break;
6803 }
6804 case BUILT_IN_ATOMIC_NAND_FETCH_1:
6805 case BUILT_IN_ATOMIC_NAND_FETCH_2:
6806 case BUILT_IN_ATOMIC_NAND_FETCH_4:
6807 case BUILT_IN_ATOMIC_NAND_FETCH_8:
6808 case BUILT_IN_ATOMIC_NAND_FETCH_16:
6809 {
6810 enum built_in_function lib;
6811 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
6812 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
6813 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
6814 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
6815 ignore, lib);
6816 if (target)
6817 return target;
6818 break;
6819 }
6820 case BUILT_IN_ATOMIC_XOR_FETCH_1:
6821 case BUILT_IN_ATOMIC_XOR_FETCH_2:
6822 case BUILT_IN_ATOMIC_XOR_FETCH_4:
6823 case BUILT_IN_ATOMIC_XOR_FETCH_8:
6824 case BUILT_IN_ATOMIC_XOR_FETCH_16:
6825 {
6826 enum built_in_function lib;
6827 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
6828 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
6829 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
6830 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
6831 ignore, lib);
6832 if (target)
6833 return target;
6834 break;
6835 }
6836 case BUILT_IN_ATOMIC_OR_FETCH_1:
6837 case BUILT_IN_ATOMIC_OR_FETCH_2:
6838 case BUILT_IN_ATOMIC_OR_FETCH_4:
6839 case BUILT_IN_ATOMIC_OR_FETCH_8:
6840 case BUILT_IN_ATOMIC_OR_FETCH_16:
6841 {
6842 enum built_in_function lib;
6843 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
6844 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
6845 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
6846 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
6847 ignore, lib);
6848 if (target)
6849 return target;
6850 break;
6851 }
6852 case BUILT_IN_ATOMIC_FETCH_ADD_1:
6853 case BUILT_IN_ATOMIC_FETCH_ADD_2:
6854 case BUILT_IN_ATOMIC_FETCH_ADD_4:
6855 case BUILT_IN_ATOMIC_FETCH_ADD_8:
6856 case BUILT_IN_ATOMIC_FETCH_ADD_16:
6857 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
6858 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
6859 ignore, BUILT_IN_NONE);
6860 if (target)
6861 return target;
6862 break;
6863
6864 case BUILT_IN_ATOMIC_FETCH_SUB_1:
6865 case BUILT_IN_ATOMIC_FETCH_SUB_2:
6866 case BUILT_IN_ATOMIC_FETCH_SUB_4:
6867 case BUILT_IN_ATOMIC_FETCH_SUB_8:
6868 case BUILT_IN_ATOMIC_FETCH_SUB_16:
6869 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
6870 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
6871 ignore, BUILT_IN_NONE);
6872 if (target)
6873 return target;
6874 break;
6875
6876 case BUILT_IN_ATOMIC_FETCH_AND_1:
6877 case BUILT_IN_ATOMIC_FETCH_AND_2:
6878 case BUILT_IN_ATOMIC_FETCH_AND_4:
6879 case BUILT_IN_ATOMIC_FETCH_AND_8:
6880 case BUILT_IN_ATOMIC_FETCH_AND_16:
6881 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
6882 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
6883 ignore, BUILT_IN_NONE);
6884 if (target)
6885 return target;
6886 break;
6887
6888 case BUILT_IN_ATOMIC_FETCH_NAND_1:
6889 case BUILT_IN_ATOMIC_FETCH_NAND_2:
6890 case BUILT_IN_ATOMIC_FETCH_NAND_4:
6891 case BUILT_IN_ATOMIC_FETCH_NAND_8:
6892 case BUILT_IN_ATOMIC_FETCH_NAND_16:
6893 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
6894 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
6895 ignore, BUILT_IN_NONE);
6896 if (target)
6897 return target;
6898 break;
6899
6900 case BUILT_IN_ATOMIC_FETCH_XOR_1:
6901 case BUILT_IN_ATOMIC_FETCH_XOR_2:
6902 case BUILT_IN_ATOMIC_FETCH_XOR_4:
6903 case BUILT_IN_ATOMIC_FETCH_XOR_8:
6904 case BUILT_IN_ATOMIC_FETCH_XOR_16:
6905 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
6906 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
6907 ignore, BUILT_IN_NONE);
6908 if (target)
6909 return target;
6910 break;
6911
6912 case BUILT_IN_ATOMIC_FETCH_OR_1:
6913 case BUILT_IN_ATOMIC_FETCH_OR_2:
6914 case BUILT_IN_ATOMIC_FETCH_OR_4:
6915 case BUILT_IN_ATOMIC_FETCH_OR_8:
6916 case BUILT_IN_ATOMIC_FETCH_OR_16:
6917 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
6918 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
6919 ignore, BUILT_IN_NONE);
6920 if (target)
6921 return target;
6922 break;
6923
6924 case BUILT_IN_ATOMIC_TEST_AND_SET:
6925 return expand_builtin_atomic_test_and_set (exp, target);
6926
6927 case BUILT_IN_ATOMIC_CLEAR:
6928 return expand_builtin_atomic_clear (exp);
6929
6930 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
6931 return expand_builtin_atomic_always_lock_free (exp);
6932
6933 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
6934 target = expand_builtin_atomic_is_lock_free (exp);
6935 if (target)
6936 return target;
6937 break;
6938
6939 case BUILT_IN_ATOMIC_THREAD_FENCE:
6940 expand_builtin_atomic_thread_fence (exp);
6941 return const0_rtx;
6942
6943 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
6944 expand_builtin_atomic_signal_fence (exp);
6945 return const0_rtx;
6946
6947 case BUILT_IN_OBJECT_SIZE:
6948 return expand_builtin_object_size (exp);
6949
6950 case BUILT_IN_MEMCPY_CHK:
6951 case BUILT_IN_MEMPCPY_CHK:
6952 case BUILT_IN_MEMMOVE_CHK:
6953 case BUILT_IN_MEMSET_CHK:
6954 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6955 if (target)
6956 return target;
6957 break;
6958
6959 case BUILT_IN_STRCPY_CHK:
6960 case BUILT_IN_STPCPY_CHK:
6961 case BUILT_IN_STRNCPY_CHK:
6962 case BUILT_IN_STPNCPY_CHK:
6963 case BUILT_IN_STRCAT_CHK:
6964 case BUILT_IN_STRNCAT_CHK:
6965 case BUILT_IN_SNPRINTF_CHK:
6966 case BUILT_IN_VSNPRINTF_CHK:
6967 maybe_emit_chk_warning (exp, fcode);
6968 break;
6969
6970 case BUILT_IN_SPRINTF_CHK:
6971 case BUILT_IN_VSPRINTF_CHK:
6972 maybe_emit_sprintf_chk_warning (exp, fcode);
6973 break;
6974
6975 case BUILT_IN_FREE:
6976 if (warn_free_nonheap_object)
6977 maybe_emit_free_warning (exp);
6978 break;
6979
6980 case BUILT_IN_THREAD_POINTER:
6981 return expand_builtin_thread_pointer (exp, target);
6982
6983 case BUILT_IN_SET_THREAD_POINTER:
6984 expand_builtin_set_thread_pointer (exp);
6985 return const0_rtx;
6986
6987 case BUILT_IN_CILK_DETACH:
6988 expand_builtin_cilk_detach (exp);
6989 return const0_rtx;
6990
6991 case BUILT_IN_CILK_POP_FRAME:
6992 expand_builtin_cilk_pop_frame (exp);
6993 return const0_rtx;
6994
6995 case BUILT_IN_CHKP_INIT_PTR_BOUNDS:
6996 case BUILT_IN_CHKP_NULL_PTR_BOUNDS:
6997 case BUILT_IN_CHKP_COPY_PTR_BOUNDS:
6998 case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS:
6999 case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS:
7000 case BUILT_IN_CHKP_CHECK_PTR_BOUNDS:
7001 case BUILT_IN_CHKP_SET_PTR_BOUNDS:
7002 case BUILT_IN_CHKP_NARROW_PTR_BOUNDS:
7003 case BUILT_IN_CHKP_STORE_PTR_BOUNDS:
7004 case BUILT_IN_CHKP_GET_PTR_LBOUND:
7005 case BUILT_IN_CHKP_GET_PTR_UBOUND:
7006 /* User CHKP builtins are still accepted when instrumentation is off
7007 for the current function; in that case expand them trivially. */
7008 if (!chkp_function_instrumented_p (current_function_decl))
7009 {
7010 if (fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS
7011 || fcode == BUILT_IN_CHKP_NARROW_PTR_BOUNDS
7012 || fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
7013 || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
7014 || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
7015 return expand_normal (CALL_EXPR_ARG (exp, 0));
7016 else if (fcode == BUILT_IN_CHKP_GET_PTR_LBOUND)
7017 return expand_normal (size_zero_node);
7018 else if (fcode == BUILT_IN_CHKP_GET_PTR_UBOUND)
7019 return expand_normal (size_int (-1));
7020 else
7021 return const0_rtx;
7022 }
7023 /* FALLTHROUGH */
7024
7025 case BUILT_IN_CHKP_BNDMK:
7026 case BUILT_IN_CHKP_BNDSTX:
7027 case BUILT_IN_CHKP_BNDCL:
7028 case BUILT_IN_CHKP_BNDCU:
7029 case BUILT_IN_CHKP_BNDLDX:
7030 case BUILT_IN_CHKP_BNDRET:
7031 case BUILT_IN_CHKP_INTERSECT:
7032 case BUILT_IN_CHKP_NARROW:
7033 case BUILT_IN_CHKP_EXTRACT_LOWER:
7034 case BUILT_IN_CHKP_EXTRACT_UPPER:
7035 /* A software-only Pointer Bounds Checker is not yet implemented;
7036 target support is required. */
7037 error ("your target platform does not support -fcheck-pointer-bounds");
7038 break;
7039
7040 case BUILT_IN_ACC_ON_DEVICE:
7041 target = expand_builtin_acc_on_device (exp, target);
7042 if (target)
7043 return target;
7044 break;
7045
7046 default: /* Just do a library call for any unknown builtin. */
7047 break;
7048 }
7049
7050 /* The switch statement above can drop through to cause the function
7051 to be called normally. */
7052 return expand_call (exp, target, ignore);
7053 }
7054
7055 /* Similar to expand_builtin, but used for instrumented calls. */
7056
7057 rtx
7058 expand_builtin_with_bounds (tree exp, rtx target,
7059 rtx subtarget ATTRIBUTE_UNUSED,
7060 machine_mode mode, int ignore)
7061 {
7062 tree fndecl = get_callee_fndecl (exp);
7063 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7064
7065 gcc_assert (CALL_WITH_BOUNDS_P (exp));
7066
7067 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7068 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7069
7070 gcc_assert (fcode > BEGIN_CHKP_BUILTINS
7071 && fcode < END_CHKP_BUILTINS);
7072
7073 switch (fcode)
7074 {
7075 case BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP:
7076 target = expand_builtin_memcpy_with_bounds (exp, target);
7077 if (target)
7078 return target;
7079 break;
7080
7081 case BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP:
7082 target = expand_builtin_mempcpy_with_bounds (exp, target, mode);
7083 if (target)
7084 return target;
7085 break;
7086
7087 case BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP:
7088 target = expand_builtin_memset_with_bounds (exp, target, mode);
7089 if (target)
7090 return target;
7091 break;
7092
7093 default:
7094 break;
7095 }
7096
7097 /* The switch statement above can drop through to cause the function
7098 to be called normally. */
7099 return expand_call (exp, target, ignore);
7100 }
7101
7102 /* Determine whether a tree node represents a call to a built-in
7103 function. If the tree T is a call to a built-in function with
7104 the right number of arguments of the appropriate types, return
7105 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7106 Otherwise the return value is END_BUILTINS. */
7107
7108 enum built_in_function
7109 builtin_mathfn_code (const_tree t)
7110 {
7111 const_tree fndecl, arg, parmlist;
7112 const_tree argtype, parmtype;
7113 const_call_expr_arg_iterator iter;
7114
7115 if (TREE_CODE (t) != CALL_EXPR
7116 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
7117 return END_BUILTINS;
7118
7119 fndecl = get_callee_fndecl (t);
7120 if (fndecl == NULL_TREE
7121 || TREE_CODE (fndecl) != FUNCTION_DECL
7122 || ! DECL_BUILT_IN (fndecl)
7123 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7124 return END_BUILTINS;
7125
7126 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
7127 init_const_call_expr_arg_iterator (t, &iter);
7128 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
7129 {
7130 /* If a function doesn't take a variable number of arguments,
7131 the last element in the list will have type `void'. */
7132 parmtype = TREE_VALUE (parmlist);
7133 if (VOID_TYPE_P (parmtype))
7134 {
7135 if (more_const_call_expr_args_p (&iter))
7136 return END_BUILTINS;
7137 return DECL_FUNCTION_CODE (fndecl);
7138 }
7139
7140 if (! more_const_call_expr_args_p (&iter))
7141 return END_BUILTINS;
7142
7143 arg = next_const_call_expr_arg (&iter);
7144 argtype = TREE_TYPE (arg);
7145
7146 if (SCALAR_FLOAT_TYPE_P (parmtype))
7147 {
7148 if (! SCALAR_FLOAT_TYPE_P (argtype))
7149 return END_BUILTINS;
7150 }
7151 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7152 {
7153 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7154 return END_BUILTINS;
7155 }
7156 else if (POINTER_TYPE_P (parmtype))
7157 {
7158 if (! POINTER_TYPE_P (argtype))
7159 return END_BUILTINS;
7160 }
7161 else if (INTEGRAL_TYPE_P (parmtype))
7162 {
7163 if (! INTEGRAL_TYPE_P (argtype))
7164 return END_BUILTINS;
7165 }
7166 else
7167 return END_BUILTINS;
7168 }
7169
7170 /* Variable-length argument list. */
7171 return DECL_FUNCTION_CODE (fndecl);
7172 }
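
/* For example, a call tree for sqrt (x) with X of type double yields
   BUILT_IN_SQRT, while sqrt applied to a pointer argument fails the
   parameter check above and yields END_BUILTINS.  */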
7173
7174 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7175 evaluate to a constant. */
7176
7177 static tree
7178 fold_builtin_constant_p (tree arg)
7179 {
7180 /* We return 1 for a numeric type that's known to be a constant
7181 value at compile-time or for an aggregate type that's a
7182 literal constant. */
7183 STRIP_NOPS (arg);
7184
7185 /* If we know this is a constant, return the constant 1. */
7186 if (CONSTANT_CLASS_P (arg)
7187 || (TREE_CODE (arg) == CONSTRUCTOR
7188 && TREE_CONSTANT (arg)))
7189 return integer_one_node;
7190 if (TREE_CODE (arg) == ADDR_EXPR)
7191 {
7192 tree op = TREE_OPERAND (arg, 0);
7193 if (TREE_CODE (op) == STRING_CST
7194 || (TREE_CODE (op) == ARRAY_REF
7195 && integer_zerop (TREE_OPERAND (op, 1))
7196 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7197 return integer_one_node;
7198 }
7199
7200 /* If this expression has side effects, show we don't know it to be a
7201 constant. Likewise if it's a pointer or aggregate type since in
7202 those cases we only want literals, since those are only optimized
7203 when generating RTL, not later.
7204 And finally, if we are compiling an initializer, not code, we
7205 need to return a definite result now; there's not going to be any
7206 more optimization done. */
7207 if (TREE_SIDE_EFFECTS (arg)
7208 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7209 || POINTER_TYPE_P (TREE_TYPE (arg))
7210 || cfun == 0
7211 || folding_initializer
7212 || force_folding_builtin_constant_p)
7213 return integer_zero_node;
7214
7215 return NULL_TREE;
7216 }
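
/* Illustrative folds, in source terms:
     __builtin_constant_p (42)    -> 1
     __builtin_constant_p ("abc") -> 1
     __builtin_constant_p (x++)   -> 0  (side effects)
   A plain scalar variable normally yields NULL_TREE here, leaving
   the answer to later passes.  */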
7217
7218 /* Create a call to builtin_expect with PRED, EXPECTED and PREDICTOR
7219 as its arguments and return it as a truthvalue. */
7220
7221 static tree
7222 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
7223 tree predictor)
7224 {
7225 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7226
7227 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
7228 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7229 ret_type = TREE_TYPE (TREE_TYPE (fn));
7230 pred_type = TREE_VALUE (arg_types);
7231 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7232
7233 pred = fold_convert_loc (loc, pred_type, pred);
7234 expected = fold_convert_loc (loc, expected_type, expected);
7235 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
7236 predictor);
7237
7238 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7239 build_int_cst (ret_type, 0));
7240 }
7241
7242 /* Fold a call to builtin_expect with arguments ARG0, ARG1 and ARG2.
7243 Return NULL_TREE if no simplification is possible. */
7244
7245 tree
7246 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
7247 {
7248 tree inner, fndecl, inner_arg0;
7249 enum tree_code code;
7250
7251 /* Distribute the expected value over short-circuiting operators.
7252 See through the cast from truthvalue_type_node to long. */
7253 inner_arg0 = arg0;
7254 while (CONVERT_EXPR_P (inner_arg0)
7255 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
7256 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
7257 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
7258
7259 /* If this is a builtin_expect within a builtin_expect keep the
7260 inner one. See through a comparison against a constant. It
7261 might have been added to create a truthvalue. */
7262 inner = inner_arg0;
7263
7264 if (COMPARISON_CLASS_P (inner)
7265 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7266 inner = TREE_OPERAND (inner, 0);
7267
7268 if (TREE_CODE (inner) == CALL_EXPR
7269 && (fndecl = get_callee_fndecl (inner))
7270 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7271 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7272 return arg0;
7273
7274 inner = inner_arg0;
7275 code = TREE_CODE (inner);
7276 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7277 {
7278 tree op0 = TREE_OPERAND (inner, 0);
7279 tree op1 = TREE_OPERAND (inner, 1);
7280
7281 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
7282 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
7283 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7284
7285 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
7286 }
7287
7288 /* If the argument isn't invariant then there's nothing else we can do. */
7289 if (!TREE_CONSTANT (inner_arg0))
7290 return NULL_TREE;
7291
7292 /* If we expect that a comparison against the argument will fold to
7293 a constant return the constant. In practice, this means a true
7294 constant or the address of a non-weak symbol. */
7295 inner = inner_arg0;
7296 STRIP_NOPS (inner);
7297 if (TREE_CODE (inner) == ADDR_EXPR)
7298 {
7299 do
7300 {
7301 inner = TREE_OPERAND (inner, 0);
7302 }
7303 while (TREE_CODE (inner) == COMPONENT_REF
7304 || TREE_CODE (inner) == ARRAY_REF);
7305 if ((TREE_CODE (inner) == VAR_DECL
7306 || TREE_CODE (inner) == FUNCTION_DECL)
7307 && DECL_WEAK (inner))
7308 return NULL_TREE;
7309 }
7310
7311 /* Otherwise, ARG0 already has the proper type for the return value. */
7312 return arg0;
7313 }
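
/* For instance, the expectation in
     __builtin_expect (a && b, 1)
   is distributed over the short-circuit above as if the user had
   written
     __builtin_expect (a, 1) && __builtin_expect (b, 1)
   so that both branches keep their prediction.  */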
7314
7315 /* Fold a call to __builtin_classify_type with argument ARG. */
7316
7317 static tree
7318 fold_builtin_classify_type (tree arg)
7319 {
7320 if (arg == 0)
7321 return build_int_cst (integer_type_node, no_type_class);
7322
7323 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
7324 }
7325
7326 /* Fold a call to __builtin_strlen with argument ARG. */
7327
7328 static tree
7329 fold_builtin_strlen (location_t loc, tree type, tree arg)
7330 {
7331 if (!validate_arg (arg, POINTER_TYPE))
7332 return NULL_TREE;
7333 else
7334 {
7335 tree len = c_strlen (arg, 0);
7336
7337 if (len)
7338 return fold_convert_loc (loc, type, len);
7339
7340 return NULL_TREE;
7341 }
7342 }
7343
7344 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7345
7346 static tree
7347 fold_builtin_inf (location_t loc, tree type, int warn)
7348 {
7349 REAL_VALUE_TYPE real;
7350
7351 /* __builtin_inff is intended to be usable to define INFINITY on all
7352 targets. If an infinity is not available, INFINITY expands "to a
7353 positive constant of type float that overflows at translation
7354 time", footnote "In this case, using INFINITY will violate the
7355 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7356 Thus we pedwarn to ensure this constraint violation is
7357 diagnosed. */
7358 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7359 pedwarn (loc, 0, "target format does not support infinity");
7360
7361 real_inf (&real);
7362 return build_real (type, real);
7363 }
7364
7365 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7366
7367 static tree
7368 fold_builtin_nan (tree arg, tree type, int quiet)
7369 {
7370 REAL_VALUE_TYPE real;
7371 const char *str;
7372
7373 if (!validate_arg (arg, POINTER_TYPE))
7374 return NULL_TREE;
7375 str = c_getstr (arg);
7376 if (!str)
7377 return NULL_TREE;
7378
7379 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7380 return NULL_TREE;
7381
7382 return build_real (type, real);
7383 }
7384
7385 /* Return true if the floating point expression T has an integer value.
7386 We also allow +Inf, -Inf and NaN to be considered integer values. */
7387
7388 static bool
7389 integer_valued_real_p (tree t)
7390 {
7391 switch (TREE_CODE (t))
7392 {
7393 case FLOAT_EXPR:
7394 return true;
7395
7396 case ABS_EXPR:
7397 case SAVE_EXPR:
7398 return integer_valued_real_p (TREE_OPERAND (t, 0));
7399
7400 case COMPOUND_EXPR:
7401 case MODIFY_EXPR:
7402 case BIND_EXPR:
7403 return integer_valued_real_p (TREE_OPERAND (t, 1));
7404
7405 case PLUS_EXPR:
7406 case MINUS_EXPR:
7407 case MULT_EXPR:
7408 case MIN_EXPR:
7409 case MAX_EXPR:
7410 return integer_valued_real_p (TREE_OPERAND (t, 0))
7411 && integer_valued_real_p (TREE_OPERAND (t, 1));
7412
7413 case COND_EXPR:
7414 return integer_valued_real_p (TREE_OPERAND (t, 1))
7415 && integer_valued_real_p (TREE_OPERAND (t, 2));
7416
7417 case REAL_CST:
7418 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
7419
7420 CASE_CONVERT:
7421 {
7422 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7423 if (TREE_CODE (type) == INTEGER_TYPE)
7424 return true;
7425 if (TREE_CODE (type) == REAL_TYPE)
7426 return integer_valued_real_p (TREE_OPERAND (t, 0));
7427 break;
7428 }
7429
7430 case CALL_EXPR:
7431 switch (builtin_mathfn_code (t))
7432 {
7433 CASE_FLT_FN (BUILT_IN_CEIL):
7434 CASE_FLT_FN (BUILT_IN_FLOOR):
7435 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7436 CASE_FLT_FN (BUILT_IN_RINT):
7437 CASE_FLT_FN (BUILT_IN_ROUND):
7438 CASE_FLT_FN (BUILT_IN_TRUNC):
7439 return true;
7440
7441 CASE_FLT_FN (BUILT_IN_FMIN):
7442 CASE_FLT_FN (BUILT_IN_FMAX):
7443 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7444 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
7445
7446 default:
7447 break;
7448 }
7449 break;
7450
7451 default:
7452 break;
7453 }
7454 return false;
7455 }
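
/* E.g. this predicate is true for (double) i, floor (x) and
   fmin (trunc (x), 2.0), but false for a plain REAL_TYPE variable
   whose fractional part is unknown.  */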
7456
7457 /* FNDECL is assumed to be a builtin where truncation can be propagated
7458 across (for instance floor((double)f) == (double)floorf (f)).
7459 Do the transformation for a call with argument ARG. */
7460
7461 static tree
7462 fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
7463 {
7464 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7465
7466 if (!validate_arg (arg, REAL_TYPE))
7467 return NULL_TREE;
7468
7469 /* Integer rounding functions are idempotent. */
7470 if (fcode == builtin_mathfn_code (arg))
7471 return arg;
7472
7473 /* If argument is already integer valued, and we don't need to worry
7474 about setting errno, there's no need to perform rounding. */
7475 if (! flag_errno_math && integer_valued_real_p (arg))
7476 return arg;
7477
7478 if (optimize)
7479 {
7480 tree arg0 = strip_float_extensions (arg);
7481 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7482 tree newtype = TREE_TYPE (arg0);
7483 tree decl;
7484
7485 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7486 && (decl = mathfn_built_in (newtype, fcode)))
7487 return fold_convert_loc (loc, ftype,
7488 build_call_expr_loc (loc, decl, 1,
7489 fold_convert_loc (loc,
7490 newtype,
7491 arg0)));
7492 }
7493 return NULL_TREE;
7494 }
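
/* In source terms, with optimization enabled:
     floor ((double) f) -> (double) floorf (f)  for float F,
   and by the idempotence rule above
     floor (floor (x))  -> floor (x).  */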
7495
7496 /* FNDECL is assumed to be builtin which can narrow the FP type of
7497 the argument, for instance lround((double)f) -> lroundf (f).
7498 Do the transformation for a call with argument ARG. */
7499
7500 static tree
7501 fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
7502 {
7503 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7504
7505 if (!validate_arg (arg, REAL_TYPE))
7506 return NULL_TREE;
7507
7508 /* If argument is already integer valued, and we don't need to worry
7509 about setting errno, there's no need to perform rounding. */
7510 if (! flag_errno_math && integer_valued_real_p (arg))
7511 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7512 TREE_TYPE (TREE_TYPE (fndecl)), arg);
7513
7514 if (optimize)
7515 {
7516 tree ftype = TREE_TYPE (arg);
7517 tree arg0 = strip_float_extensions (arg);
7518 tree newtype = TREE_TYPE (arg0);
7519 tree decl;
7520
7521 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7522 && (decl = mathfn_built_in (newtype, fcode)))
7523 return build_call_expr_loc (loc, decl, 1,
7524 fold_convert_loc (loc, newtype, arg0));
7525 }
7526
7527 /* Canonicalize iround (x) to lround (x) on ILP32 targets where
7528 sizeof (int) == sizeof (long). */
7529 if (TYPE_PRECISION (integer_type_node)
7530 == TYPE_PRECISION (long_integer_type_node))
7531 {
7532 tree newfn = NULL_TREE;
7533 switch (fcode)
7534 {
7535 CASE_FLT_FN (BUILT_IN_ICEIL):
7536 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7537 break;
7538
7539 CASE_FLT_FN (BUILT_IN_IFLOOR):
7540 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7541 break;
7542
7543 CASE_FLT_FN (BUILT_IN_IROUND):
7544 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7545 break;
7546
7547 CASE_FLT_FN (BUILT_IN_IRINT):
7548 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7549 break;
7550
7551 default:
7552 break;
7553 }
7554
7555 if (newfn)
7556 {
7557 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7558 return fold_convert_loc (loc,
7559 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7560 }
7561 }
7562
7563 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7564 sizeof (long long) == sizeof (long). */
7565 if (TYPE_PRECISION (long_long_integer_type_node)
7566 == TYPE_PRECISION (long_integer_type_node))
7567 {
7568 tree newfn = NULL_TREE;
7569 switch (fcode)
7570 {
7571 CASE_FLT_FN (BUILT_IN_LLCEIL):
7572 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7573 break;
7574
7575 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7576 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7577 break;
7578
7579 CASE_FLT_FN (BUILT_IN_LLROUND):
7580 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7581 break;
7582
7583 CASE_FLT_FN (BUILT_IN_LLRINT):
7584 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7585 break;
7586
7587 default:
7588 break;
7589 }
7590
7591 if (newfn)
7592 {
7593 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7594 return fold_convert_loc (loc,
7595 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7596 }
7597 }
7598
7599 return NULL_TREE;
7600 }
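
/* In source terms (the narrowing requires optimization):
     lround ((double) f) -> lroundf (f)  for float F,
   and where long long has the same precision as long
     llround (x)         -> lround (x).  */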
7601
7602 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7603 return type. Return NULL_TREE if no simplification can be made. */
7604
7605 static tree
7606 fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
7607 {
7608 tree res;
7609
7610 if (!validate_arg (arg, COMPLEX_TYPE)
7611 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7612 return NULL_TREE;
7613
7614 /* Calculate the result when the argument is a constant. */
7615 if (TREE_CODE (arg) == COMPLEX_CST
7616 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7617 type, mpfr_hypot)))
7618 return res;
7619
7620 if (TREE_CODE (arg) == COMPLEX_EXPR)
7621 {
7622 tree real = TREE_OPERAND (arg, 0);
7623 tree imag = TREE_OPERAND (arg, 1);
7624
7625 /* If either part is zero, cabs is fabs of the other. */
7626 if (real_zerop (real))
7627 return fold_build1_loc (loc, ABS_EXPR, type, imag);
7628 if (real_zerop (imag))
7629 return fold_build1_loc (loc, ABS_EXPR, type, real);
7630
7631 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7632 if (flag_unsafe_math_optimizations
7633 && operand_equal_p (real, imag, OEP_PURE_SAME))
7634 {
7635 const REAL_VALUE_TYPE sqrt2_trunc
7636 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7637 STRIP_NOPS (real);
7638 return fold_build2_loc (loc, MULT_EXPR, type,
7639 fold_build1_loc (loc, ABS_EXPR, type, real),
7640 build_real (type, sqrt2_trunc));
7641 }
7642 }
7643
7644 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7645 if (TREE_CODE (arg) == NEGATE_EXPR
7646 || TREE_CODE (arg) == CONJ_EXPR)
7647 return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));
7648
7649 /* Don't do this when optimizing for size. */
7650 if (flag_unsafe_math_optimizations
7651 && optimize && optimize_function_for_speed_p (cfun))
7652 {
7653 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7654
7655 if (sqrtfn != NULL_TREE)
7656 {
7657 tree rpart, ipart, result;
7658
7659 arg = builtin_save_expr (arg);
7660
7661 rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
7662 ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);
7663
7664 rpart = builtin_save_expr (rpart);
7665 ipart = builtin_save_expr (ipart);
7666
7667 result = fold_build2_loc (loc, PLUS_EXPR, type,
7668 fold_build2_loc (loc, MULT_EXPR, type,
7669 rpart, rpart),
7670 fold_build2_loc (loc, MULT_EXPR, type,
7671 ipart, ipart));
7672
7673 return build_call_expr_loc (loc, sqrtfn, 1, result);
7674 }
7675 }
7676
7677 return NULL_TREE;
7678 }
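
/* E.g. cabs (x + 0.0i) folds to fabs (x), and with
   -funsafe-math-optimizations (when optimizing for speed) cabs (z)
   is expanded as sqrt (r*r + i*i) on the saved real and imaginary
   parts of Z.  */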
7679
7680 /* Build a complex (inf +- 0i) for the result of cproj. TYPE is the
7681 complex tree type of the result. If NEG is true, the imaginary
7682 zero is negative. */
7683
7684 static tree
7685 build_complex_cproj (tree type, bool neg)
7686 {
7687 REAL_VALUE_TYPE rinf, rzero = dconst0;
7688
7689 real_inf (&rinf);
7690 rzero.sign = neg;
7691 return build_complex (type, build_real (TREE_TYPE (type), rinf),
7692 build_real (TREE_TYPE (type), rzero));
7693 }
7694
7695 /* Fold call to builtin cproj, cprojf or cprojl with argument ARG. TYPE is the
7696 return type. Return NULL_TREE if no simplification can be made. */
7697
7698 static tree
7699 fold_builtin_cproj (location_t loc, tree arg, tree type)
7700 {
7701 if (!validate_arg (arg, COMPLEX_TYPE)
7702 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7703 return NULL_TREE;
7704
7705 /* If there are no infinities, return arg. */
7706 if (! HONOR_INFINITIES (type))
7707 return non_lvalue_loc (loc, arg);
7708
7709 /* Calculate the result when the argument is a constant. */
7710 if (TREE_CODE (arg) == COMPLEX_CST)
7711 {
7712 const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
7713 const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
7714
7715 if (real_isinf (real) || real_isinf (imag))
7716 return build_complex_cproj (type, imag->sign);
7717 else
7718 return arg;
7719 }
7720 else if (TREE_CODE (arg) == COMPLEX_EXPR)
7721 {
7722 tree real = TREE_OPERAND (arg, 0);
7723 tree imag = TREE_OPERAND (arg, 1);
7724
7725 STRIP_NOPS (real);
7726 STRIP_NOPS (imag);
7727
7728 /* If the real part is inf and the imag part is known to be
7729 nonnegative, return (inf + 0i). Remember side-effects are
7730 possible in the imag part. */
7731 if (TREE_CODE (real) == REAL_CST
7732 && real_isinf (TREE_REAL_CST_PTR (real))
7733 && tree_expr_nonnegative_p (imag))
7734 return omit_one_operand_loc (loc, type,
7735 build_complex_cproj (type, false),
7736 arg);
7737
7738 /* If the imag part is inf, return (inf+I*copysign(0,imag)).
7739 Remember side-effects are possible in the real part. */
7740 if (TREE_CODE (imag) == REAL_CST
7741 && real_isinf (TREE_REAL_CST_PTR (imag)))
7742 return
7743 omit_one_operand_loc (loc, type,
7744 build_complex_cproj (type, TREE_REAL_CST_PTR
7745 (imag)->sign), arg);
7746 }
7747
7748 return NULL_TREE;
7749 }
7750
7751 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7752 Return NULL_TREE if no simplification can be made. */
7753
7754 static tree
7755 fold_builtin_sqrt (location_t loc, tree arg, tree type)
7756 {
7758 enum built_in_function fcode;
7759 tree res;
7760
7761 if (!validate_arg (arg, REAL_TYPE))
7762 return NULL_TREE;
7763
7764 /* Calculate the result when the argument is a constant. */
7765 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7766 return res;
7767
7768 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7769 fcode = builtin_mathfn_code (arg);
7770 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7771 {
7772 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7773 arg = fold_build2_loc (loc, MULT_EXPR, type,
7774 CALL_EXPR_ARG (arg, 0),
7775 build_real (type, dconsthalf));
7776 return build_call_expr_loc (loc, expfn, 1, arg);
7777 }
7778
7779 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7780 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7781 {
7782 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7783
7784 if (powfn)
7785 {
7786 tree arg0 = CALL_EXPR_ARG (arg, 0);
7787 tree tree_root;
7788 /* The inner root was either sqrt or cbrt. */
7789 /* This was a conditional expression but it triggered a bug
7790 in Sun C 5.5. */
7791 REAL_VALUE_TYPE dconstroot;
7792 if (BUILTIN_SQRT_P (fcode))
7793 dconstroot = dconsthalf;
7794 else
7795 dconstroot = dconst_third ();
7796
7797 /* Adjust for the outer root. */
7798 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7799 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7800 tree_root = build_real (type, dconstroot);
7801 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7802 }
7803 }
7804
7805 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7806 if (flag_unsafe_math_optimizations
7807 && (fcode == BUILT_IN_POW
7808 || fcode == BUILT_IN_POWF
7809 || fcode == BUILT_IN_POWL))
7810 {
7811 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7812 tree arg0 = CALL_EXPR_ARG (arg, 0);
7813 tree arg1 = CALL_EXPR_ARG (arg, 1);
7814 tree narg1;
7815 if (!tree_expr_nonnegative_p (arg0))
7816 arg0 = build1 (ABS_EXPR, type, arg0);
7817 narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7818 build_real (type, dconsthalf));
7819 return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
7820 }
7821
7822 return NULL_TREE;
7823 }
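
/* The unsafe-math folds above, in source terms:
     sqrt (exp (x))    -> exp (x * 0.5)
     sqrt (cbrt (x))   -> pow (x, 1.0/6.0)
     sqrt (pow (x, y)) -> pow (fabs (x), y * 0.5).  */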
7824
7825 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7826 Return NULL_TREE if no simplification can be made. */
7827
7828 static tree
7829 fold_builtin_cbrt (location_t loc, tree arg, tree type)
7830 {
7831 const enum built_in_function fcode = builtin_mathfn_code (arg);
7832 tree res;
7833
7834 if (!validate_arg (arg, REAL_TYPE))
7835 return NULL_TREE;
7836
7837 /* Calculate the result when the argument is a constant. */
7838 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7839 return res;
7840
7841 if (flag_unsafe_math_optimizations)
7842 {
7843 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7844 if (BUILTIN_EXPONENT_P (fcode))
7845 {
7846 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7847 const REAL_VALUE_TYPE third_trunc =
7848 real_value_truncate (TYPE_MODE (type), dconst_third ());
7849 arg = fold_build2_loc (loc, MULT_EXPR, type,
7850 CALL_EXPR_ARG (arg, 0),
7851 build_real (type, third_trunc));
7852 return build_call_expr_loc (loc, expfn, 1, arg);
7853 }
7854
7855 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7856 if (BUILTIN_SQRT_P (fcode))
7857 {
7858 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7859
7860 if (powfn)
7861 {
7862 tree arg0 = CALL_EXPR_ARG (arg, 0);
7863 tree tree_root;
7864 REAL_VALUE_TYPE dconstroot = dconst_third ();
7865
7866 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7867 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7868 tree_root = build_real (type, dconstroot);
7869 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7870 }
7871 }
7872
7873 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7874 if (BUILTIN_CBRT_P (fcode))
7875 {
7876 tree arg0 = CALL_EXPR_ARG (arg, 0);
7877 if (tree_expr_nonnegative_p (arg0))
7878 {
7879 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7880
7881 if (powfn)
7882 {
7883 tree tree_root;
7884 REAL_VALUE_TYPE dconstroot;
7885
7886 real_arithmetic (&dconstroot, MULT_EXPR,
7887 dconst_third_ptr (), dconst_third_ptr ());
7888 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7889 tree_root = build_real (type, dconstroot);
7890 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7891 }
7892 }
7893 }
7894
7895 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7896 if (fcode == BUILT_IN_POW
7897 || fcode == BUILT_IN_POWF
7898 || fcode == BUILT_IN_POWL)
7899 {
7900 tree arg00 = CALL_EXPR_ARG (arg, 0);
7901 tree arg01 = CALL_EXPR_ARG (arg, 1);
7902 if (tree_expr_nonnegative_p (arg00))
7903 {
7904 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7905 const REAL_VALUE_TYPE dconstroot
7906 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7907 tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
7908 build_real (type, dconstroot));
7909 return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
7910 }
7911 }
7912 }
7913 return NULL_TREE;
7914 }
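
/* The unsafe-math folds above, in source terms:
     cbrt (exp (x))  -> exp (x / 3.0)
     cbrt (sqrt (x)) -> pow (x, 1.0/6.0)
     cbrt (cbrt (x)) -> pow (x, 1.0/9.0)  when X is nonnegative.  */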
7915
7916 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7917 TYPE is the type of the return value. Return NULL_TREE if no
7918 simplification can be made. */
7919
7920 static tree
7921 fold_builtin_cos (location_t loc,
7922 tree arg, tree type, tree fndecl)
7923 {
7924 tree res, narg;
7925
7926 if (!validate_arg (arg, REAL_TYPE))
7927 return NULL_TREE;
7928
7929 /* Calculate the result when the argument is a constant. */
7930 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7931 return res;
7932
7933 /* Optimize cos(-x) into cos (x). */
7934 if ((narg = fold_strip_sign_ops (arg)))
7935 return build_call_expr_loc (loc, fndecl, 1, narg);
7936
7937 return NULL_TREE;
7938 }
7939
7940 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7941 Return NULL_TREE if no simplification can be made. */
7942
7943 static tree
7944 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
7945 {
7946 if (validate_arg (arg, REAL_TYPE))
7947 {
7948 tree res, narg;
7949
7950 /* Calculate the result when the argument is a constant. */
7951 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7952 return res;
7953
7954 /* Optimize cosh(-x) into cosh (x). */
7955 if ((narg = fold_strip_sign_ops (arg)))
7956 return build_call_expr_loc (loc, fndecl, 1, narg);
7957 }
7958
7959 return NULL_TREE;
7960 }
7961
7962 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7963 argument ARG. TYPE is the type of the return value. Return
7964 NULL_TREE if no simplification can be made. */
7965
7966 static tree
7967 fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
7968 bool hyper)
7969 {
7970 if (validate_arg (arg, COMPLEX_TYPE)
7971 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7972 {
7973 tree tmp;
7974
7975 /* Calculate the result when the argument is a constant. */
7976 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
7977 return tmp;
7978
7979 /* Optimize fn(-x) into fn(x). */
7980 if ((tmp = fold_strip_sign_ops (arg)))
7981 return build_call_expr_loc (loc, fndecl, 1, tmp);
7982 }
7983
7984 return NULL_TREE;
7985 }
7986
7987 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7988 Return NULL_TREE if no simplification can be made. */
7989
7990 static tree
7991 fold_builtin_tan (tree arg, tree type)
7992 {
7993 enum built_in_function fcode;
7994 tree res;
7995
7996 if (!validate_arg (arg, REAL_TYPE))
7997 return NULL_TREE;
7998
7999 /* Calculate the result when the argument is a constant. */
8000 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
8001 return res;
8002
8003 /* Optimize tan(atan(x)) = x. */
8004 fcode = builtin_mathfn_code (arg);
8005 if (flag_unsafe_math_optimizations
8006 && (fcode == BUILT_IN_ATAN
8007 || fcode == BUILT_IN_ATANF
8008 || fcode == BUILT_IN_ATANL))
8009 return CALL_EXPR_ARG (arg, 0);
8010
8011 return NULL_TREE;
8012 }
8013
8014 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
8015 NULL_TREE if no simplification can be made. */
8016
8017 static tree
8018 fold_builtin_sincos (location_t loc,
8019 tree arg0, tree arg1, tree arg2)
8020 {
8021 tree type;
8022 tree res, fn, call;
8023
8024 if (!validate_arg (arg0, REAL_TYPE)
8025 || !validate_arg (arg1, POINTER_TYPE)
8026 || !validate_arg (arg2, POINTER_TYPE))
8027 return NULL_TREE;
8028
8029 type = TREE_TYPE (arg0);
8030
8031 /* Calculate the result when the argument is a constant. */
8032 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
8033 return res;
8034
8035 /* Canonicalize sincos to cexpi. */
8036 if (!targetm.libc_has_function (function_c99_math_complex))
8037 return NULL_TREE;
8038 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
8039 if (!fn)
8040 return NULL_TREE;
8041
8042 call = build_call_expr_loc (loc, fn, 1, arg0);
8043 call = builtin_save_expr (call);
8044
8045 return build2 (COMPOUND_EXPR, void_type_node,
8046 build2 (MODIFY_EXPR, void_type_node,
8047 build_fold_indirect_ref_loc (loc, arg1),
8048 build1 (IMAGPART_EXPR, type, call)),
8049 build2 (MODIFY_EXPR, void_type_node,
8050 build_fold_indirect_ref_loc (loc, arg2),
8051 build1 (REALPART_EXPR, type, call)));
8052 }
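
/* When the C99 complex math library is available, the code above
   rewrites sincos (x, sp, cp) roughly as
     tmp = cexpi (x); *sp = __imag tmp; *cp = __real tmp;
   so that later passes can share the computation between the two
   results.  */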
8053
8054 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
8055 NULL_TREE if no simplification can be made. */
8056
8057 static tree
8058 fold_builtin_cexp (location_t loc, tree arg0, tree type)
8059 {
8060 tree rtype;
8061 tree realp, imagp, ifn;
8062 tree res;
8063
8064 if (!validate_arg (arg0, COMPLEX_TYPE)
8065 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
8066 return NULL_TREE;
8067
8068 /* Calculate the result when the argument is a constant. */
8069 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
8070 return res;
8071
8072 rtype = TREE_TYPE (TREE_TYPE (arg0));
8073
8074 /* If we can extract the real part of ARG0 and it is constant zero,
8075 fold to cexpi. */
8076 if (!targetm.libc_has_function (function_c99_math_complex))
8077 return NULL_TREE;
8078 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
8079 if (!ifn)
8080 return NULL_TREE;
8081
8082 if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
8083 && real_zerop (realp))
8084 {
8085 tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
8086 return build_call_expr_loc (loc, ifn, 1, narg);
8087 }
8088
8089 /* If we can easily decompose the real and imaginary parts, split cexp
8090 into exp (r) * cexpi (i). */
8091 if (flag_unsafe_math_optimizations
8092 && realp)
8093 {
8094 tree rfn, rcall, icall;
8095
8096 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
8097 if (!rfn)
8098 return NULL_TREE;
8099
8100 imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
8101 if (!imagp)
8102 return NULL_TREE;
8103
8104 icall = build_call_expr_loc (loc, ifn, 1, imagp);
8105 icall = builtin_save_expr (icall);
8106 rcall = build_call_expr_loc (loc, rfn, 1, realp);
8107 rcall = builtin_save_expr (rcall);
8108 return fold_build2_loc (loc, COMPLEX_EXPR, type,
8109 fold_build2_loc (loc, MULT_EXPR, rtype,
8110 rcall,
8111 fold_build1_loc (loc, REALPART_EXPR,
8112 rtype, icall)),
8113 fold_build2_loc (loc, MULT_EXPR, rtype,
8114 rcall,
8115 fold_build1_loc (loc, IMAGPART_EXPR,
8116 rtype, icall)));
8117 }
8118
8119 return NULL_TREE;
8120 }
8121
8122 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
8123 Return NULL_TREE if no simplification can be made. */
8124
8125 static tree
8126 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
8127 {
8128 if (!validate_arg (arg, REAL_TYPE))
8129 return NULL_TREE;
8130
8131 /* Optimize trunc of constant value. */
8132 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8133 {
8134 REAL_VALUE_TYPE r, x;
8135 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8136
8137 x = TREE_REAL_CST (arg);
8138 real_trunc (&r, TYPE_MODE (type), &x);
8139 return build_real (type, r);
8140 }
8141
8142 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8143 }
8144
8145 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
8146 Return NULL_TREE if no simplification can be made. */
8147
8148 static tree
8149 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
8150 {
8151 if (!validate_arg (arg, REAL_TYPE))
8152 return NULL_TREE;
8153
8154 /* Optimize floor of constant value. */
8155 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8156 {
8157 REAL_VALUE_TYPE x;
8158
8159 x = TREE_REAL_CST (arg);
8160 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8161 {
8162 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8163 REAL_VALUE_TYPE r;
8164
8165 real_floor (&r, TYPE_MODE (type), &x);
8166 return build_real (type, r);
8167 }
8168 }
8169
8170 /* Fold floor (x) where x is nonnegative to trunc (x). */
8171 if (tree_expr_nonnegative_p (arg))
8172 {
8173 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
8174 if (truncfn)
8175 return build_call_expr_loc (loc, truncfn, 1, arg);
8176 }
8177
8178 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8179 }
8180
8181 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
8182 Return NULL_TREE if no simplification can be made. */
8183
8184 static tree
8185 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
8186 {
8187 if (!validate_arg (arg, REAL_TYPE))
8188 return NULL_TREE;
8189
8190 /* Optimize ceil of constant value. */
8191 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8192 {
8193 REAL_VALUE_TYPE x;
8194
8195 x = TREE_REAL_CST (arg);
8196 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8197 {
8198 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8199 REAL_VALUE_TYPE r;
8200
8201 real_ceil (&r, TYPE_MODE (type), &x);
8202 return build_real (type, r);
8203 }
8204 }
8205
8206 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8207 }
8208
8209 /* Fold function call to builtin round, roundf or roundl with argument ARG.
8210 Return NULL_TREE if no simplification can be made. */
8211
8212 static tree
8213 fold_builtin_round (location_t loc, tree fndecl, tree arg)
8214 {
8215 if (!validate_arg (arg, REAL_TYPE))
8216 return NULL_TREE;
8217
8218 /* Optimize round of constant value. */
8219 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8220 {
8221 REAL_VALUE_TYPE x;
8222
8223 x = TREE_REAL_CST (arg);
8224 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8225 {
8226 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8227 REAL_VALUE_TYPE r;
8228
8229 real_round (&r, TYPE_MODE (type), &x);
8230 return build_real (type, r);
8231 }
8232 }
8233
8234 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8235 }
8236
8237 /* Fold function call to builtin lround, lroundf or lroundl (or the
8238 corresponding long long versions) and other rounding functions. ARG
8239 is the argument to the call. Return NULL_TREE if no simplification
8240 can be made. */
8241
8242 static tree
8243 fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
8244 {
8245 if (!validate_arg (arg, REAL_TYPE))
8246 return NULL_TREE;
8247
8248 /* Optimize lround of constant value. */
8249 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8250 {
8251 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
8252
8253 if (real_isfinite (&x))
8254 {
8255 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
8256 tree ftype = TREE_TYPE (arg);
8257 REAL_VALUE_TYPE r;
8258 bool fail = false;
8259
8260 switch (DECL_FUNCTION_CODE (fndecl))
8261 {
8262 CASE_FLT_FN (BUILT_IN_IFLOOR):
8263 CASE_FLT_FN (BUILT_IN_LFLOOR):
8264 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8265 real_floor (&r, TYPE_MODE (ftype), &x);
8266 break;
8267
8268 CASE_FLT_FN (BUILT_IN_ICEIL):
8269 CASE_FLT_FN (BUILT_IN_LCEIL):
8270 CASE_FLT_FN (BUILT_IN_LLCEIL):
8271 real_ceil (&r, TYPE_MODE (ftype), &x);
8272 break;
8273
8274 CASE_FLT_FN (BUILT_IN_IROUND):
8275 CASE_FLT_FN (BUILT_IN_LROUND):
8276 CASE_FLT_FN (BUILT_IN_LLROUND):
8277 real_round (&r, TYPE_MODE (ftype), &x);
8278 break;
8279
8280 default:
8281 gcc_unreachable ();
8282 }
8283
8284 wide_int val = real_to_integer (&r, &fail, TYPE_PRECISION (itype));
8285 if (!fail)
8286 return wide_int_to_tree (itype, val);
8287 }
8288 }
8289
8290 switch (DECL_FUNCTION_CODE (fndecl))
8291 {
8292 CASE_FLT_FN (BUILT_IN_LFLOOR):
8293 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8294 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
8295 if (tree_expr_nonnegative_p (arg))
8296 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
8297 TREE_TYPE (TREE_TYPE (fndecl)), arg);
8298 break;
8299 default:;
8300 }
8301
8302 return fold_fixed_mathfn (loc, fndecl, arg);
8303 }
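
/* E.g. lround (2.5) folds to 3 (real_round rounds halfway cases away
   from zero), and lfloor (x) becomes a plain FIX_TRUNC conversion
   when X is known to be nonnegative.  */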
8304
8305 /* Fold function call to builtin ffs, clz, ctz, clrsb, popcount and parity
8306 and their long and long long variants (e.g. ffsl and ffsll). ARG is
8307 the argument to the call. Return NULL_TREE if no simplification can
8308 be made. */
8309
8310 static tree
8311 fold_builtin_bitop (tree fndecl, tree arg)
8312 {
8313 if (!validate_arg (arg, INTEGER_TYPE))
8314 return NULL_TREE;
8315
8316 /* Optimize for constant argument. */
8317 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8318 {
8319 tree type = TREE_TYPE (arg);
8320 int result;
8321
8322 switch (DECL_FUNCTION_CODE (fndecl))
8323 {
8324 CASE_INT_FN (BUILT_IN_FFS):
8325 result = wi::ffs (arg);
8326 break;
8327
8328 CASE_INT_FN (BUILT_IN_CLZ):
8329 if (wi::ne_p (arg, 0))
8330 result = wi::clz (arg);
8331 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8332 result = TYPE_PRECISION (type);
8333 break;
8334
8335 CASE_INT_FN (BUILT_IN_CTZ):
8336 if (wi::ne_p (arg, 0))
8337 result = wi::ctz (arg);
8338 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8339 result = TYPE_PRECISION (type);
8340 break;
8341
8342 CASE_INT_FN (BUILT_IN_CLRSB):
8343 result = wi::clrsb (arg);
8344 break;
8345
8346 CASE_INT_FN (BUILT_IN_POPCOUNT):
8347 result = wi::popcount (arg);
8348 break;
8349
8350 CASE_INT_FN (BUILT_IN_PARITY):
8351 result = wi::parity (arg);
8352 break;
8353
8354 default:
8355 gcc_unreachable ();
8356 }
8357
8358 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
8359 }
8360
8361 return NULL_TREE;
8362 }
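
/* Constant folds, assuming a 32-bit int:
     __builtin_popcount (0xff) -> 8
     __builtin_ffs (8)         -> 4
     __builtin_clz (1)         -> 31
   __builtin_clz (0) folds to the target's defined value at zero if
   there is one, and to the type precision otherwise.  */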
8363
8364 /* Fold a call to __builtin_bswap16, __builtin_bswap32 or __builtin_bswap64.
8365 Return NULL_TREE if no simplification can be made. */
8366 static tree
8367 fold_builtin_bswap (tree fndecl, tree arg)
8368 {
8369 if (! validate_arg (arg, INTEGER_TYPE))
8370 return NULL_TREE;
8371
8372 /* Optimize constant value. */
8373 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8374 {
8375 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8376
8377 switch (DECL_FUNCTION_CODE (fndecl))
8378 {
8379 case BUILT_IN_BSWAP16:
8380 case BUILT_IN_BSWAP32:
8381 case BUILT_IN_BSWAP64:
8382 {
8383 signop sgn = TYPE_SIGN (type);
8384 tree result =
8385 wide_int_to_tree (type,
8386 wide_int::from (arg, TYPE_PRECISION (type),
8387 sgn).bswap ());
8388 return result;
8389 }
8390 default:
8391 gcc_unreachable ();
8392 }
8393 }
8394
8395 return NULL_TREE;
8396 }
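
/* E.g. __builtin_bswap32 (0x12345678) folds to 0x78563412; only
   constant arguments are handled here.  */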
8397
8398 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8399 NULL_TREE if no simplification can be made. */
8400
8401 static tree
8402 fold_builtin_hypot (location_t loc, tree fndecl,
8403 tree arg0, tree arg1, tree type)
8404 {
8405 tree res, narg0, narg1;
8406
8407 if (!validate_arg (arg0, REAL_TYPE)
8408 || !validate_arg (arg1, REAL_TYPE))
8409 return NULL_TREE;
8410
8411 /* Calculate the result when the argument is a constant. */
8412 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8413 return res;
8414
8415 /* If either argument to hypot has a negate or abs, strip that off.
8416 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8417 narg0 = fold_strip_sign_ops (arg0);
8418 narg1 = fold_strip_sign_ops (arg1);
8419 if (narg0 || narg1)
8420 {
8421 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
8422 narg1 ? narg1 : arg1);
8423 }
8424
8425 /* If either argument is zero, hypot is fabs of the other. */
8426 if (real_zerop (arg0))
8427 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
8428 else if (real_zerop (arg1))
8429 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
8430
8431 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8432 if (flag_unsafe_math_optimizations
8433 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8434 {
8435 const REAL_VALUE_TYPE sqrt2_trunc
8436 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
8437 return fold_build2_loc (loc, MULT_EXPR, type,
8438 fold_build1_loc (loc, ABS_EXPR, type, arg0),
8439 build_real (type, sqrt2_trunc));
8440 }
8441
8442 return NULL_TREE;
8443 }
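
/* The folds above, in source terms:
     hypot (-x, fabs (y)) -> hypot (x, y)
     hypot (x, 0.0)       -> fabs (x)
     hypot (x, x)         -> fabs (x) * sqrt (2)  (unsafe math only).  */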
8444
8445
8446 /* Fold a builtin function call to pow, powf, or powl. Return
8447 NULL_TREE if no simplification can be made. */
8448 static tree
8449 fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
8450 {
8451 tree res;
8452
8453 if (!validate_arg (arg0, REAL_TYPE)
8454 || !validate_arg (arg1, REAL_TYPE))
8455 return NULL_TREE;
8456
8457 /* Calculate the result when the argument is a constant. */
8458 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8459 return res;
8460
8461 /* Optimize pow(1.0,y) = 1.0. */
8462 if (real_onep (arg0))
8463 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8464
8465 if (TREE_CODE (arg1) == REAL_CST
8466 && !TREE_OVERFLOW (arg1))
8467 {
8468 REAL_VALUE_TYPE cint;
8469 REAL_VALUE_TYPE c;
8470 HOST_WIDE_INT n;
8471
8472 c = TREE_REAL_CST (arg1);
8473
8474 /* Optimize pow(x,0.0) = 1.0. */
8475 if (REAL_VALUES_EQUAL (c, dconst0))
8476 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8477 arg0);
8478
8479 /* Optimize pow(x,1.0) = x. */
8480 if (REAL_VALUES_EQUAL (c, dconst1))
8481 return arg0;
8482
8483 /* Optimize pow(x,-1.0) = 1.0/x. */
8484 if (REAL_VALUES_EQUAL (c, dconstm1))
8485 return fold_build2_loc (loc, RDIV_EXPR, type,
8486 build_real (type, dconst1), arg0);
8487
8488 /* Optimize pow(x,0.5) = sqrt(x). */
8489 if (flag_unsafe_math_optimizations
8490 && REAL_VALUES_EQUAL (c, dconsthalf))
8491 {
8492 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8493
8494 if (sqrtfn != NULL_TREE)
8495 return build_call_expr_loc (loc, sqrtfn, 1, arg0);
8496 }
8497
8498 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8499 if (flag_unsafe_math_optimizations)
8500 {
8501 const REAL_VALUE_TYPE dconstroot
8502 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8503
8504 if (REAL_VALUES_EQUAL (c, dconstroot))
8505 {
8506 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8507 if (cbrtfn != NULL_TREE)
8508 return build_call_expr_loc (loc, cbrtfn, 1, arg0);
8509 }
8510 }
8511
8512 /* Check for an integer exponent. */
8513 n = real_to_integer (&c);
8514 real_from_integer (&cint, VOIDmode, n, SIGNED);
8515 if (real_identical (&c, &cint))
8516 {
8517 /* Attempt to evaluate pow at compile-time, unless this should
8518 raise an exception. */
8519 if (TREE_CODE (arg0) == REAL_CST
8520 && !TREE_OVERFLOW (arg0)
8521 && (n > 0
8522 || (!flag_trapping_math && !flag_errno_math)
8523 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
8524 {
8525 REAL_VALUE_TYPE x;
8526 bool inexact;
8527
8528 x = TREE_REAL_CST (arg0);
8529 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8530 if (flag_unsafe_math_optimizations || !inexact)
8531 return build_real (type, x);
8532 }
8533
8534 /* Strip sign ops from even integer powers. */
8535 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8536 {
8537 tree narg0 = fold_strip_sign_ops (arg0);
8538 if (narg0)
8539 return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
8540 }
8541 }
8542 }
8543
8544 if (flag_unsafe_math_optimizations)
8545 {
8546 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8547
8548 /* Optimize pow(expN(x),y) = expN(x*y). */
8549 if (BUILTIN_EXPONENT_P (fcode))
8550 {
8551 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8552 tree arg = CALL_EXPR_ARG (arg0, 0);
8553 arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
8554 return build_call_expr_loc (loc, expfn, 1, arg);
8555 }
8556
8557 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8558 if (BUILTIN_SQRT_P (fcode))
8559 {
8560 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8561 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8562 build_real (type, dconsthalf));
8563 return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
8564 }
8565
8566 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8567 if (BUILTIN_CBRT_P (fcode))
8568 {
8569 tree arg = CALL_EXPR_ARG (arg0, 0);
8570 if (tree_expr_nonnegative_p (arg))
8571 {
8572 const REAL_VALUE_TYPE dconstroot
8573 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8574 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8575 build_real (type, dconstroot));
8576 return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
8577 }
8578 }
8579
8580 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
8581 if (fcode == BUILT_IN_POW
8582 || fcode == BUILT_IN_POWF
8583 || fcode == BUILT_IN_POWL)
8584 {
8585 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8586 if (tree_expr_nonnegative_p (arg00))
8587 {
8588 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8589 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
8590 return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
8591 }
8592 }
8593 }
8594
8595 return NULL_TREE;
8596 }
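
/* A sample of the folds above, in source terms:
     pow (x, 1.0)  -> x
     pow (x, -1.0) -> 1.0 / x
     pow (x, 0.5)  -> sqrt (x)              (unsafe math)
     pow (pow (x, y), z) -> pow (x, y * z)  (unsafe math, X nonnegative).  */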
8597
8598 /* Fold a builtin function call to powi, powif, or powil with arguments ARG0
8599 and ARG1. Return NULL_TREE if no simplification can be made. */
8600 static tree
8601 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
8602 tree arg0, tree arg1, tree type)
8603 {
8604 if (!validate_arg (arg0, REAL_TYPE)
8605 || !validate_arg (arg1, INTEGER_TYPE))
8606 return NULL_TREE;
8607
8608 /* Optimize powi(1.0,y) = 1.0. */
8609 if (real_onep (arg0))
8610 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8611
8612 if (tree_fits_shwi_p (arg1))
8613 {
8614 HOST_WIDE_INT c = tree_to_shwi (arg1);
8615
8616 /* Evaluate powi at compile-time. */
8617 if (TREE_CODE (arg0) == REAL_CST
8618 && !TREE_OVERFLOW (arg0))
8619 {
8620 REAL_VALUE_TYPE x;
8621 x = TREE_REAL_CST (arg0);
8622 real_powi (&x, TYPE_MODE (type), &x, c);
8623 return build_real (type, x);
8624 }
8625
8626 /* Optimize powi(x,0) = 1.0. */
8627 if (c == 0)
8628 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8629 arg0);
8630
8631 /* Optimize powi(x,1) = x. */
8632 if (c == 1)
8633 return arg0;
8634
8635 /* Optimize powi(x,-1) = 1.0/x. */
8636 if (c == -1)
8637 return fold_build2_loc (loc, RDIV_EXPR, type,
8638 build_real (type, dconst1), arg0);
8639 }
8640
8641 return NULL_TREE;
8642 }
8643
8644 /* A subroutine of fold_builtin to fold the various exponent
8645 functions. Return NULL_TREE if no simplification can be made.
8646 FUNC is the corresponding MPFR exponent function. */
8647
8648 static tree
8649 fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
8650 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8651 {
8652 if (validate_arg (arg, REAL_TYPE))
8653 {
8654 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8655 tree res;
8656
8657 /* Calculate the result when the argument is a constant. */
8658 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8659 return res;
8660
8661 /* Optimize expN(logN(x)) = x. */
8662 if (flag_unsafe_math_optimizations)
8663 {
8664 const enum built_in_function fcode = builtin_mathfn_code (arg);
8665
8666 if ((func == mpfr_exp
8667 && (fcode == BUILT_IN_LOG
8668 || fcode == BUILT_IN_LOGF
8669 || fcode == BUILT_IN_LOGL))
8670 || (func == mpfr_exp2
8671 && (fcode == BUILT_IN_LOG2
8672 || fcode == BUILT_IN_LOG2F
8673 || fcode == BUILT_IN_LOG2L))
8674 || (func == mpfr_exp10
8675 && (fcode == BUILT_IN_LOG10
8676 || fcode == BUILT_IN_LOG10F
8677 || fcode == BUILT_IN_LOG10L)))
8678 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8679 }
8680 }
8681
8682 return NULL_TREE;
8683 }
8684
8685 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8686 arguments to the call, and TYPE is its return type.
8687 Return NULL_TREE if no simplification can be made. */
8688
8689 static tree
8690 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
8691 {
8692 if (!validate_arg (arg1, POINTER_TYPE)
8693 || !validate_arg (arg2, INTEGER_TYPE)
8694 || !validate_arg (len, INTEGER_TYPE))
8695 return NULL_TREE;
8696 else
8697 {
8698 const char *p1;
8699
8700 if (TREE_CODE (arg2) != INTEGER_CST
8701 || !tree_fits_uhwi_p (len))
8702 return NULL_TREE;
8703
8704 p1 = c_getstr (arg1);
8705 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
8706 {
8707 char c;
8708 const char *r;
8709 tree tem;
8710
8711 if (target_char_cast (arg2, &c))
8712 return NULL_TREE;
8713
8714 r = (const char *) memchr (p1, c, tree_to_uhwi (len));
8715
8716 if (r == NULL)
8717 return build_int_cst (TREE_TYPE (arg1), 0);
8718
8719 tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
8720 return fold_convert_loc (loc, type, tem);
8721 }
8722 return NULL_TREE;
8723 }
8724 }
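/* For example, memchr ("hello", 'l', 5) folds at compile time to the
   pointer expression "hello" + 2; if the character does not occur in
   the first LEN bytes, as in memchr ("hello", 'z', 5), the call folds
   to a null pointer of the first argument's type.  */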
8725
8726 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8727 Return NULL_TREE if no simplification can be made. */
8728
8729 static tree
8730 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8731 {
8732 const char *p1, *p2;
8733
8734 if (!validate_arg (arg1, POINTER_TYPE)
8735 || !validate_arg (arg2, POINTER_TYPE)
8736 || !validate_arg (len, INTEGER_TYPE))
8737 return NULL_TREE;
8738
8739 /* If the LEN parameter is zero, return zero. */
8740 if (integer_zerop (len))
8741 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8742 arg1, arg2);
8743
8744 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8745 if (operand_equal_p (arg1, arg2, 0))
8746 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8747
8748 p1 = c_getstr (arg1);
8749 p2 = c_getstr (arg2);
8750
8751 /* If all arguments are constant, and the value of len is not greater
8752 than the lengths of arg1 and arg2, evaluate at compile-time. */
8753 if (tree_fits_uhwi_p (len) && p1 && p2
8754 && compare_tree_int (len, strlen (p1) + 1) <= 0
8755 && compare_tree_int (len, strlen (p2) + 1) <= 0)
8756 {
8757 const int r = memcmp (p1, p2, tree_to_uhwi (len));
8758
8759 if (r > 0)
8760 return integer_one_node;
8761 else if (r < 0)
8762 return integer_minus_one_node;
8763 else
8764 return integer_zero_node;
8765 }
8766
8767 /* If the LEN parameter is one, return an expression corresponding to
8768 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8769 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8770 {
8771 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8772 tree cst_uchar_ptr_node
8773 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8774
8775 tree ind1
8776 = fold_convert_loc (loc, integer_type_node,
8777 build1 (INDIRECT_REF, cst_uchar_node,
8778 fold_convert_loc (loc,
8779 cst_uchar_ptr_node,
8780 arg1)));
8781 tree ind2
8782 = fold_convert_loc (loc, integer_type_node,
8783 build1 (INDIRECT_REF, cst_uchar_node,
8784 fold_convert_loc (loc,
8785 cst_uchar_ptr_node,
8786 arg2)));
8787 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8788 }
8789
8790 return NULL_TREE;
8791 }
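/* For example, memcmp ("abc", "abd", 3) folds to -1 at compile time,
   memcmp (p, p, n) folds to 0 for any P, and memcmp (s, t, 1) becomes
   the single byte difference
   *(const unsigned char *) s - *(const unsigned char *) t.  */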
8792
8793 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
8794 Return NULL_TREE if no simplification can be made. */
8795
8796 static tree
8797 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
8798 {
8799 const char *p1, *p2;
8800
8801 if (!validate_arg (arg1, POINTER_TYPE)
8802 || !validate_arg (arg2, POINTER_TYPE))
8803 return NULL_TREE;
8804
8805 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8806 if (operand_equal_p (arg1, arg2, 0))
8807 return integer_zero_node;
8808
8809 p1 = c_getstr (arg1);
8810 p2 = c_getstr (arg2);
8811
8812 if (p1 && p2)
8813 {
8814 const int i = strcmp (p1, p2);
8815 if (i < 0)
8816 return integer_minus_one_node;
8817 else if (i > 0)
8818 return integer_one_node;
8819 else
8820 return integer_zero_node;
8821 }
8822
8823 /* If the second arg is "", return *(const unsigned char*)arg1. */
8824 if (p2 && *p2 == '\0')
8825 {
8826 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8827 tree cst_uchar_ptr_node
8828 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8829
8830 return fold_convert_loc (loc, integer_type_node,
8831 build1 (INDIRECT_REF, cst_uchar_node,
8832 fold_convert_loc (loc,
8833 cst_uchar_ptr_node,
8834 arg1)));
8835 }
8836
8837 /* If the first arg is "", return -*(const unsigned char*)arg2. */
8838 if (p1 && *p1 == '\0')
8839 {
8840 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8841 tree cst_uchar_ptr_node
8842 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8843
8844 tree temp
8845 = fold_convert_loc (loc, integer_type_node,
8846 build1 (INDIRECT_REF, cst_uchar_node,
8847 fold_convert_loc (loc,
8848 cst_uchar_ptr_node,
8849 arg2)));
8850 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8851 }
8852
8853 return NULL_TREE;
8854 }
8855
8856 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
8857 Return NULL_TREE if no simplification can be made. */
8858
8859 static tree
8860 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
8861 {
8862 const char *p1, *p2;
8863
8864 if (!validate_arg (arg1, POINTER_TYPE)
8865 || !validate_arg (arg2, POINTER_TYPE)
8866 || !validate_arg (len, INTEGER_TYPE))
8867 return NULL_TREE;
8868
8869 /* If the LEN parameter is zero, return zero. */
8870 if (integer_zerop (len))
8871 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8872 arg1, arg2);
8873
8874 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8875 if (operand_equal_p (arg1, arg2, 0))
8876 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8877
8878 p1 = c_getstr (arg1);
8879 p2 = c_getstr (arg2);
8880
8881 if (tree_fits_uhwi_p (len) && p1 && p2)
8882 {
8883 const int i = strncmp (p1, p2, tree_to_uhwi (len));
8884 if (i > 0)
8885 return integer_one_node;
8886 else if (i < 0)
8887 return integer_minus_one_node;
8888 else
8889 return integer_zero_node;
8890 }
8891
8892 /* If the second arg is "", and the length is greater than zero,
8893 return *(const unsigned char*)arg1. */
8894 if (p2 && *p2 == '\0'
8895 && TREE_CODE (len) == INTEGER_CST
8896 && tree_int_cst_sgn (len) == 1)
8897 {
8898 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8899 tree cst_uchar_ptr_node
8900 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8901
8902 return fold_convert_loc (loc, integer_type_node,
8903 build1 (INDIRECT_REF, cst_uchar_node,
8904 fold_convert_loc (loc,
8905 cst_uchar_ptr_node,
8906 arg1)));
8907 }
8908
8909 /* If the first arg is "", and the length is greater than zero,
8910 return -*(const unsigned char*)arg2. */
8911 if (p1 && *p1 == '\0'
8912 && TREE_CODE (len) == INTEGER_CST
8913 && tree_int_cst_sgn (len) == 1)
8914 {
8915 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8916 tree cst_uchar_ptr_node
8917 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8918
8919 tree temp = fold_convert_loc (loc, integer_type_node,
8920 build1 (INDIRECT_REF, cst_uchar_node,
8921 fold_convert_loc (loc,
8922 cst_uchar_ptr_node,
8923 arg2)));
8924 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8925 }
8926
8927 /* If the LEN parameter is one, return an expression corresponding to
8928 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8929 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8930 {
8931 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8932 tree cst_uchar_ptr_node
8933 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8934
8935 tree ind1 = fold_convert_loc (loc, integer_type_node,
8936 build1 (INDIRECT_REF, cst_uchar_node,
8937 fold_convert_loc (loc,
8938 cst_uchar_ptr_node,
8939 arg1)));
8940 tree ind2 = fold_convert_loc (loc, integer_type_node,
8941 build1 (INDIRECT_REF, cst_uchar_node,
8942 fold_convert_loc (loc,
8943 cst_uchar_ptr_node,
8944 arg2)));
8945 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8946 }
8947
8948 return NULL_TREE;
8949 }
8950
8951 /* Fold function call to builtin signbit, signbitf or signbitl with argument
8952 ARG. Return NULL_TREE if no simplification can be made. */
8953
8954 static tree
8955 fold_builtin_signbit (location_t loc, tree arg, tree type)
8956 {
8957 if (!validate_arg (arg, REAL_TYPE))
8958 return NULL_TREE;
8959
8960 /* If ARG is a compile-time constant, determine the result. */
8961 if (TREE_CODE (arg) == REAL_CST
8962 && !TREE_OVERFLOW (arg))
8963 {
8964 REAL_VALUE_TYPE c;
8965
8966 c = TREE_REAL_CST (arg);
8967 return (REAL_VALUE_NEGATIVE (c)
8968 ? build_one_cst (type)
8969 : build_zero_cst (type));
8970 }
8971
8972 /* If ARG is non-negative, the result is always zero. */
8973 if (tree_expr_nonnegative_p (arg))
8974 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8975
8976 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
8977 if (!HONOR_SIGNED_ZEROS (arg))
8978 return fold_convert (type,
8979 fold_build2_loc (loc, LT_EXPR, boolean_type_node, arg,
8980 build_real (TREE_TYPE (arg), dconst0)));
8981
8982 return NULL_TREE;
8983 }
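/* For example, signbit (-4.0) folds to 1 and signbit (2.5) folds to 0
   at compile time; when the format has no signed zeros, the runtime
   test reduces to arg < 0.0 as noted above.  */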
8984
8985 /* Fold function call to builtin copysign, copysignf or copysignl with
8986 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
8987 be made. */
8988
8989 static tree
8990 fold_builtin_copysign (location_t loc, tree fndecl,
8991 tree arg1, tree arg2, tree type)
8992 {
8993 tree tem;
8994
8995 if (!validate_arg (arg1, REAL_TYPE)
8996 || !validate_arg (arg2, REAL_TYPE))
8997 return NULL_TREE;
8998
8999 /* copysign(X,X) is X. */
9000 if (operand_equal_p (arg1, arg2, 0))
9001 return fold_convert_loc (loc, type, arg1);
9002
9003 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9004 if (TREE_CODE (arg1) == REAL_CST
9005 && TREE_CODE (arg2) == REAL_CST
9006 && !TREE_OVERFLOW (arg1)
9007 && !TREE_OVERFLOW (arg2))
9008 {
9009 REAL_VALUE_TYPE c1, c2;
9010
9011 c1 = TREE_REAL_CST (arg1);
9012 c2 = TREE_REAL_CST (arg2);
9013 /* c1.sign := c2.sign. */
9014 real_copysign (&c1, &c2);
9015 return build_real (type, c1);
9016 }
9017
9018 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9019 Remember to evaluate Y for side-effects. */
9020 if (tree_expr_nonnegative_p (arg2))
9021 return omit_one_operand_loc (loc, type,
9022 fold_build1_loc (loc, ABS_EXPR, type, arg1),
9023 arg2);
9024
9025 /* Strip sign changing operations for the first argument. */
9026 tem = fold_strip_sign_ops (arg1);
9027 if (tem)
9028 return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
9029
9030 return NULL_TREE;
9031 }
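/* For example, copysign (3.0, -0.0) folds to -3.0 at compile time
   (the sign of a negative zero is honored), and copysign (x, 2.0)
   folds to fabs (x) because the sign source is known to be
   non-negative.  */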
9032
9033 /* Fold a call to builtin isascii with argument ARG. */
9034
9035 static tree
9036 fold_builtin_isascii (location_t loc, tree arg)
9037 {
9038 if (!validate_arg (arg, INTEGER_TYPE))
9039 return NULL_TREE;
9040 else
9041 {
9042 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9043 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9044 build_int_cst (integer_type_node,
9045 ~ (unsigned HOST_WIDE_INT) 0x7f));
9046 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
9047 arg, integer_zero_node);
9048 }
9049 }
9050
9051 /* Fold a call to builtin toascii with argument ARG. */
9052
9053 static tree
9054 fold_builtin_toascii (location_t loc, tree arg)
9055 {
9056 if (!validate_arg (arg, INTEGER_TYPE))
9057 return NULL_TREE;
9058
9059 /* Transform toascii(c) -> (c & 0x7f). */
9060 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
9061 build_int_cst (integer_type_node, 0x7f));
9062 }
9063
9064 /* Fold a call to builtin isdigit with argument ARG. */
9065
9066 static tree
9067 fold_builtin_isdigit (location_t loc, tree arg)
9068 {
9069 if (!validate_arg (arg, INTEGER_TYPE))
9070 return NULL_TREE;
9071 else
9072 {
9073 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9074 /* According to the C standard, isdigit is unaffected by locale.
9075 However, it definitely is affected by the target character set. */
9076 unsigned HOST_WIDE_INT target_digit0
9077 = lang_hooks.to_target_charset ('0');
9078
9079 if (target_digit0 == 0)
9080 return NULL_TREE;
9081
9082 arg = fold_convert_loc (loc, unsigned_type_node, arg);
9083 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
9084 build_int_cst (unsigned_type_node, target_digit0));
9085 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
9086 build_int_cst (unsigned_type_node, 9));
9087 }
9088 }
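/* The single unsigned comparison above suffices because the
   subtraction wraps: any C below '0' yields a huge unsigned value,
   so one "<= 9" test replaces the two-sided range check.  This
   relies on the digit characters being contiguous, which the C
   standard guarantees in every character set.  */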
9089
9090 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9091
9092 static tree
9093 fold_builtin_fabs (location_t loc, tree arg, tree type)
9094 {
9095 if (!validate_arg (arg, REAL_TYPE))
9096 return NULL_TREE;
9097
9098 arg = fold_convert_loc (loc, type, arg);
9099 if (TREE_CODE (arg) == REAL_CST)
9100 return fold_abs_const (arg, type);
9101 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9102 }
9103
9104 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9105
9106 static tree
9107 fold_builtin_abs (location_t loc, tree arg, tree type)
9108 {
9109 if (!validate_arg (arg, INTEGER_TYPE))
9110 return NULL_TREE;
9111
9112 arg = fold_convert_loc (loc, type, arg);
9113 if (TREE_CODE (arg) == INTEGER_CST)
9114 return fold_abs_const (arg, type);
9115 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9116 }
9117
9118 /* Fold a fma operation with arguments ARG[012]. */
9119
9120 tree
9121 fold_fma (location_t loc ATTRIBUTE_UNUSED,
9122 tree type, tree arg0, tree arg1, tree arg2)
9123 {
9124 if (TREE_CODE (arg0) == REAL_CST
9125 && TREE_CODE (arg1) == REAL_CST
9126 && TREE_CODE (arg2) == REAL_CST)
9127 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
9128
9129 return NULL_TREE;
9130 }
9131
9132 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
9133
9134 static tree
9135 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
9136 {
9137 if (validate_arg (arg0, REAL_TYPE)
9138 && validate_arg (arg1, REAL_TYPE)
9139 && validate_arg (arg2, REAL_TYPE))
9140 {
9141 tree tem = fold_fma (loc, type, arg0, arg1, arg2);
9142 if (tem)
9143 return tem;
9144
9145 /* ??? Only expand to FMA_EXPR if it's directly supported. */
9146 if (optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
9147 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
9148 }
9149 return NULL_TREE;
9150 }
9151
9152 /* Fold a call to builtin fmin or fmax. */
9153
9154 static tree
9155 fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
9156 tree type, bool max)
9157 {
9158 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9159 {
9160 /* Calculate the result when the argument is a constant. */
9161 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9162
9163 if (res)
9164 return res;
9165
9166 /* If either argument is NaN, return the other one. Avoid the
9167 transformation if we get (and honor) a signalling NaN. Using
9168 omit_one_operand() ensures we create a non-lvalue. */
9169 if (TREE_CODE (arg0) == REAL_CST
9170 && real_isnan (&TREE_REAL_CST (arg0))
9171 && (! HONOR_SNANS (arg0)
9172 || ! TREE_REAL_CST (arg0).signalling))
9173 return omit_one_operand_loc (loc, type, arg1, arg0);
9174 if (TREE_CODE (arg1) == REAL_CST
9175 && real_isnan (&TREE_REAL_CST (arg1))
9176 && (! HONOR_SNANS (arg1)
9177 || ! TREE_REAL_CST (arg1).signalling))
9178 return omit_one_operand_loc (loc, type, arg0, arg1);
9179
9180 /* Transform fmin/fmax(x,x) -> x. */
9181 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9182 return omit_one_operand_loc (loc, type, arg0, arg1);
9183
9184 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9185 functions to return the numeric arg if the other one is NaN.
9186 These tree codes don't honor that, so only transform if
9187 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9188 handled, so we don't have to worry about it either. */
9189 if (flag_finite_math_only)
9190 return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
9191 fold_convert_loc (loc, type, arg0),
9192 fold_convert_loc (loc, type, arg1));
9193 }
9194 return NULL_TREE;
9195 }
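/* For example, fmax (x, nan) with a constant quiet NaN argument
   folds to x, since the NaN operand is dropped, and under
   -ffinite-math-only fmax (a, b) folds to MAX_EXPR <a, b>.  */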
9196
9197 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9198
9199 static tree
9200 fold_builtin_carg (location_t loc, tree arg, tree type)
9201 {
9202 if (validate_arg (arg, COMPLEX_TYPE)
9203 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9204 {
9205 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9206
9207 if (atan2_fn)
9208 {
9209 tree new_arg = builtin_save_expr (arg);
9210 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9211 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9212 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9213 }
9214 }
9215
9216 return NULL_TREE;
9217 }
9218
9219 /* Fold a call to builtin logb/ilogb. */
9220
9221 static tree
9222 fold_builtin_logb (location_t loc, tree arg, tree rettype)
9223 {
9224 if (! validate_arg (arg, REAL_TYPE))
9225 return NULL_TREE;
9226
9227 STRIP_NOPS (arg);
9228
9229 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9230 {
9231 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9232
9233 switch (value->cl)
9234 {
9235 case rvc_nan:
9236 case rvc_inf:
9237 /* If arg is Inf or NaN and we're logb, return it. */
9238 if (TREE_CODE (rettype) == REAL_TYPE)
9239 {
9240 /* For logb(-Inf) we have to return +Inf. */
9241 if (real_isinf (value) && real_isneg (value))
9242 {
9243 REAL_VALUE_TYPE tem;
9244 real_inf (&tem);
9245 return build_real (rettype, tem);
9246 }
9247 return fold_convert_loc (loc, rettype, arg);
9248 }
9249 /* Fall through... */
9250 case rvc_zero:
9251 /* Zero may set errno and/or raise an exception for logb; for
9252 ilogb the value of FP_ILOGB0 is implementation-defined. */
9253 return NULL_TREE;
9254 case rvc_normal:
9255 /* For normal numbers, proceed iff radix == 2. In GCC,
9256 normalized significands are in the range [0.5, 1.0). We
9257 want the exponent as if they were [1.0, 2.0) so get the
9258 exponent and subtract 1. */
9259 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9260 return fold_convert_loc (loc, rettype,
9261 build_int_cst (integer_type_node,
9262 REAL_EXP (value)-1));
9263 break;
9264 }
9265 }
9266
9267 return NULL_TREE;
9268 }
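/* For example, for ARG = 8.0 the normalized significand is 0.5 with
   REAL_EXP equal to 4, so the rvc_normal case above returns
   4 - 1 = 3, matching logb (8.0) == 3.0 in a radix-2 format.  */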
9269
9270 /* Fold a call to builtin significand, if radix == 2. */
9271
9272 static tree
9273 fold_builtin_significand (location_t loc, tree arg, tree rettype)
9274 {
9275 if (! validate_arg (arg, REAL_TYPE))
9276 return NULL_TREE;
9277
9278 STRIP_NOPS (arg);
9279
9280 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9281 {
9282 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9283
9284 switch (value->cl)
9285 {
9286 case rvc_zero:
9287 case rvc_nan:
9288 case rvc_inf:
9289 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9290 return fold_convert_loc (loc, rettype, arg);
9291 case rvc_normal:
9292 /* For normal numbers, proceed iff radix == 2. */
9293 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9294 {
9295 REAL_VALUE_TYPE result = *value;
9296 /* In GCC, normalized significands are in the range [0.5,
9297 1.0). We want them to be [1.0, 2.0) so set the
9298 exponent to 1. */
9299 SET_REAL_EXP (&result, 1);
9300 return build_real (rettype, result);
9301 }
9302 break;
9303 }
9304 }
9305
9306 return NULL_TREE;
9307 }
9308
9309 /* Fold a call to builtin frexp. We can assume the base is 2. */
9310
9311 static tree
9312 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
9313 {
9314 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9315 return NULL_TREE;
9316
9317 STRIP_NOPS (arg0);
9318
9319 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9320 return NULL_TREE;
9321
9322 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9323
9324 /* Proceed if a valid pointer type was passed in. */
9325 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9326 {
9327 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9328 tree frac, exp;
9329
9330 switch (value->cl)
9331 {
9332 case rvc_zero:
9333 /* For +-0, return (*exp = 0, +-0). */
9334 exp = integer_zero_node;
9335 frac = arg0;
9336 break;
9337 case rvc_nan:
9338 case rvc_inf:
9339 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9340 return omit_one_operand_loc (loc, rettype, arg0, arg1);
9341 case rvc_normal:
9342 {
9343 /* Since the frexp function always expects base 2, and in
9344 GCC normalized significands are already in the range
9345 [0.5, 1.0), we have exactly what frexp wants. */
9346 REAL_VALUE_TYPE frac_rvt = *value;
9347 SET_REAL_EXP (&frac_rvt, 0);
9348 frac = build_real (rettype, frac_rvt);
9349 exp = build_int_cst (integer_type_node, REAL_EXP (value));
9350 }
9351 break;
9352 default:
9353 gcc_unreachable ();
9354 }
9355
9356 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
9357 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
9358 TREE_SIDE_EFFECTS (arg1) = 1;
9359 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
9360 }
9361
9362 return NULL_TREE;
9363 }
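/* For example, frexp (8.0, &e) folds to the compound expression
   (e = 4, 0.5): GCC's normalized significand 0.5 is already in the
   range frexp requires, and REAL_EXP supplies the stored exponent.  */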
9364
9365 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9366 then we can assume the base is two. If it's false, then we have to
9367 check the mode of the TYPE parameter in certain cases. */
9368
9369 static tree
9370 fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
9371 tree type, bool ldexp)
9372 {
9373 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9374 {
9375 STRIP_NOPS (arg0);
9376 STRIP_NOPS (arg1);
9377
9378 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9379 if (real_zerop (arg0) || integer_zerop (arg1)
9380 || (TREE_CODE (arg0) == REAL_CST
9381 && !real_isfinite (&TREE_REAL_CST (arg0))))
9382 return omit_one_operand_loc (loc, type, arg0, arg1);
9383
9384 /* If both arguments are constant, then try to evaluate it. */
9385 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9386 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9387 && tree_fits_shwi_p (arg1))
9388 {
9389 /* Bound the maximum adjustment to twice the range of the
9390 mode's valid exponents. Use abs to ensure the range is
9391 positive as a sanity check. */
9392 const long max_exp_adj = 2 *
9393 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9394 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9395
9396 /* Get the user-requested adjustment. */
9397 const HOST_WIDE_INT req_exp_adj = tree_to_shwi (arg1);
9398
9399 /* The requested adjustment must be inside this range. This
9400 is a preliminary cap to avoid things like overflow; we
9401 may still fail to compute the result for other reasons. */
9402 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9403 {
9404 REAL_VALUE_TYPE initial_result;
9405
9406 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9407
9408 /* Ensure we didn't overflow. */
9409 if (! real_isinf (&initial_result))
9410 {
9411 const REAL_VALUE_TYPE trunc_result
9412 = real_value_truncate (TYPE_MODE (type), initial_result);
9413
9414 /* Only proceed if the target mode can hold the
9415 resulting value. */
9416 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9417 return build_real (type, trunc_result);
9418 }
9419 }
9420 }
9421 }
9422
9423 return NULL_TREE;
9424 }
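/* For example, ldexp (1.5, 4) folds to the constant 24.0; requests
   outside the +-MAX_EXP_ADJ window, or whose result fails to
   round-trip through the target mode, are left to the library.  */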
9425
9426 /* Fold a call to builtin modf. */
9427
9428 static tree
9429 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9430 {
9431 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9432 return NULL_TREE;
9433
9434 STRIP_NOPS (arg0);
9435
9436 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9437 return NULL_TREE;
9438
9439 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9440
9441 /* Proceed if a valid pointer type was passed in. */
9442 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9443 {
9444 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9445 REAL_VALUE_TYPE trunc, frac;
9446
9447 switch (value->cl)
9448 {
9449 case rvc_nan:
9450 case rvc_zero:
9451 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9452 trunc = frac = *value;
9453 break;
9454 case rvc_inf:
9455 /* For +-Inf, return (*arg1 = arg0, +-0). */
9456 frac = dconst0;
9457 frac.sign = value->sign;
9458 trunc = *value;
9459 break;
9460 case rvc_normal:
9461 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9462 real_trunc (&trunc, VOIDmode, value);
9463 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9464 /* If the original number was negative and already
9465 integral, then the fractional part is -0.0. */
9466 if (value->sign && frac.cl == rvc_zero)
9467 frac.sign = value->sign;
9468 break;
9469 }
9470
9471 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9472 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9473 build_real (rettype, trunc));
9474 TREE_SIDE_EFFECTS (arg1) = 1;
9475 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9476 build_real (rettype, frac));
9477 }
9478
9479 return NULL_TREE;
9480 }
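/* For example, modf (-3.5, &i) folds to (i = -3.0, -0.5), and for an
   already integral negative input such as modf (-2.0, &i) the
   fractional part is the negative zero -0.0, preserving the sign of
   the argument.  */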
9481
9482 /* Given a location LOC, an interclass builtin function decl FNDECL
9483 and its single argument ARG, return a folded expression computing
9484 the same, or NULL_TREE if we either couldn't or didn't want to fold
9485 (the latter happens if there's an RTL instruction available). */
9486
9487 static tree
9488 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9489 {
9490 machine_mode mode;
9491
9492 if (!validate_arg (arg, REAL_TYPE))
9493 return NULL_TREE;
9494
9495 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9496 return NULL_TREE;
9497
9498 mode = TYPE_MODE (TREE_TYPE (arg));
9499
9500 /* If there is no optab, try generic code. */
9501 switch (DECL_FUNCTION_CODE (fndecl))
9502 {
9503 tree result;
9504
9505 CASE_FLT_FN (BUILT_IN_ISINF):
9506 {
9507 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9508 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9509 tree const type = TREE_TYPE (arg);
9510 REAL_VALUE_TYPE r;
9511 char buf[128];
9512
9513 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9514 real_from_string (&r, buf);
9515 result = build_call_expr (isgr_fn, 2,
9516 fold_build1_loc (loc, ABS_EXPR, type, arg),
9517 build_real (type, r));
9518 return result;
9519 }
9520 CASE_FLT_FN (BUILT_IN_FINITE):
9521 case BUILT_IN_ISFINITE:
9522 {
9523 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9524 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9525 tree const type = TREE_TYPE (arg);
9526 REAL_VALUE_TYPE r;
9527 char buf[128];
9528
9529 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9530 real_from_string (&r, buf);
9531 result = build_call_expr (isle_fn, 2,
9532 fold_build1_loc (loc, ABS_EXPR, type, arg),
9533 build_real (type, r));
9534 /*result = fold_build2_loc (loc, UNGT_EXPR,
9535 TREE_TYPE (TREE_TYPE (fndecl)),
9536 fold_build1_loc (loc, ABS_EXPR, type, arg),
9537 build_real (type, r));
9538 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9539 TREE_TYPE (TREE_TYPE (fndecl)),
9540 result);*/
9541 return result;
9542 }
9543 case BUILT_IN_ISNORMAL:
9544 {
9545 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9546 islessequal(fabs(x),DBL_MAX). */
9547 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9548 tree const isge_fn = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
9549 tree const type = TREE_TYPE (arg);
9550 REAL_VALUE_TYPE rmax, rmin;
9551 char buf[128];
9552
9553 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9554 real_from_string (&rmax, buf);
9555 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9556 real_from_string (&rmin, buf);
9557 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9558 result = build_call_expr (isle_fn, 2, arg,
9559 build_real (type, rmax));
9560 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
9561 build_call_expr (isge_fn, 2, arg,
9562 build_real (type, rmin)));
9563 return result;
9564 }
9565 default:
9566 break;
9567 }
9568
9569 return NULL_TREE;
9570 }
9571
9572 /* Fold a call to __builtin_isnan, __builtin_isinf, __builtin_isinf_sign
9573 or __builtin_isfinite. ARG is the argument; BUILTIN_INDEX selects the fold. */
9574
9575 static tree
9576 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
9577 {
9578 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9579 REAL_VALUE_TYPE r;
9580
9581 if (!validate_arg (arg, REAL_TYPE))
9582 return NULL_TREE;
9583
9584 switch (builtin_index)
9585 {
9586 case BUILT_IN_ISINF:
9587 if (!HONOR_INFINITIES (arg))
9588 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9589
9590 if (TREE_CODE (arg) == REAL_CST)
9591 {
9592 r = TREE_REAL_CST (arg);
9593 if (real_isinf (&r))
9594 return real_compare (GT_EXPR, &r, &dconst0)
9595 ? integer_one_node : integer_minus_one_node;
9596 else
9597 return integer_zero_node;
9598 }
9599
9600 return NULL_TREE;
9601
9602 case BUILT_IN_ISINF_SIGN:
9603 {
9604 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9605 /* In a boolean context, GCC will fold the inner COND_EXPR to
9606 1. So e.g. "if (isinf_sign(x))" would be folded to just
9607 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9608 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
9609 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
9610 tree tmp = NULL_TREE;
9611
9612 arg = builtin_save_expr (arg);
9613
9614 if (signbit_fn && isinf_fn)
9615 {
9616 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9617 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
9618
9619 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9620 signbit_call, integer_zero_node);
9621 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9622 isinf_call, integer_zero_node);
9623
9624 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
9625 integer_minus_one_node, integer_one_node);
9626 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9627 isinf_call, tmp,
9628 integer_zero_node);
9629 }
9630
9631 return tmp;
9632 }
9633
9634 case BUILT_IN_ISFINITE:
9635 if (!HONOR_NANS (arg)
9636 && !HONOR_INFINITIES (arg))
9637 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9638
9639 if (TREE_CODE (arg) == REAL_CST)
9640 {
9641 r = TREE_REAL_CST (arg);
9642 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
9643 }
9644
9645 return NULL_TREE;
9646
9647 case BUILT_IN_ISNAN:
9648 if (!HONOR_NANS (arg))
9649 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9650
9651 if (TREE_CODE (arg) == REAL_CST)
9652 {
9653 r = TREE_REAL_CST (arg);
9654 return real_isnan (&r) ? integer_one_node : integer_zero_node;
9655 }
9656
9657 arg = builtin_save_expr (arg);
9658 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
9659
9660 default:
9661 gcc_unreachable ();
9662 }
9663 }
9664
9665 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9666 This builtin will generate code to return the appropriate floating
9667 point classification depending on the value of the floating point
9668 number passed in. The possible return values must be supplied as
9669 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9670 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis stands for exactly
9671 one floating-point argument, which is "type generic". */
9672
9673 static tree
9674 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
9675 {
9676 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9677 arg, type, res, tmp;
9678 machine_mode mode;
9679 REAL_VALUE_TYPE r;
9680 char buf[128];
9681
9682 /* Verify the required arguments in the original call. */
9683 if (nargs != 6
9684 || !validate_arg (args[0], INTEGER_TYPE)
9685 || !validate_arg (args[1], INTEGER_TYPE)
9686 || !validate_arg (args[2], INTEGER_TYPE)
9687 || !validate_arg (args[3], INTEGER_TYPE)
9688 || !validate_arg (args[4], INTEGER_TYPE)
9689 || !validate_arg (args[5], REAL_TYPE))
9690 return NULL_TREE;
9691
9692 fp_nan = args[0];
9693 fp_infinite = args[1];
9694 fp_normal = args[2];
9695 fp_subnormal = args[3];
9696 fp_zero = args[4];
9697 arg = args[5];
9698 type = TREE_TYPE (arg);
9699 mode = TYPE_MODE (type);
9700 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9701
9702 /* fpclassify(x) ->
9703 isnan(x) ? FP_NAN :
9704 (fabs(x) == Inf ? FP_INFINITE :
9705 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9706 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9707
9708 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9709 build_real (type, dconst0));
9710 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9711 tmp, fp_zero, fp_subnormal);
9712
9713 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9714 real_from_string (&r, buf);
9715 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9716 arg, build_real (type, r));
9717 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
9718
9719 if (HONOR_INFINITIES (mode))
9720 {
9721 real_inf (&r);
9722 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9723 build_real (type, r));
9724 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9725 fp_infinite, res);
9726 }
9727
9728 if (HONOR_NANS (mode))
9729 {
9730 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9731 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
9732 }
9733
9734 return res;
9735 }
9736
9737 /* Fold a call to an unordered comparison function such as
9738 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9739 being called and ARG0 and ARG1 are the arguments for the call.
9740 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9741 the opposite of the desired result. UNORDERED_CODE is used
9742 for modes that can hold NaNs and ORDERED_CODE is used for
9743 the rest. */
9744
9745 static tree
9746 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9747 enum tree_code unordered_code,
9748 enum tree_code ordered_code)
9749 {
9750 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9751 enum tree_code code;
9752 tree type0, type1;
9753 enum tree_code code0, code1;
9754 tree cmp_type = NULL_TREE;
9755
9756 type0 = TREE_TYPE (arg0);
9757 type1 = TREE_TYPE (arg1);
9758
9759 code0 = TREE_CODE (type0);
9760 code1 = TREE_CODE (type1);
9761
9762 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9763 /* Choose the wider of two real types. */
9764 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9765 ? type0 : type1;
9766 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9767 cmp_type = type0;
9768 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9769 cmp_type = type1;
9770
9771 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9772 arg1 = fold_convert_loc (loc, cmp_type, arg1);
9773
9774 if (unordered_code == UNORDERED_EXPR)
9775 {
9776 if (!HONOR_NANS (arg0))
9777 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9778 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
9779 }
9780
9781 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
9782 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9783 fold_build2_loc (loc, code, type, arg0, arg1));
9784 }
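/* For example, __builtin_isgreater (x, y) is folded to !(x unle y):
   negating the inverse unordered comparison gives the desired result
   while returning false quietly, without raising "invalid", when
   either operand is a NaN.  */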
9785
9786 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
9787 arithmetic if it can never overflow, or into internal functions that
9788 return both the result of the arithmetic and an overflow flag in
9789 a complex integer result, or some other check for overflow. */
9790
9791 static tree
9792 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
9793 tree arg0, tree arg1, tree arg2)
9794 {
9795 enum internal_fn ifn = IFN_LAST;
9796 tree type = TREE_TYPE (TREE_TYPE (arg2));
9797 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
9798 switch (fcode)
9799 {
9800 case BUILT_IN_ADD_OVERFLOW:
9801 case BUILT_IN_SADD_OVERFLOW:
9802 case BUILT_IN_SADDL_OVERFLOW:
9803 case BUILT_IN_SADDLL_OVERFLOW:
9804 case BUILT_IN_UADD_OVERFLOW:
9805 case BUILT_IN_UADDL_OVERFLOW:
9806 case BUILT_IN_UADDLL_OVERFLOW:
9807 ifn = IFN_ADD_OVERFLOW;
9808 break;
9809 case BUILT_IN_SUB_OVERFLOW:
9810 case BUILT_IN_SSUB_OVERFLOW:
9811 case BUILT_IN_SSUBL_OVERFLOW:
9812 case BUILT_IN_SSUBLL_OVERFLOW:
9813 case BUILT_IN_USUB_OVERFLOW:
9814 case BUILT_IN_USUBL_OVERFLOW:
9815 case BUILT_IN_USUBLL_OVERFLOW:
9816 ifn = IFN_SUB_OVERFLOW;
9817 break;
9818 case BUILT_IN_MUL_OVERFLOW:
9819 case BUILT_IN_SMUL_OVERFLOW:
9820 case BUILT_IN_SMULL_OVERFLOW:
9821 case BUILT_IN_SMULLL_OVERFLOW:
9822 case BUILT_IN_UMUL_OVERFLOW:
9823 case BUILT_IN_UMULL_OVERFLOW:
9824 case BUILT_IN_UMULLL_OVERFLOW:
9825 ifn = IFN_MUL_OVERFLOW;
9826 break;
9827 default:
9828 gcc_unreachable ();
9829 }
9830 tree ctype = build_complex_type (type);
9831 tree call = build_call_expr_internal_loc (loc, ifn, ctype,
9832 2, arg0, arg1);
9833 tree tgt = save_expr (call);
9834 tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
9835 tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
9836 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
9837 tree store
9838 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
9839 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
9840 }
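/* For example, __builtin_add_overflow (a, b, &r) is folded to the
   equivalent of
     c = .ADD_OVERFLOW (a, b);
     r = REALPART_EXPR <c>, (_Bool) IMAGPART_EXPR <c>;
   where the complex integer C carries both the wrapped result and
   the overflow flag.  */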
9841
9842 /* Fold a call to built-in function FNDECL with 0 arguments.
9843 This function returns NULL_TREE if no simplification was possible. */
9844
9845 static tree
9846 fold_builtin_0 (location_t loc, tree fndecl)
9847 {
9848 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9849 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9850 switch (fcode)
9851 {
9852 CASE_FLT_FN (BUILT_IN_INF):
9853 case BUILT_IN_INFD32:
9854 case BUILT_IN_INFD64:
9855 case BUILT_IN_INFD128:
9856 return fold_builtin_inf (loc, type, true);
9857
9858 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9859 return fold_builtin_inf (loc, type, false);
9860
9861 case BUILT_IN_CLASSIFY_TYPE:
9862 return fold_builtin_classify_type (NULL_TREE);
9863
9864 default:
9865 break;
9866 }
9867 return NULL_TREE;
9868 }
9869
9870 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9871 This function returns NULL_TREE if no simplification was possible. */
9872
9873 static tree
9874 fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
9875 {
9876 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9877 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9878 switch (fcode)
9879 {
9880 case BUILT_IN_CONSTANT_P:
9881 {
9882 tree val = fold_builtin_constant_p (arg0);
9883
9884 /* Gimplification will pull the CALL_EXPR for the builtin out of
9885 an if condition. When not optimizing, we'll not CSE it back.
9886 To avoid regressions such as link errors, return false now. */
9887 if (!val && !optimize)
9888 val = integer_zero_node;
9889
9890 return val;
9891 }
9892
9893 case BUILT_IN_CLASSIFY_TYPE:
9894 return fold_builtin_classify_type (arg0);
9895
9896 case BUILT_IN_STRLEN:
9897 return fold_builtin_strlen (loc, type, arg0);
9898
9899 CASE_FLT_FN (BUILT_IN_FABS):
9900 case BUILT_IN_FABSD32:
9901 case BUILT_IN_FABSD64:
9902 case BUILT_IN_FABSD128:
9903 return fold_builtin_fabs (loc, arg0, type);
9904
9905 case BUILT_IN_ABS:
9906 case BUILT_IN_LABS:
9907 case BUILT_IN_LLABS:
9908 case BUILT_IN_IMAXABS:
9909 return fold_builtin_abs (loc, arg0, type);
9910
9911 CASE_FLT_FN (BUILT_IN_CONJ):
9912 if (validate_arg (arg0, COMPLEX_TYPE)
9913 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9914 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9915 break;
9916
9917 CASE_FLT_FN (BUILT_IN_CREAL):
9918 if (validate_arg (arg0, COMPLEX_TYPE)
9919 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9920 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
9921 break;
9922
9923 CASE_FLT_FN (BUILT_IN_CIMAG):
9924 if (validate_arg (arg0, COMPLEX_TYPE)
9925 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9926 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9927 break;
9928
9929 CASE_FLT_FN (BUILT_IN_CCOS):
9930 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ false);
9931
9932 CASE_FLT_FN (BUILT_IN_CCOSH):
9933 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ true);
9934
9935 CASE_FLT_FN (BUILT_IN_CPROJ):
9936 return fold_builtin_cproj (loc, arg0, type);
9937
9938 CASE_FLT_FN (BUILT_IN_CSIN):
9939 if (validate_arg (arg0, COMPLEX_TYPE)
9940 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9941 return do_mpc_arg1 (arg0, type, mpc_sin);
9942 break;
9943
9944 CASE_FLT_FN (BUILT_IN_CSINH):
9945 if (validate_arg (arg0, COMPLEX_TYPE)
9946 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9947 return do_mpc_arg1 (arg0, type, mpc_sinh);
9948 break;
9949
9950 CASE_FLT_FN (BUILT_IN_CTAN):
9951 if (validate_arg (arg0, COMPLEX_TYPE)
9952 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9953 return do_mpc_arg1 (arg0, type, mpc_tan);
9954 break;
9955
9956 CASE_FLT_FN (BUILT_IN_CTANH):
9957 if (validate_arg (arg0, COMPLEX_TYPE)
9958 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9959 return do_mpc_arg1 (arg0, type, mpc_tanh);
9960 break;
9961
9962 CASE_FLT_FN (BUILT_IN_CLOG):
9963 if (validate_arg (arg0, COMPLEX_TYPE)
9964 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9965 return do_mpc_arg1 (arg0, type, mpc_log);
9966 break;
9967
9968 CASE_FLT_FN (BUILT_IN_CSQRT):
9969 if (validate_arg (arg0, COMPLEX_TYPE)
9970 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9971 return do_mpc_arg1 (arg0, type, mpc_sqrt);
9972 break;
9973
9974 CASE_FLT_FN (BUILT_IN_CASIN):
9975 if (validate_arg (arg0, COMPLEX_TYPE)
9976 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9977 return do_mpc_arg1 (arg0, type, mpc_asin);
9978 break;
9979
9980 CASE_FLT_FN (BUILT_IN_CACOS):
9981 if (validate_arg (arg0, COMPLEX_TYPE)
9982 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9983 return do_mpc_arg1 (arg0, type, mpc_acos);
9984 break;
9985
9986 CASE_FLT_FN (BUILT_IN_CATAN):
9987 if (validate_arg (arg0, COMPLEX_TYPE)
9988 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9989 return do_mpc_arg1 (arg0, type, mpc_atan);
9990 break;
9991
9992 CASE_FLT_FN (BUILT_IN_CASINH):
9993 if (validate_arg (arg0, COMPLEX_TYPE)
9994 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9995 return do_mpc_arg1 (arg0, type, mpc_asinh);
9996 break;
9997
9998 CASE_FLT_FN (BUILT_IN_CACOSH):
9999 if (validate_arg (arg0, COMPLEX_TYPE)
10000 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10001 return do_mpc_arg1 (arg0, type, mpc_acosh);
10002 break;
10003
10004 CASE_FLT_FN (BUILT_IN_CATANH):
10005 if (validate_arg (arg0, COMPLEX_TYPE)
10006 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10007 return do_mpc_arg1 (arg0, type, mpc_atanh);
10008 break;
10009
10010 CASE_FLT_FN (BUILT_IN_CABS):
10011 return fold_builtin_cabs (loc, arg0, type, fndecl);
10012
10013 CASE_FLT_FN (BUILT_IN_CARG):
10014 return fold_builtin_carg (loc, arg0, type);
10015
10016 CASE_FLT_FN (BUILT_IN_SQRT):
10017 return fold_builtin_sqrt (loc, arg0, type);
10018
10019 CASE_FLT_FN (BUILT_IN_CBRT):
10020 return fold_builtin_cbrt (loc, arg0, type);
10021
10022 CASE_FLT_FN (BUILT_IN_ASIN):
10023 if (validate_arg (arg0, REAL_TYPE))
10024 return do_mpfr_arg1 (arg0, type, mpfr_asin,
10025 &dconstm1, &dconst1, true);
10026 break;
10027
10028 CASE_FLT_FN (BUILT_IN_ACOS):
10029 if (validate_arg (arg0, REAL_TYPE))
10030 return do_mpfr_arg1 (arg0, type, mpfr_acos,
10031 &dconstm1, &dconst1, true);
10032 break;
10033
10034 CASE_FLT_FN (BUILT_IN_ATAN):
10035 if (validate_arg (arg0, REAL_TYPE))
10036 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
10037 break;
10038
10039 CASE_FLT_FN (BUILT_IN_ASINH):
10040 if (validate_arg (arg0, REAL_TYPE))
10041 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
10042 break;
10043
10044 CASE_FLT_FN (BUILT_IN_ACOSH):
10045 if (validate_arg (arg0, REAL_TYPE))
10046 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
10047 &dconst1, NULL, true);
10048 break;
10049
10050 CASE_FLT_FN (BUILT_IN_ATANH):
10051 if (validate_arg (arg0, REAL_TYPE))
10052 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
10053 &dconstm1, &dconst1, false);
10054 break;
10055
10056 CASE_FLT_FN (BUILT_IN_SIN):
10057 if (validate_arg (arg0, REAL_TYPE))
10058 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
10059 break;
10060
10061 CASE_FLT_FN (BUILT_IN_COS):
10062 return fold_builtin_cos (loc, arg0, type, fndecl);
10063
10064 CASE_FLT_FN (BUILT_IN_TAN):
10065 return fold_builtin_tan (arg0, type);
10066
10067 CASE_FLT_FN (BUILT_IN_CEXP):
10068 return fold_builtin_cexp (loc, arg0, type);
10069
10070 CASE_FLT_FN (BUILT_IN_CEXPI):
10071 if (validate_arg (arg0, REAL_TYPE))
10072 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
10073 break;
10074
10075 CASE_FLT_FN (BUILT_IN_SINH):
10076 if (validate_arg (arg0, REAL_TYPE))
10077 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
10078 break;
10079
10080 CASE_FLT_FN (BUILT_IN_COSH):
10081 return fold_builtin_cosh (loc, arg0, type, fndecl);
10082
10083 CASE_FLT_FN (BUILT_IN_TANH):
10084 if (validate_arg (arg0, REAL_TYPE))
10085 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
10086 break;
10087
10088 CASE_FLT_FN (BUILT_IN_ERF):
10089 if (validate_arg (arg0, REAL_TYPE))
10090 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
10091 break;
10092
10093 CASE_FLT_FN (BUILT_IN_ERFC):
10094 if (validate_arg (arg0, REAL_TYPE))
10095 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
10096 break;
10097
10098 CASE_FLT_FN (BUILT_IN_TGAMMA):
10099 if (validate_arg (arg0, REAL_TYPE))
10100 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
10101 break;
10102
10103 CASE_FLT_FN (BUILT_IN_EXP):
10104 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
10105
10106 CASE_FLT_FN (BUILT_IN_EXP2):
10107 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
10108
10109 CASE_FLT_FN (BUILT_IN_EXP10):
10110 CASE_FLT_FN (BUILT_IN_POW10):
10111 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
10112
10113 CASE_FLT_FN (BUILT_IN_EXPM1):
10114 if (validate_arg (arg0, REAL_TYPE))
10115 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
10116 break;
10117
10118 CASE_FLT_FN (BUILT_IN_LOG):
10119 if (validate_arg (arg0, REAL_TYPE))
10120 return do_mpfr_arg1 (arg0, type, mpfr_log, &dconst0, NULL, false);
10121 break;
10122
10123 CASE_FLT_FN (BUILT_IN_LOG2):
10124 if (validate_arg (arg0, REAL_TYPE))
10125 return do_mpfr_arg1 (arg0, type, mpfr_log2, &dconst0, NULL, false);
10126 break;
10127
10128 CASE_FLT_FN (BUILT_IN_LOG10):
10129 if (validate_arg (arg0, REAL_TYPE))
10130 return do_mpfr_arg1 (arg0, type, mpfr_log10, &dconst0, NULL, false);
10131 break;
10132
10133 CASE_FLT_FN (BUILT_IN_LOG1P):
10134 if (validate_arg (arg0, REAL_TYPE))
10135 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
10136 &dconstm1, NULL, false);
10137 break;
10138
10139 CASE_FLT_FN (BUILT_IN_J0):
10140 if (validate_arg (arg0, REAL_TYPE))
10141 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10142 NULL, NULL, 0);
10143 break;
10144
10145 CASE_FLT_FN (BUILT_IN_J1):
10146 if (validate_arg (arg0, REAL_TYPE))
10147 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10148 NULL, NULL, 0);
10149 break;
10150
10151 CASE_FLT_FN (BUILT_IN_Y0):
10152 if (validate_arg (arg0, REAL_TYPE))
10153 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10154 &dconst0, NULL, false);
10155 break;
10156
10157 CASE_FLT_FN (BUILT_IN_Y1):
10158 if (validate_arg (arg0, REAL_TYPE))
10159 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10160 &dconst0, NULL, false);
10161 break;
10162
10163 CASE_FLT_FN (BUILT_IN_NAN):
10164 case BUILT_IN_NAND32:
10165 case BUILT_IN_NAND64:
10166 case BUILT_IN_NAND128:
10167 return fold_builtin_nan (arg0, type, true);
10168
10169 CASE_FLT_FN (BUILT_IN_NANS):
10170 return fold_builtin_nan (arg0, type, false);
10171
10172 CASE_FLT_FN (BUILT_IN_FLOOR):
10173 return fold_builtin_floor (loc, fndecl, arg0);
10174
10175 CASE_FLT_FN (BUILT_IN_CEIL):
10176 return fold_builtin_ceil (loc, fndecl, arg0);
10177
10178 CASE_FLT_FN (BUILT_IN_TRUNC):
10179 return fold_builtin_trunc (loc, fndecl, arg0);
10180
10181 CASE_FLT_FN (BUILT_IN_ROUND):
10182 return fold_builtin_round (loc, fndecl, arg0);
10183
10184 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10185 CASE_FLT_FN (BUILT_IN_RINT):
10186 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
10187
10188 CASE_FLT_FN (BUILT_IN_ICEIL):
10189 CASE_FLT_FN (BUILT_IN_LCEIL):
10190 CASE_FLT_FN (BUILT_IN_LLCEIL):
10191 CASE_FLT_FN (BUILT_IN_LFLOOR):
10192 CASE_FLT_FN (BUILT_IN_IFLOOR):
10193 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10194 CASE_FLT_FN (BUILT_IN_IROUND):
10195 CASE_FLT_FN (BUILT_IN_LROUND):
10196 CASE_FLT_FN (BUILT_IN_LLROUND):
10197 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
10198
10199 CASE_FLT_FN (BUILT_IN_IRINT):
10200 CASE_FLT_FN (BUILT_IN_LRINT):
10201 CASE_FLT_FN (BUILT_IN_LLRINT):
10202 return fold_fixed_mathfn (loc, fndecl, arg0);
10203
10204 case BUILT_IN_BSWAP16:
10205 case BUILT_IN_BSWAP32:
10206 case BUILT_IN_BSWAP64:
10207 return fold_builtin_bswap (fndecl, arg0);
10208
10209 CASE_INT_FN (BUILT_IN_FFS):
10210 CASE_INT_FN (BUILT_IN_CLZ):
10211 CASE_INT_FN (BUILT_IN_CTZ):
10212 CASE_INT_FN (BUILT_IN_CLRSB):
10213 CASE_INT_FN (BUILT_IN_POPCOUNT):
10214 CASE_INT_FN (BUILT_IN_PARITY):
10215 return fold_builtin_bitop (fndecl, arg0);
10216
10217 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10218 return fold_builtin_signbit (loc, arg0, type);
10219
10220 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10221 return fold_builtin_significand (loc, arg0, type);
10222
10223 CASE_FLT_FN (BUILT_IN_ILOGB):
10224 CASE_FLT_FN (BUILT_IN_LOGB):
10225 return fold_builtin_logb (loc, arg0, type);
10226
10227 case BUILT_IN_ISASCII:
10228 return fold_builtin_isascii (loc, arg0);
10229
10230 case BUILT_IN_TOASCII:
10231 return fold_builtin_toascii (loc, arg0);
10232
10233 case BUILT_IN_ISDIGIT:
10234 return fold_builtin_isdigit (loc, arg0);
10235
10236 CASE_FLT_FN (BUILT_IN_FINITE):
10237 case BUILT_IN_FINITED32:
10238 case BUILT_IN_FINITED64:
10239 case BUILT_IN_FINITED128:
10240 case BUILT_IN_ISFINITE:
10241 {
10242 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10243 if (ret)
10244 return ret;
10245 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10246 }
10247
10248 CASE_FLT_FN (BUILT_IN_ISINF):
10249 case BUILT_IN_ISINFD32:
10250 case BUILT_IN_ISINFD64:
10251 case BUILT_IN_ISINFD128:
10252 {
10253 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10254 if (ret)
10255 return ret;
10256 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10257 }
10258
10259 case BUILT_IN_ISNORMAL:
10260 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10261
10262 case BUILT_IN_ISINF_SIGN:
10263 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10264
10265 CASE_FLT_FN (BUILT_IN_ISNAN):
10266 case BUILT_IN_ISNAND32:
10267 case BUILT_IN_ISNAND64:
10268 case BUILT_IN_ISNAND128:
10269 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10270
10271 case BUILT_IN_FREE:
10272 if (integer_zerop (arg0))
10273 return build_empty_stmt (loc);
10274 break;
10275
10276 default:
10277 break;
10278 }
10279
10280 return NULL_TREE;
10282 }
10283
10284 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10285 This function returns NULL_TREE if no simplification was possible. */
10286
10287 static tree
10288 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
10289 {
10290 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10291 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10292
10293 switch (fcode)
10294 {
10295 CASE_FLT_FN (BUILT_IN_JN):
10296 if (validate_arg (arg0, INTEGER_TYPE)
10297 && validate_arg (arg1, REAL_TYPE))
10298 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10299 break;
10300
10301 CASE_FLT_FN (BUILT_IN_YN):
10302 if (validate_arg (arg0, INTEGER_TYPE)
10303 && validate_arg (arg1, REAL_TYPE))
10304 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10305 &dconst0, false);
10306 break;
10307
10308 CASE_FLT_FN (BUILT_IN_DREM):
10309 CASE_FLT_FN (BUILT_IN_REMAINDER):
10310 if (validate_arg (arg0, REAL_TYPE)
10311 && validate_arg (arg1, REAL_TYPE))
10312 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10313 break;
10314
10315 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10316 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10317 if (validate_arg (arg0, REAL_TYPE)
10318 && validate_arg (arg1, POINTER_TYPE))
10319 return do_mpfr_lgamma_r (arg0, arg1, type);
10320 break;
10321
10322 CASE_FLT_FN (BUILT_IN_ATAN2):
10323 if (validate_arg (arg0, REAL_TYPE)
10324 && validate_arg (arg1, REAL_TYPE))
10325 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10326 break;
10327
10328 CASE_FLT_FN (BUILT_IN_FDIM):
10329 if (validate_arg (arg0, REAL_TYPE)
10330 && validate_arg (arg1, REAL_TYPE))
10331 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10332 break;
10333
10334 CASE_FLT_FN (BUILT_IN_HYPOT):
10335 return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
10336
10337 CASE_FLT_FN (BUILT_IN_CPOW):
10338 if (validate_arg (arg0, COMPLEX_TYPE)
10339 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10340 && validate_arg (arg1, COMPLEX_TYPE)
10341 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
10342 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
10343 break;
10344
10345 CASE_FLT_FN (BUILT_IN_LDEXP):
10346 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
10347 CASE_FLT_FN (BUILT_IN_SCALBN):
10348 CASE_FLT_FN (BUILT_IN_SCALBLN):
10349 return fold_builtin_load_exponent (loc, arg0, arg1,
10350 type, /*ldexp=*/false);
10351
10352 CASE_FLT_FN (BUILT_IN_FREXP):
10353 return fold_builtin_frexp (loc, arg0, arg1, type);
10354
10355 CASE_FLT_FN (BUILT_IN_MODF):
10356 return fold_builtin_modf (loc, arg0, arg1, type);
10357
10358 case BUILT_IN_STRSTR:
10359 return fold_builtin_strstr (loc, arg0, arg1, type);
10360
10361 case BUILT_IN_STRSPN:
10362 return fold_builtin_strspn (loc, arg0, arg1);
10363
10364 case BUILT_IN_STRCSPN:
10365 return fold_builtin_strcspn (loc, arg0, arg1);
10366
10367 case BUILT_IN_STRCHR:
10368 case BUILT_IN_INDEX:
10369 return fold_builtin_strchr (loc, arg0, arg1, type);
10370
10371 case BUILT_IN_STRRCHR:
10372 case BUILT_IN_RINDEX:
10373 return fold_builtin_strrchr (loc, arg0, arg1, type);
10374
10375 case BUILT_IN_STRCMP:
10376 return fold_builtin_strcmp (loc, arg0, arg1);
10377
10378 case BUILT_IN_STRPBRK:
10379 return fold_builtin_strpbrk (loc, arg0, arg1, type);
10380
10381 case BUILT_IN_EXPECT:
10382 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);
10383
10384 CASE_FLT_FN (BUILT_IN_POW):
10385 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
10386
10387 CASE_FLT_FN (BUILT_IN_POWI):
10388 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
10389
10390 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10391 return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
10392
10393 CASE_FLT_FN (BUILT_IN_FMIN):
10394 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
10395
10396 CASE_FLT_FN (BUILT_IN_FMAX):
10397 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
10398
10399 case BUILT_IN_ISGREATER:
10400 return fold_builtin_unordered_cmp (loc, fndecl,
10401 arg0, arg1, UNLE_EXPR, LE_EXPR);
10402 case BUILT_IN_ISGREATEREQUAL:
10403 return fold_builtin_unordered_cmp (loc, fndecl,
10404 arg0, arg1, UNLT_EXPR, LT_EXPR);
10405 case BUILT_IN_ISLESS:
10406 return fold_builtin_unordered_cmp (loc, fndecl,
10407 arg0, arg1, UNGE_EXPR, GE_EXPR);
10408 case BUILT_IN_ISLESSEQUAL:
10409 return fold_builtin_unordered_cmp (loc, fndecl,
10410 arg0, arg1, UNGT_EXPR, GT_EXPR);
10411 case BUILT_IN_ISLESSGREATER:
10412 return fold_builtin_unordered_cmp (loc, fndecl,
10413 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10414 case BUILT_IN_ISUNORDERED:
10415 return fold_builtin_unordered_cmp (loc, fndecl,
10416 arg0, arg1, UNORDERED_EXPR,
10417 NOP_EXPR);
10418
10419 /* We do the folding for va_start in the expander. */
10420 case BUILT_IN_VA_START:
10421 break;
10422
10423 case BUILT_IN_OBJECT_SIZE:
10424 return fold_builtin_object_size (arg0, arg1);
10425
10426 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
10427 return fold_builtin_atomic_always_lock_free (arg0, arg1);
10428
10429 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
10430 return fold_builtin_atomic_is_lock_free (arg0, arg1);
10431
10432 default:
10433 break;
10434 }
10435 return NULL_TREE;
10436 }
10437
10438 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10439 and ARG2.
10440 This function returns NULL_TREE if no simplification was possible. */
10441
10442 static tree
10443 fold_builtin_3 (location_t loc, tree fndecl,
10444 tree arg0, tree arg1, tree arg2)
10445 {
10446 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10447 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10448 switch (fcode)
10449 {
10450
10451 CASE_FLT_FN (BUILT_IN_SINCOS):
10452 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10453
10454 CASE_FLT_FN (BUILT_IN_FMA):
10455 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
10457
10458 CASE_FLT_FN (BUILT_IN_REMQUO):
10459 if (validate_arg (arg0, REAL_TYPE)
10460 && validate_arg (arg1, REAL_TYPE)
10461 && validate_arg (arg2, POINTER_TYPE))
10462 return do_mpfr_remquo (arg0, arg1, arg2);
10463 break;
10464
10465 case BUILT_IN_STRNCMP:
10466 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10467
10468 case BUILT_IN_MEMCHR:
10469 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10470
10471 case BUILT_IN_BCMP:
10472 case BUILT_IN_MEMCMP:
10473 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
10474
10475 case BUILT_IN_EXPECT:
10476 return fold_builtin_expect (loc, arg0, arg1, arg2);
10477
10478 case BUILT_IN_ADD_OVERFLOW:
10479 case BUILT_IN_SUB_OVERFLOW:
10480 case BUILT_IN_MUL_OVERFLOW:
10481 case BUILT_IN_SADD_OVERFLOW:
10482 case BUILT_IN_SADDL_OVERFLOW:
10483 case BUILT_IN_SADDLL_OVERFLOW:
10484 case BUILT_IN_SSUB_OVERFLOW:
10485 case BUILT_IN_SSUBL_OVERFLOW:
10486 case BUILT_IN_SSUBLL_OVERFLOW:
10487 case BUILT_IN_SMUL_OVERFLOW:
10488 case BUILT_IN_SMULL_OVERFLOW:
10489 case BUILT_IN_SMULLL_OVERFLOW:
10490 case BUILT_IN_UADD_OVERFLOW:
10491 case BUILT_IN_UADDL_OVERFLOW:
10492 case BUILT_IN_UADDLL_OVERFLOW:
10493 case BUILT_IN_USUB_OVERFLOW:
10494 case BUILT_IN_USUBL_OVERFLOW:
10495 case BUILT_IN_USUBLL_OVERFLOW:
10496 case BUILT_IN_UMUL_OVERFLOW:
10497 case BUILT_IN_UMULL_OVERFLOW:
10498 case BUILT_IN_UMULLL_OVERFLOW:
10499 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
10500
10501 default:
10502 break;
10503 }
10504 return NULL_TREE;
10505 }
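
/* Example (illustrative): with constant operands the checked-arithmetic
   builtins above fold to a constant result plus a constant overflow
   flag:

     int r;
     _Bool ovf = __builtin_add_overflow (INT_MAX, 1, &r);

   folds so that R becomes INT_MIN and OVF becomes 1, assuming the
   usual two's-complement 32-bit int. */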
10506
10507 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10508 arguments. The trailing bool parameter, formerly IGNORE (true if the
10509 result of the call is ignored), is no longer used. This function
10510 returns NULL_TREE if no simplification was possible. */
10511
10512 tree
10513 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
10514 {
10515 tree ret = NULL_TREE;
10516
10517 switch (nargs)
10518 {
10519 case 0:
10520 ret = fold_builtin_0 (loc, fndecl);
10521 break;
10522 case 1:
10523 ret = fold_builtin_1 (loc, fndecl, args[0]);
10524 break;
10525 case 2:
10526 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
10527 break;
10528 case 3:
10529 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
10530 break;
10531 default:
10532 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
10533 break;
10534 }
10535 if (ret)
10536 {
10537 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10538 SET_EXPR_LOCATION (ret, loc);
10539 TREE_NO_WARNING (ret) = 1;
10540 return ret;
10541 }
10542 return NULL_TREE;
10543 }
10544
10545 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
10546 list ARGS along with N new arguments in NEWARGS. SKIP is the number
10547 of arguments in ARGS to be omitted. OLDNARGS is the number of
10548 elements in ARGS. */
10549
10550 static tree
10551 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
10552 int skip, tree fndecl, int n, va_list newargs)
10553 {
10554 int nargs = oldnargs - skip + n;
10555 tree *buffer;
10556
10557 if (n > 0)
10558 {
10559 int i, j;
10560
10561 buffer = XALLOCAVEC (tree, nargs);
10562 for (i = 0; i < n; i++)
10563 buffer[i] = va_arg (newargs, tree);
10564 for (j = skip; j < oldnargs; j++, i++)
10565 buffer[i] = args[j];
10566 }
10567 else
10568 buffer = args + skip;
10569
10570 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
10571 }
10572
10573 /* Return true if FNDECL shouldn't be folded right now.
10574 If a built-in function has an always_inline wrapper, defer
10575 folding it until after always_inline functions have been
10576 inlined; otherwise checks such as -D_FORTIFY_SOURCE might
10577 not be performed. */
10578
10579 bool
10580 avoid_folding_inline_builtin (tree fndecl)
10581 {
10582 return (DECL_DECLARED_INLINE_P (fndecl)
10583 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10584 && cfun
10585 && !cfun->always_inline_functions_inlined
10586 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
10587 }
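
/* Example (illustrative, modeled on glibc's fortify wrappers): with
   -D_FORTIFY_SOURCE, memcpy may be declared roughly as

     extern __inline __attribute__ ((__always_inline__)) void *
     memcpy (void *dest, const void *src, size_t n)
     {
       return __builtin___memcpy_chk (dest, src, n,
                                      __builtin_object_size (dest, 0));
     }

   Folding a memcpy call before this wrapper is inlined would bypass
   the object-size check, hence the deferral tested for above. */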
10588
10589 /* A wrapper function for builtin folding that prevents warnings for
10590 "statement without effect" and the like, caused by removing the
10591 call node earlier than the warning is generated. */
10592
10593 tree
10594 fold_call_expr (location_t loc, tree exp, bool ignore)
10595 {
10596 tree ret = NULL_TREE;
10597 tree fndecl = get_callee_fndecl (exp);
10598 if (fndecl
10599 && TREE_CODE (fndecl) == FUNCTION_DECL
10600 && DECL_BUILT_IN (fndecl)
10601 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10602 yet. Defer folding until we see all the arguments
10603 (after inlining). */
10604 && !CALL_EXPR_VA_ARG_PACK (exp))
10605 {
10606 int nargs = call_expr_nargs (exp);
10607
10608 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10609 instead the last argument is __builtin_va_arg_pack (). Defer folding
10610 even in that case, until arguments are finalized. */
10611 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10612 {
10613 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10614 if (fndecl2
10615 && TREE_CODE (fndecl2) == FUNCTION_DECL
10616 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10617 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10618 return NULL_TREE;
10619 }
10620
10621 if (avoid_folding_inline_builtin (fndecl))
10622 return NULL_TREE;
10623
10624 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10625 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
10626 CALL_EXPR_ARGP (exp), ignore);
10627 else
10628 {
10629 tree *args = CALL_EXPR_ARGP (exp);
10630 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
10631 if (ret)
10632 return ret;
10633 }
10634 }
10635 return NULL_TREE;
10636 }
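
/* Example (illustrative): folding is deferred for calls whose last
   argument is __builtin_va_arg_pack (), as in a hypothetical
   always_inline forwarding wrapper:

     extern int log_err (const char *fmt, ...);
     extern __inline
     __attribute__ ((__always_inline__, __gnu_inline__)) int
     log_err (const char *fmt, ...)
     {
       return fprintf (stderr, fmt, __builtin_va_arg_pack ());
     }

   The real argument list only exists once the wrapper has been
   inlined into its caller. */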
10637
10638 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
10639 N arguments are passed in the array ARGARRAY. Return a folded
10640 expression or NULL_TREE if no simplification was possible. */
10641
10642 tree
10643 fold_builtin_call_array (location_t loc, tree,
10644 tree fn,
10645 int n,
10646 tree *argarray)
10647 {
10648 if (TREE_CODE (fn) != ADDR_EXPR)
10649 return NULL_TREE;
10650
10651 tree fndecl = TREE_OPERAND (fn, 0);
10652 if (TREE_CODE (fndecl) == FUNCTION_DECL
10653 && DECL_BUILT_IN (fndecl))
10654 {
10655 /* If last argument is __builtin_va_arg_pack (), arguments to this
10656 function are not finalized yet. Defer folding until they are. */
10657 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
10658 {
10659 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
10660 if (fndecl2
10661 && TREE_CODE (fndecl2) == FUNCTION_DECL
10662 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10663 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10664 return NULL_TREE;
10665 }
10666 if (avoid_folding_inline_builtin (fndecl))
10667 return NULL_TREE;
10668 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10669 return targetm.fold_builtin (fndecl, n, argarray, false);
10670 else
10671 return fold_builtin_n (loc, fndecl, argarray, n, false);
10672 }
10673
10674 return NULL_TREE;
10675 }
10676
10677 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10678 along with N new arguments specified as the "..." parameters. SKIP
10679 is the number of arguments in EXP to be omitted. This function is used
10680 to do varargs-to-varargs transformations. */
10681
10682 static tree
10683 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
10684 {
10685 va_list ap;
10686 tree t;
10687
10688 va_start (ap, n);
10689 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
10690 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
10691 va_end (ap);
10692
10693 return t;
10694 }
10695
10696 /* Validate a single argument ARG against a tree code CODE representing
10697 a type. */
10698
10699 static bool
10700 validate_arg (const_tree arg, enum tree_code code)
10701 {
10702 if (!arg)
10703 return false;
10704 else if (code == POINTER_TYPE)
10705 return POINTER_TYPE_P (TREE_TYPE (arg));
10706 else if (code == INTEGER_TYPE)
10707 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10708 return code == TREE_CODE (TREE_TYPE (arg));
10709 }
10710
10711 /* This function validates the types of a function call argument list
10712 against a specified list of tree_codes. If the last specifier is a 0,
10713 that represents an ellipsis; otherwise the last specifier must be a
10714 VOID_TYPE.
10715
10716 This is the GIMPLE version of validate_arglist. Eventually we want to
10717 completely convert builtins.c to work on GIMPLE, at which point the
10718 tree-based validate_arglist will be removed. */
10719
10720 bool
10721 validate_gimple_arglist (const gcall *call, ...)
10722 {
10723 enum tree_code code;
10724 bool res = false;
10725 va_list ap;
10726 const_tree arg;
10727 size_t i;
10728
10729 va_start (ap, call);
10730 i = 0;
10731
10732 do
10733 {
10734 code = (enum tree_code) va_arg (ap, int);
10735 switch (code)
10736 {
10737 case 0:
10738 /* This signifies an ellipsis; any further arguments are all OK. */
10739 res = true;
10740 goto end;
10741 case VOID_TYPE:
10742 /* This signifies an endlink: if no arguments remain, return
10743 true; otherwise return false. */
10744 res = (i == gimple_call_num_args (call));
10745 goto end;
10746 default:
10747 /* If no parameters remain or the parameter's code does not
10748 match the specified code, return false. Otherwise continue
10749 checking any remaining arguments. */
10750 arg = gimple_call_arg (call, i++);
10751 if (!validate_arg (arg, code))
10752 goto end;
10753 break;
10754 }
10755 }
10756 while (1);
10757
10758 /* We need the gotos so that the single va_end call below is
10759 reached on every exit path. */
10760 end: ;
10761 va_end (ap);
10762
10763 return res;
10764 }
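
/* Example (illustrative): a caller checking a memcpy-like call would
   write

     if (!validate_gimple_arglist (call, POINTER_TYPE, POINTER_TYPE,
                                   INTEGER_TYPE, VOID_TYPE))
       return false;

   where the trailing VOID_TYPE marks the end of the expected argument
   list and a trailing 0 would instead permit further arguments. */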
10765
10766 /* Default target-specific builtin expander that does nothing. */
10767
10768 rtx
10769 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10770 rtx target ATTRIBUTE_UNUSED,
10771 rtx subtarget ATTRIBUTE_UNUSED,
10772 machine_mode mode ATTRIBUTE_UNUSED,
10773 int ignore ATTRIBUTE_UNUSED)
10774 {
10775 return NULL_RTX;
10776 }
10777
10778 /* Returns true if EXP represents data that would potentially reside
10779 in a readonly section. */
10780
10781 bool
10782 readonly_data_expr (tree exp)
10783 {
10784 STRIP_NOPS (exp);
10785
10786 if (TREE_CODE (exp) != ADDR_EXPR)
10787 return false;
10788
10789 exp = get_base_address (TREE_OPERAND (exp, 0));
10790 if (!exp)
10791 return false;
10792
10793 /* Make sure we call decl_readonly_section only for trees it
10794 can handle (since it returns true for everything it doesn't
10795 understand). */
10796 if (TREE_CODE (exp) == STRING_CST
10797 || TREE_CODE (exp) == CONSTRUCTOR
10798 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
10799 return decl_readonly_section (exp, 0);
10800 else
10801 return false;
10802 }
10803
10804 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
10805 to the call, and TYPE is its return type.
10806
10807 Return NULL_TREE if no simplification was possible, otherwise return the
10808 simplified form of the call as a tree.
10809
10810 The simplified form may be a constant or other expression which
10811 computes the same value, but in a more efficient manner (including
10812 calls to other builtin functions).
10813
10814 The call may contain arguments which need to be evaluated, but
10815 which are not useful to determine the result of the call. In
10816 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10817 COMPOUND_EXPR will be an argument which must be evaluated.
10818 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10819 COMPOUND_EXPR in the chain will contain the tree for the simplified
10820 form of the builtin function call. */
10821
10822 static tree
10823 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
10824 {
10825 if (!validate_arg (s1, POINTER_TYPE)
10826 || !validate_arg (s2, POINTER_TYPE))
10827 return NULL_TREE;
10828 else
10829 {
10830 tree fn;
10831 const char *p1, *p2;
10832
10833 p2 = c_getstr (s2);
10834 if (p2 == NULL)
10835 return NULL_TREE;
10836
10837 p1 = c_getstr (s1);
10838 if (p1 != NULL)
10839 {
10840 const char *r = strstr (p1, p2);
10841 tree tem;
10842
10843 if (r == NULL)
10844 return build_int_cst (TREE_TYPE (s1), 0);
10845
10846 /* Return an offset into the constant string argument. */
10847 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10848 return fold_convert_loc (loc, type, tem);
10849 }
10850
10851 /* The argument is const char *, and the result is char *, so we need
10852 a type conversion here to avoid a warning. */
10853 if (p2[0] == '\0')
10854 return fold_convert_loc (loc, type, s1);
10855
10856 if (p2[1] != '\0')
10857 return NULL_TREE;
10858
10859 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
10860 if (!fn)
10861 return NULL_TREE;
10862
10863 /* New argument list transforming strstr(s1, s2) to
10864 strchr(s1, s2[0]). */
10865 return build_call_expr_loc (loc, fn, 2, s1,
10866 build_int_cst (integer_type_node, p2[0]));
10867 }
10868 }
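
/* Examples (illustrative) of the strstr folds above:

     strstr (s, "")         -> (char *) s
     strstr (s, "c")        -> strchr (s, 'c')
     strstr ("hello", "ll") -> &"hello"[2]
     strstr ("hello", "z")  -> (char *) 0  */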
10869
10870 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
10871 the call, and TYPE is its return type.
10872
10873 Return NULL_TREE if no simplification was possible, otherwise return the
10874 simplified form of the call as a tree.
10875
10876 The simplified form may be a constant or other expression which
10877 computes the same value, but in a more efficient manner (including
10878 calls to other builtin functions).
10879
10880 The call may contain arguments which need to be evaluated, but
10881 which are not useful to determine the result of the call. In
10882 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10883 COMPOUND_EXPR will be an argument which must be evaluated.
10884 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10885 COMPOUND_EXPR in the chain will contain the tree for the simplified
10886 form of the builtin function call. */
10887
10888 static tree
10889 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
10890 {
10891 if (!validate_arg (s1, POINTER_TYPE)
10892 || !validate_arg (s2, INTEGER_TYPE))
10893 return NULL_TREE;
10894 else
10895 {
10896 const char *p1;
10897
10898 if (TREE_CODE (s2) != INTEGER_CST)
10899 return NULL_TREE;
10900
10901 p1 = c_getstr (s1);
10902 if (p1 != NULL)
10903 {
10904 char c;
10905 const char *r;
10906 tree tem;
10907
10908 if (target_char_cast (s2, &c))
10909 return NULL_TREE;
10910
10911 r = strchr (p1, c);
10912
10913 if (r == NULL)
10914 return build_int_cst (TREE_TYPE (s1), 0);
10915
10916 /* Return an offset into the constant string argument. */
10917 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10918 return fold_convert_loc (loc, type, tem);
10919 }
10920 return NULL_TREE;
10921 }
10922 }
10923
10924 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
10925 the call, and TYPE is its return type.
10926
10927 Return NULL_TREE if no simplification was possible, otherwise return the
10928 simplified form of the call as a tree.
10929
10930 The simplified form may be a constant or other expression which
10931 computes the same value, but in a more efficient manner (including
10932 calls to other builtin functions).
10933
10934 The call may contain arguments which need to be evaluated, but
10935 which are not useful to determine the result of the call. In
10936 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10937 COMPOUND_EXPR will be an argument which must be evaluated.
10938 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10939 COMPOUND_EXPR in the chain will contain the tree for the simplified
10940 form of the builtin function call. */
10941
10942 static tree
10943 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
10944 {
10945 if (!validate_arg (s1, POINTER_TYPE)
10946 || !validate_arg (s2, INTEGER_TYPE))
10947 return NULL_TREE;
10948 else
10949 {
10950 tree fn;
10951 const char *p1;
10952
10953 if (TREE_CODE (s2) != INTEGER_CST)
10954 return NULL_TREE;
10955
10956 p1 = c_getstr (s1);
10957 if (p1 != NULL)
10958 {
10959 char c;
10960 const char *r;
10961 tree tem;
10962
10963 if (target_char_cast (s2, &c))
10964 return NULL_TREE;
10965
10966 r = strrchr (p1, c);
10967
10968 if (r == NULL)
10969 return build_int_cst (TREE_TYPE (s1), 0);
10970
10971 /* Return an offset into the constant string argument. */
10972 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10973 return fold_convert_loc (loc, type, tem);
10974 }
10975
10976 if (! integer_zerop (s2))
10977 return NULL_TREE;
10978
10979 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
10980 if (!fn)
10981 return NULL_TREE;
10982
10983 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
10984 return build_call_expr_loc (loc, fn, 2, s1, s2);
10985 }
10986 }
10987
10988 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
10989 to the call, and TYPE is its return type.
10990
10991 Return NULL_TREE if no simplification was possible, otherwise return the
10992 simplified form of the call as a tree.
10993
10994 The simplified form may be a constant or other expression which
10995 computes the same value, but in a more efficient manner (including
10996 calls to other builtin functions).
10997
10998 The call may contain arguments which need to be evaluated, but
10999 which are not useful to determine the result of the call. In
11000 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11001 COMPOUND_EXPR will be an argument which must be evaluated.
11002 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11003 COMPOUND_EXPR in the chain will contain the tree for the simplified
11004 form of the builtin function call. */
11005
11006 static tree
11007 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
11008 {
11009 if (!validate_arg (s1, POINTER_TYPE)
11010 || !validate_arg (s2, POINTER_TYPE))
11011 return NULL_TREE;
11012 else
11013 {
11014 tree fn;
11015 const char *p1, *p2;
11016
11017 p2 = c_getstr (s2);
11018 if (p2 == NULL)
11019 return NULL_TREE;
11020
11021 p1 = c_getstr (s1);
11022 if (p1 != NULL)
11023 {
11024 const char *r = strpbrk (p1, p2);
11025 tree tem;
11026
11027 if (r == NULL)
11028 return build_int_cst (TREE_TYPE (s1), 0);
11029
11030 /* Return an offset into the constant string argument. */
11031 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11032 return fold_convert_loc (loc, type, tem);
11033 }
11034
11035 if (p2[0] == '\0')
11036 /* strpbrk(x, "") == NULL.
11037 Evaluate and ignore s1 in case it has side-effects. */
11038 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
11039
11040 if (p2[1] != '\0')
11041 return NULL_TREE; /* Really call strpbrk. */
11042
11043 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11044 if (!fn)
11045 return NULL_TREE;
11046
11047 /* New argument list transforming strpbrk(s1, s2) to
11048 strchr(s1, s2[0]). */
11049 return build_call_expr_loc (loc, fn, 2, s1,
11050 build_int_cst (integer_type_node, p2[0]));
11051 }
11052 }
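
/* Examples (illustrative) of the strpbrk folds above:

     strpbrk (s, "")      -> (char *) 0, with s still evaluated
     strpbrk (s, "c")     -> strchr (s, 'c')
     strpbrk ("ab", "ba") -> &"ab"[0]  */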
11053
11054 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11055 to the call.
11056
11057 Return NULL_TREE if no simplification was possible, otherwise return the
11058 simplified form of the call as a tree.
11059
11060 The simplified form may be a constant or other expression which
11061 computes the same value, but in a more efficient manner (including
11062 calls to other builtin functions).
11063
11064 The call may contain arguments which need to be evaluated, but
11065 which are not useful to determine the result of the call. In
11066 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11067 COMPOUND_EXPR will be an argument which must be evaluated.
11068 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11069 COMPOUND_EXPR in the chain will contain the tree for the simplified
11070 form of the builtin function call. */
11071
11072 static tree
11073 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11074 {
11075 if (!validate_arg (s1, POINTER_TYPE)
11076 || !validate_arg (s2, POINTER_TYPE))
11077 return NULL_TREE;
11078 else
11079 {
11080 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11081
11082 /* If both arguments are constants, evaluate at compile-time. */
11083 if (p1 && p2)
11084 {
11085 const size_t r = strspn (p1, p2);
11086 return build_int_cst (size_type_node, r);
11087 }
11088
11089 /* If either argument is "", the result is 0. */
11090 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11091 /* Evaluate and ignore both arguments in case either one has
11092 side-effects. */
11093 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11094 s1, s2);
11095 return NULL_TREE;
11096 }
11097 }
11098
11099 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11100 to the call.
11101
11102 Return NULL_TREE if no simplification was possible, otherwise return the
11103 simplified form of the call as a tree.
11104
11105 The simplified form may be a constant or other expression which
11106 computes the same value, but in a more efficient manner (including
11107 calls to other builtin functions).
11108
11109 The call may contain arguments which need to be evaluated, but
11110 which are not useful to determine the result of the call. In
11111 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11112 COMPOUND_EXPR will be an argument which must be evaluated.
11113 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11114 COMPOUND_EXPR in the chain will contain the tree for the simplified
11115 form of the builtin function call. */
11116
11117 static tree
11118 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
11119 {
11120 if (!validate_arg (s1, POINTER_TYPE)
11121 || !validate_arg (s2, POINTER_TYPE))
11122 return NULL_TREE;
11123 else
11124 {
11125 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11126
11127 /* If both arguments are constants, evaluate at compile-time. */
11128 if (p1 && p2)
11129 {
11130 const size_t r = strcspn (p1, p2);
11131 return build_int_cst (size_type_node, r);
11132 }
11133
11134 /* If the first argument is "", the result is 0. */
11135 if (p1 && *p1 == '\0')
11136 {
11137 /* Evaluate and ignore argument s2 in case it has
11138 side-effects. */
11139 return omit_one_operand_loc (loc, size_type_node,
11140 size_zero_node, s2);
11141 }
11142
11143 /* If the second argument is "", return __builtin_strlen(s1). */
11144 if (p2 && *p2 == '\0')
11145 {
11146 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
11147
11148 /* If the replacement _DECL isn't initialized, don't do the
11149 transformation. */
11150 if (!fn)
11151 return NULL_TREE;
11152
11153 return build_call_expr_loc (loc, fn, 1, s1);
11154 }
11155 return NULL_TREE;
11156 }
11157 }
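
/* Examples (illustrative) of the strspn/strcspn folds above:

     strspn ("aab", "ab") -> 3, computed at compile time
     strspn (s, "")       -> 0, with s still evaluated
     strcspn ("", s)      -> 0, with s still evaluated
     strcspn (s, "")      -> strlen (s)  */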
11158
11159 /* Fold the next_arg or va_start call EXP. Return true if an error
11160 was produced, false otherwise; this keeps us from outputting the
11161 same error or warning two or three times. */
11162
11163 bool
11164 fold_builtin_next_arg (tree exp, bool va_start_p)
11165 {
11166 tree fntype = TREE_TYPE (current_function_decl);
11167 int nargs = call_expr_nargs (exp);
11168 tree arg;
11169 /* There is a good chance the current input_location points inside the
11170 definition of the va_start macro (perhaps on the token for the
11171 builtin) in a system header, where warnings would not be emitted.
11172 Use the location in real source code instead. */
11173 source_location current_location =
11174 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
11175 NULL);
11176
11177 if (!stdarg_p (fntype))
11178 {
11179 error ("%<va_start%> used in function with fixed args");
11180 return true;
11181 }
11182
11183 if (va_start_p)
11184 {
11185 if (nargs != 2)
11186 {
11187 error ("wrong number of arguments to function %<va_start%>");
11188 return true;
11189 }
11190 arg = CALL_EXPR_ARG (exp, 1);
11191 }
11192 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11193 when we checked the arguments and if needed issued a warning. */
11194 else
11195 {
11196 if (nargs == 0)
11197 {
11198 /* Evidently an out of date version of <stdarg.h>; can't validate
11199 va_start's second argument, but can still work as intended. */
11200 warning_at (current_location,
11201 OPT_Wvarargs,
11202 "%<__builtin_next_arg%> called without an argument");
11203 return true;
11204 }
11205 else if (nargs > 1)
11206 {
11207 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11208 return true;
11209 }
11210 arg = CALL_EXPR_ARG (exp, 0);
11211 }
11212
11213 if (TREE_CODE (arg) == SSA_NAME)
11214 arg = SSA_NAME_VAR (arg);
11215
11216 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11217 or __builtin_next_arg (0) the first time we see it, after checking
11218 the arguments and if needed issuing a warning. */
11219 if (!integer_zerop (arg))
11220 {
11221 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11222
11223 /* Strip off all nops for the sake of the comparison. This
11224 is not quite the same as STRIP_NOPS. It does more.
11225 We must also strip off INDIRECT_EXPR for C++ reference
11226 parameters. */
11227 while (CONVERT_EXPR_P (arg)
11228 || TREE_CODE (arg) == INDIRECT_REF)
11229 arg = TREE_OPERAND (arg, 0);
11230 if (arg != last_parm)
11231 {
11232 /* FIXME: Sometimes the tree optimizers hand us something
11233 other than the last argument even though the user did use
11234 the last argument. We just warn and carry on, which
11235 means wrong code may still be generated because
11236 of it. */
11237 warning_at (current_location,
11238 OPT_Wvarargs,
11239 "second parameter of %<va_start%> not last named argument");
11240 }
11241
11242 /* Undefined by C99 7.15.1.4p4 (va_start):
11243 "If the parameter parmN is declared with the register storage
11244 class, with a function or array type, or with a type that is
11245 not compatible with the type that results after application of
11246 the default argument promotions, the behavior is undefined."
11247 */
11248 else if (DECL_REGISTER (arg))
11249 {
11250 warning_at (current_location,
11251 OPT_Wvarargs,
11252 "undefined behaviour when second parameter of "
11253 "%<va_start%> is declared with %<register%> storage");
11254 }
11255
11256 /* We want to verify the second parameter just once before the tree
11257 optimizers are run and then avoid keeping it in the tree,
11258 as otherwise we could warn even for correct code like:
11259 void foo (int i, ...)
11260 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11261 if (va_start_p)
11262 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11263 else
11264 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11265 }
11266 return false;
11267 }
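
/* Example (illustrative) of the misuse diagnosed above:

     void f (int a, int b, ...)
     {
       va_list ap;
       va_start (ap, a);
       va_end (ap);
     }

   Here A is not the last named parameter, so the "not last named
   argument" warning above fires. */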
11268
11269
11270 /* Expand a call EXP to __builtin_object_size. */
11271
11272 static rtx
11273 expand_builtin_object_size (tree exp)
11274 {
11275 tree ost;
11276 int object_size_type;
11277 tree fndecl = get_callee_fndecl (exp);
11278
11279 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11280 {
11281 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11282 exp, fndecl);
11283 expand_builtin_trap ();
11284 return const0_rtx;
11285 }
11286
11287 ost = CALL_EXPR_ARG (exp, 1);
11288 STRIP_NOPS (ost);
11289
11290 if (TREE_CODE (ost) != INTEGER_CST
11291 || tree_int_cst_sgn (ost) < 0
11292 || compare_tree_int (ost, 3) > 0)
11293 {
11294 error ("%Klast argument of %D is not integer constant between 0 and 3",
11295 exp, fndecl);
11296 expand_builtin_trap ();
11297 return const0_rtx;
11298 }
11299
11300 object_size_type = tree_to_shwi (ost);
11301
11302 return object_size_type < 2 ? constm1_rtx : const0_rtx;
11303 }
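
/* Example (illustrative): when no earlier pass could compute a size,
   the fallback constants chosen above mean

     __builtin_object_size (p, 0) -> (size_t) -1   (maximum estimate)
     __builtin_object_size (p, 2) -> (size_t) 0    (minimum estimate)

   i.e. "unknown" is the largest possible object for types 0 and 1,
   and the smallest for types 2 and 3. */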
11304
11305 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11306 FCODE is the BUILT_IN_* to use.
11307 Return NULL_RTX if we failed; the caller should emit a normal call,
11308 otherwise try to get the result in TARGET, if convenient (and in
11309 mode MODE if that's convenient). */
11310
11311 static rtx
11312 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
11313 enum built_in_function fcode)
11314 {
11315 tree dest, src, len, size;
11316
11317 if (!validate_arglist (exp,
11318 POINTER_TYPE,
11319 fcode == BUILT_IN_MEMSET_CHK
11320 ? INTEGER_TYPE : POINTER_TYPE,
11321 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11322 return NULL_RTX;
11323
11324 dest = CALL_EXPR_ARG (exp, 0);
11325 src = CALL_EXPR_ARG (exp, 1);
11326 len = CALL_EXPR_ARG (exp, 2);
11327 size = CALL_EXPR_ARG (exp, 3);
11328
11329 if (! tree_fits_uhwi_p (size))
11330 return NULL_RTX;
11331
11332 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
11333 {
11334 tree fn;
11335
11336 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
11337 {
11338 warning_at (tree_nonartificial_location (exp),
11339 0, "%Kcall to %D will always overflow destination buffer",
11340 exp, get_callee_fndecl (exp));
11341 return NULL_RTX;
11342 }
11343
11344 fn = NULL_TREE;
11345 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11346 mem{cpy,pcpy,move,set} is available. */
11347 switch (fcode)
11348 {
11349 case BUILT_IN_MEMCPY_CHK:
11350 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
11351 break;
11352 case BUILT_IN_MEMPCPY_CHK:
11353 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
11354 break;
11355 case BUILT_IN_MEMMOVE_CHK:
11356 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
11357 break;
11358 case BUILT_IN_MEMSET_CHK:
11359 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
11360 break;
11361 default:
11362 break;
11363 }
11364
11365 if (! fn)
11366 return NULL_RTX;
11367
11368 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
11369 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11370 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11371 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11372 }
11373 else if (fcode == BUILT_IN_MEMSET_CHK)
11374 return NULL_RTX;
11375 else
11376 {
11377 unsigned int dest_align = get_pointer_alignment (dest);
11378
11379 /* If DEST is not a pointer type, call the normal function. */
11380 if (dest_align == 0)
11381 return NULL_RTX;
11382
11383 /* If SRC and DEST are the same (and not volatile), do nothing. */
11384 if (operand_equal_p (src, dest, 0))
11385 {
11386 tree expr;
11387
11388 if (fcode != BUILT_IN_MEMPCPY_CHK)
11389 {
11390 /* Evaluate and ignore LEN in case it has side-effects. */
11391 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11392 return expand_expr (dest, target, mode, EXPAND_NORMAL);
11393 }
11394
11395 expr = fold_build_pointer_plus (dest, len);
11396 return expand_expr (expr, target, mode, EXPAND_NORMAL);
11397 }
11398
11399 /* __memmove_chk special case. */
11400 if (fcode == BUILT_IN_MEMMOVE_CHK)
11401 {
11402 unsigned int src_align = get_pointer_alignment (src);
11403
11404 if (src_align == 0)
11405 return NULL_RTX;
11406
11407 /* If src is categorized for a readonly section we can use
11408 normal __memcpy_chk. */
11409 if (readonly_data_expr (src))
11410 {
11411 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
11412 if (!fn)
11413 return NULL_RTX;
11414 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
11415 dest, src, len, size);
11416 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11417 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11418 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11419 }
11420 }
11421 return NULL_RTX;
11422 }
11423 }
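
/* Examples (illustrative) of the expansions above:

     __memcpy_chk (d, s, n, (size_t) -1) -> memcpy (d, s, n)
     __memcpy_chk (d, s, 8, 4)           -> warning; checked call kept
     __memmove_chk (d, "lit", n, sz)     -> __memcpy_chk (d, "lit", n, sz)

   the last because a source in a read-only section cannot overlap a
   writable destination. */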
11424
11425 /* Emit warning if a buffer overflow is detected at compile time. */
11426
11427 static void
11428 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11429 {
11430 int is_strlen = 0;
11431 tree len, size;
11432 location_t loc = tree_nonartificial_location (exp);
11433
11434 switch (fcode)
11435 {
11436 case BUILT_IN_STRCPY_CHK:
11437 case BUILT_IN_STPCPY_CHK:
11438 /* For __strcat_chk the warning will be emitted only if overflowing
11439 by at least strlen (dest) + 1 bytes. */
11440 case BUILT_IN_STRCAT_CHK:
11441 len = CALL_EXPR_ARG (exp, 1);
11442 size = CALL_EXPR_ARG (exp, 2);
11443 is_strlen = 1;
11444 break;
11445 case BUILT_IN_STRNCAT_CHK:
11446 case BUILT_IN_STRNCPY_CHK:
11447 case BUILT_IN_STPNCPY_CHK:
11448 len = CALL_EXPR_ARG (exp, 2);
11449 size = CALL_EXPR_ARG (exp, 3);
11450 break;
11451 case BUILT_IN_SNPRINTF_CHK:
11452 case BUILT_IN_VSNPRINTF_CHK:
11453 len = CALL_EXPR_ARG (exp, 1);
11454 size = CALL_EXPR_ARG (exp, 3);
11455 break;
11456 default:
11457 gcc_unreachable ();
11458 }
11459
11460 if (!len || !size)
11461 return;
11462
11463 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
11464 return;
11465
11466 if (is_strlen)
11467 {
11468 len = c_strlen (len, 1);
11469 if (! len || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
11470 return;
11471 }
11472 else if (fcode == BUILT_IN_STRNCAT_CHK)
11473 {
11474 tree src = CALL_EXPR_ARG (exp, 1);
11475 if (! src || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
11476 return;
11477 src = c_strlen (src, 1);
11478 if (! src || ! tree_fits_uhwi_p (src))
11479 {
11480 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
11481 exp, get_callee_fndecl (exp));
11482 return;
11483 }
11484 else if (tree_int_cst_lt (src, size))
11485 return;
11486 }
11487 else if (! tree_fits_uhwi_p (len) || ! tree_int_cst_lt (size, len))
11488 return;
11489
11490 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
11491 exp, get_callee_fndecl (exp));
11492 }
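
/* Example (illustrative): for

     char buf[4];
     __builtin___strcpy_chk (buf, "hello",
                             __builtin_object_size (buf, 0));

   the source length 5 is not smaller than the object size 4, so the
   "will always overflow" warning above is emitted. */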
11493
11494 /* Emit warning if a buffer overflow is detected at compile time
11495 in __sprintf_chk/__vsprintf_chk calls. */
11496
11497 static void
11498 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11499 {
11500 tree size, len, fmt;
11501 const char *fmt_str;
11502 int nargs = call_expr_nargs (exp);
11503
11504 /* Verify the required arguments in the original call. */
11505
11506 if (nargs < 4)
11507 return;
11508 size = CALL_EXPR_ARG (exp, 2);
11509 fmt = CALL_EXPR_ARG (exp, 3);
11510
11511 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
11512 return;
11513
11514 /* Check whether the format is a literal string constant. */
11515 fmt_str = c_getstr (fmt);
11516 if (fmt_str == NULL)
11517 return;
11518
11519 if (!init_target_chars ())
11520 return;
11521
11522 /* If the format doesn't contain % args or %%, we know its size. */
11523 if (strchr (fmt_str, target_percent) == 0)
11524 len = build_int_cstu (size_type_node, strlen (fmt_str));
11525 /* If the format is "%s" and first ... argument is a string literal,
11526 we know it too. */
11527 else if (fcode == BUILT_IN_SPRINTF_CHK
11528 && strcmp (fmt_str, target_percent_s) == 0)
11529 {
11530 tree arg;
11531
11532 if (nargs < 5)
11533 return;
11534 arg = CALL_EXPR_ARG (exp, 4);
11535 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11536 return;
11537
11538 len = c_strlen (arg, 1);
11539 if (!len || ! tree_fits_uhwi_p (len))
11540 return;
11541 }
11542 else
11543 return;
11544
11545 if (! tree_int_cst_lt (len, size))
11546 warning_at (tree_nonartificial_location (exp),
11547 0, "%Kcall to %D will always overflow destination buffer",
11548 exp, get_callee_fndecl (exp));
11549 }
11550
11551 /* Emit warning if a free is called with address of a variable. */
11552
11553 static void
11554 maybe_emit_free_warning (tree exp)
11555 {
11556 tree arg = CALL_EXPR_ARG (exp, 0);
11557
11558 STRIP_NOPS (arg);
11559 if (TREE_CODE (arg) != ADDR_EXPR)
11560 return;
11561
11562 arg = get_base_address (TREE_OPERAND (arg, 0));
11563 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
11564 return;
11565
11566 if (SSA_VAR_P (arg))
11567 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
11568 "%Kattempt to free a non-heap object %qD", exp, arg);
11569 else
11570 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
11571 "%Kattempt to free a non-heap object", exp);
11572 }
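
/* Example (illustrative): code such as

     int i;
     free (&i);

   triggers the -Wfree-nonheap-object warning above, since &i resolves
   to the automatic variable i rather than heap storage. */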
11573
11574 /* Fold a call to __builtin_object_size with arguments PTR and OST,
11575 if possible. */
11576
11577 static tree
11578 fold_builtin_object_size (tree ptr, tree ost)
11579 {
11580 unsigned HOST_WIDE_INT bytes;
11581 int object_size_type;
11582
11583 if (!validate_arg (ptr, POINTER_TYPE)
11584 || !validate_arg (ost, INTEGER_TYPE))
11585 return NULL_TREE;
11586
11587 STRIP_NOPS (ost);
11588
11589 if (TREE_CODE (ost) != INTEGER_CST
11590 || tree_int_cst_sgn (ost) < 0
11591 || compare_tree_int (ost, 3) > 0)
11592 return NULL_TREE;
11593
11594 object_size_type = tree_to_shwi (ost);
11595
11596 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11597 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11598 and (size_t) 0 for types 2 and 3. */
11599 if (TREE_SIDE_EFFECTS (ptr))
11600 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
11601
11602 if (TREE_CODE (ptr) == ADDR_EXPR)
11603 {
11604 bytes = compute_builtin_object_size (ptr, object_size_type);
11605 if (wi::fits_to_tree_p (bytes, size_type_node))
11606 return build_int_cstu (size_type_node, bytes);
11607 }
11608 else if (TREE_CODE (ptr) == SSA_NAME)
11609 {
11610 /* If object size is not known yet, delay folding until
11611 later. Maybe subsequent passes will help determining
11612 it. */
11613 bytes = compute_builtin_object_size (ptr, object_size_type);
11614 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
11615 && wi::fits_to_tree_p (bytes, size_type_node))
11616 return build_int_cstu (size_type_node, bytes);
11617 }
11618
11619 return NULL_TREE;
11620 }
11621
11622 /* Builtins with folding operations that operate on "..." arguments
11623 need special handling; we need to store the arguments in a convenient
11624 data structure before attempting any folding. Fortunately there are
11625 only a few builtins that fall into this category. FNDECL is the
11626 function, EXP is the CALL_EXPR for the call. */
11627
11628 static tree
11629 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
11630 {
11631 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11632 tree ret = NULL_TREE;
11633
11634 switch (fcode)
11635 {
11636 case BUILT_IN_FPCLASSIFY:
11637 ret = fold_builtin_fpclassify (loc, args, nargs);
11638 break;
11639
11640 default:
11641 break;
11642 }
11643 if (ret)
11644 {
11645 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11646 SET_EXPR_LOCATION (ret, loc);
11647 TREE_NO_WARNING (ret) = 1;
11648 return ret;
11649 }
11650 return NULL_TREE;
11651 }
11652
11653 /* Initialize format string characters in the target charset. */
11654
11655 bool
11656 init_target_chars (void)
11657 {
11658 static bool init;
11659 if (!init)
11660 {
11661 target_newline = lang_hooks.to_target_charset ('\n');
11662 target_percent = lang_hooks.to_target_charset ('%');
11663 target_c = lang_hooks.to_target_charset ('c');
11664 target_s = lang_hooks.to_target_charset ('s');
11665 if (target_newline == 0 || target_percent == 0 || target_c == 0
11666 || target_s == 0)
11667 return false;
11668
11669 target_percent_c[0] = target_percent;
11670 target_percent_c[1] = target_c;
11671 target_percent_c[2] = '\0';
11672
11673 target_percent_s[0] = target_percent;
11674 target_percent_s[1] = target_s;
11675 target_percent_s[2] = '\0';
11676
11677 target_percent_s_newline[0] = target_percent;
11678 target_percent_s_newline[1] = target_s;
11679 target_percent_s_newline[2] = target_newline;
11680 target_percent_s_newline[3] = '\0';
11681
11682 init = true;
11683 }
11684 return true;
11685 }
11686
11687 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
11688 and no overflow/underflow occurred. INEXACT is true if M was not
11689 exactly calculated. TYPE is the tree type for the result. This
11690 function assumes that you cleared the MPFR flags before calculating
11691 M, so that any exception flag raised by the calculation can be
11692 detected here. Return NULL_TREE if any checks fail. */
11693
11694 static tree
11695 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
11696 {
11697 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11698 overflow/underflow occurred. If -frounding-math, proceed iff the
11699 result of calling FUNC was exact. */
11700 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
11701 && (!flag_rounding_math || !inexact))
11702 {
11703 REAL_VALUE_TYPE rr;
11704
11705 real_from_mpfr (&rr, m, type, GMP_RNDN);
11706 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
11707 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
11708 but the mpfr_t is not, then we underflowed in the
11709 conversion. */
11710 if (real_isfinite (&rr)
11711 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
11712 {
11713 REAL_VALUE_TYPE rmode;
11714
11715 real_convert (&rmode, TYPE_MODE (type), &rr);
11716 /* Proceed iff the specified mode can hold the value. */
11717 if (real_identical (&rmode, &rr))
11718 return build_real (type, rmode);
11719 }
11720 }
11721 return NULL_TREE;
11722 }
11723
11724 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
11725 number and no overflow/underflow occurred. INEXACT is true if M
11726 was not exactly calculated. TYPE is the tree type for the result.
11727 This function assumes that you cleared the MPFR flags before
11728 calculating M, so that any exception flag raised by the calculation
11729 can be detected here. Return NULL_TREE if any checks fail; if
11730 FORCE_CONVERT is true, the checks are bypassed. */
11731
11732 static tree
11733 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
11734 {
11735 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11736 overflow/underflow occurred. If -frounding-math, proceed iff the
11737 result of calling FUNC was exact. */
11738 if (force_convert
11739 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
11740 && !mpfr_overflow_p () && !mpfr_underflow_p ()
11741 && (!flag_rounding_math || !inexact)))
11742 {
11743 REAL_VALUE_TYPE re, im;
11744
11745 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
11746 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
11747 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
11748 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
11749 but the mpfr_t is not, then we underflowed in the
11750 conversion. */
11751 if (force_convert
11752 || (real_isfinite (&re) && real_isfinite (&im)
11753 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
11754 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
11755 {
11756 REAL_VALUE_TYPE re_mode, im_mode;
11757
11758 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
11759 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
11760 /* Proceed iff the specified mode can hold the value. */
11761 if (force_convert
11762 || (real_identical (&re_mode, &re)
11763 && real_identical (&im_mode, &im)))
11764 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
11765 build_real (TREE_TYPE (type), im_mode));
11766 }
11767 }
11768 return NULL_TREE;
11769 }
11770
11771 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
11772 FUNC on it and return the resulting value as a tree with type TYPE.
11773 If MIN and/or MAX are not NULL, then the supplied ARG must be
11774 within those bounds. If INCLUSIVE is true, then MIN/MAX are
11775 acceptable values, otherwise they are not. The mpfr precision is
11776 set to the precision of TYPE. We assume that function FUNC returns
11777 zero if the result could be calculated exactly within the requested
11778 precision. */
11779
11780 static tree
11781 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
11782 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
11783 bool inclusive)
11784 {
11785 tree result = NULL_TREE;
11786
11787 STRIP_NOPS (arg);
11788
11789 /* To proceed, MPFR must exactly represent the target floating point
11790 format, which only happens when the target base equals two. */
11791 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11792 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
11793 {
11794 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
11795
11796 if (real_isfinite (ra)
11797 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
11798 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
11799 {
11800 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11801 const int prec = fmt->p;
11802 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
11803 int inexact;
11804 mpfr_t m;
11805
11806 mpfr_init2 (m, prec);
11807 mpfr_from_real (m, ra, GMP_RNDN);
11808 mpfr_clear_flags ();
11809 inexact = func (m, m, rnd);
11810 result = do_mpfr_ckconv (m, type, inexact);
11811 mpfr_clear (m);
11812 }
11813 }
11814
11815 return result;
11816 }
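
/* Example (illustrative): a call such as

     double d = sin (1.0);

   typically reaches do_mpfr_arg1 with ARG = 1.0 and FUNC = mpfr_sin,
   and is folded to the REAL_CST for sin (1.0), correctly rounded for
   the target's double format. */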
11817
11818 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
11819 FUNC on it and return the resulting value as a tree with type TYPE.
11820 The mpfr precision is set to the precision of TYPE. We assume that
11821 function FUNC returns zero if the result could be calculated
11822 exactly within the requested precision. */
11823
11824 static tree
11825 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
11826 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
11827 {
11828 tree result = NULL_TREE;
11829
11830 STRIP_NOPS (arg1);
11831 STRIP_NOPS (arg2);
11832
11833 /* To proceed, MPFR must exactly represent the target floating point
11834 format, which only happens when the target base equals two. */
11835 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11836 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
11837 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
11838 {
11839 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
11840 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
11841
11842 if (real_isfinite (ra1) && real_isfinite (ra2))
11843 {
11844 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11845 const int prec = fmt->p;
11846 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
11847 int inexact;
11848 mpfr_t m1, m2;
11849
11850 mpfr_inits2 (prec, m1, m2, NULL);
11851 mpfr_from_real (m1, ra1, GMP_RNDN);
11852 mpfr_from_real (m2, ra2, GMP_RNDN);
11853 mpfr_clear_flags ();
11854 inexact = func (m1, m1, m2, rnd);
11855 result = do_mpfr_ckconv (m1, type, inexact);
11856 mpfr_clears (m1, m2, NULL);
11857 }
11858 }
11859
11860 return result;
11861 }
11862
11863 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
11864 FUNC on it and return the resulting value as a tree with type TYPE.
11865 The mpfr precision is set to the precision of TYPE. We assume that
11866 function FUNC returns zero if the result could be calculated
11867 exactly within the requested precision. */
11868
11869 static tree
11870 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
11871 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
11872 {
11873 tree result = NULL_TREE;
11874
11875 STRIP_NOPS (arg1);
11876 STRIP_NOPS (arg2);
11877 STRIP_NOPS (arg3);
11878
11879 /* To proceed, MPFR must exactly represent the target floating point
11880 format, which only happens when the target base equals two. */
11881 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11882 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
11883 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
11884 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
11885 {
11886 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
11887 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
11888 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
11889
11890 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
11891 {
11892 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11893 const int prec = fmt->p;
11894 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
11895 int inexact;
11896 mpfr_t m1, m2, m3;
11897
11898 mpfr_inits2 (prec, m1, m2, m3, NULL);
11899 mpfr_from_real (m1, ra1, GMP_RNDN);
11900 mpfr_from_real (m2, ra2, GMP_RNDN);
11901 mpfr_from_real (m3, ra3, GMP_RNDN);
11902 mpfr_clear_flags ();
11903 inexact = func (m1, m1, m2, m3, rnd);
11904 result = do_mpfr_ckconv (m1, type, inexact);
11905 mpfr_clears (m1, m2, m3, NULL);
11906 }
11907 }
11908
11909 return result;
11910 }
11911
11912 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
11913 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
11914 If ARG_SINP and ARG_COSP are NULL then the result is returned
11915 as a complex value.
11916 The type is taken from the type of ARG and is used for setting the
11917 precision of the calculation and results. */
11918
11919 static tree
11920 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
11921 {
11922 tree const type = TREE_TYPE (arg);
11923 tree result = NULL_TREE;
11924
11925 STRIP_NOPS (arg);
11926
11927 /* To proceed, MPFR must exactly represent the target floating point
11928 format, which only happens when the target base equals two. */
11929 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11930 && TREE_CODE (arg) == REAL_CST
11931 && !TREE_OVERFLOW (arg))
11932 {
11933 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
11934
11935 if (real_isfinite (ra))
11936 {
11937 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11938 const int prec = fmt->p;
11939 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
11940 tree result_s, result_c;
11941 int inexact;
11942 mpfr_t m, ms, mc;
11943
11944 mpfr_inits2 (prec, m, ms, mc, NULL);
11945 mpfr_from_real (m, ra, GMP_RNDN);
11946 mpfr_clear_flags ();
11947 inexact = mpfr_sin_cos (ms, mc, m, rnd);
11948 result_s = do_mpfr_ckconv (ms, type, inexact);
11949 result_c = do_mpfr_ckconv (mc, type, inexact);
11950 mpfr_clears (m, ms, mc, NULL);
11951 if (result_s && result_c)
11952 {
11953 /* If we are to return the result as a complex value, do so. */
11954 if (!arg_sinp && !arg_cosp)
11955 return build_complex (build_complex_type (type),
11956 result_c, result_s);
11957
11958 /* Dereference the sin/cos pointer arguments. */
11959 arg_sinp = build_fold_indirect_ref (arg_sinp);
11960 arg_cosp = build_fold_indirect_ref (arg_cosp);
11961 /* Proceed iff valid pointer types were passed in. */
11962 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
11963 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
11964 {
11965 /* Set the values. */
11966 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
11967 result_s);
11968 TREE_SIDE_EFFECTS (result_s) = 1;
11969 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
11970 result_c);
11971 TREE_SIDE_EFFECTS (result_c) = 1;
11972 /* Combine the assignments into a compound expr. */
11973 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
11974 result_s, result_c));
11975 }
11976 }
11977 }
11978 }
11979 return result;
11980 }
11981
11982 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
11983 two-argument mpfr order N Bessel function FUNC on them and return
11984 the resulting value as a tree with type TYPE. The mpfr precision
11985 is set to the precision of TYPE. We assume that function FUNC
11986 returns zero if the result could be calculated exactly within the
11987 requested precision. */
11988 static tree
11989 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
11990 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
11991 const REAL_VALUE_TYPE *min, bool inclusive)
11992 {
11993 tree result = NULL_TREE;
11994
11995 STRIP_NOPS (arg1);
11996 STRIP_NOPS (arg2);
11997
11998 /* To proceed, MPFR must exactly represent the target floating point
11999 format, which only happens when the target base equals two. */
12000 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12001 && tree_fits_shwi_p (arg1)
12002 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12003 {
12004 const HOST_WIDE_INT n = tree_to_shwi (arg1);
12005 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
12006
12007 if (n == (long)n
12008 && real_isfinite (ra)
12009 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
12010 {
12011 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12012 const int prec = fmt->p;
12013 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12014 int inexact;
12015 mpfr_t m;
12016
12017 mpfr_init2 (m, prec);
12018 mpfr_from_real (m, ra, GMP_RNDN);
12019 mpfr_clear_flags ();
12020 inexact = func (m, n, m, rnd);
12021 result = do_mpfr_ckconv (m, type, inexact);
12022 mpfr_clear (m);
12023 }
12024 }
12025
12026 return result;
12027 }

/* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to
   compute the remainder and the low quotient bits; the folded result
   assigns those bits through ARG_QUO and yields the remainder.  The
   type is taken from the type of ARG0 and is used to set the
   precision of the calculation and of the result.  */

static tree
do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
{
  tree const type = TREE_TYPE (arg0);
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
    {
      const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
      const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);

      if (real_isfinite (ra0) && real_isfinite (ra1))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  tree result_rem;
	  long integer_quo;
	  mpfr_t m0, m1;

	  mpfr_inits2 (prec, m0, m1, NULL);
	  mpfr_from_real (m0, ra0, GMP_RNDN);
	  mpfr_from_real (m1, ra1, GMP_RNDN);
	  mpfr_clear_flags ();
	  mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
	  /* Remquo is independent of the rounding mode, so pass
	     inexact=0 to do_mpfr_ckconv().  */
	  result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
	  mpfr_clears (m0, m1, NULL);
	  if (result_rem)
	    {
	      /* MPFR calculates quo in the host's long, so quo may hold
		 more bits than the target int can if sizeof (host long)
		 > sizeof (target int).  This can happen even for native
		 compilers in LP64 mode.  In these cases, reduce the quo
		 value modulo the largest number that the target int can
		 hold, leaving one bit for the sign; e.g., with a 64-bit
		 long and a 32-bit int, quo is reduced modulo 2^31.  */
	      if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
		integer_quo %= (long) (1UL << (INT_TYPE_SIZE - 1));

	      /* Dereference the quo pointer argument.  */
	      arg_quo = build_fold_indirect_ref (arg_quo);
	      /* Proceed iff a valid pointer type was passed in.  */
	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
		{
		  /* Set the value.  */
		  tree result_quo
		    = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
				   build_int_cst (TREE_TYPE (arg_quo),
						  integer_quo));
		  TREE_SIDE_EFFECTS (result_quo) = 1;
		  /* Combine the quo assignment with the rem.  */
		  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						    result_quo, result_rem));
		}
	    }
	}
    }
  return result;
}
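
/* Worked example (illustrative, not from the original sources):
   folding remquo (5.0, 3.0, &q) computes the quotient rounded to
   nearest, 2, and the remainder 5.0 - 3.0 * 2 = -1.0, so the helper
   above returns the equivalent of

     (*q = 2, -1.0)

   i.e. a COMPOUND_EXPR whose first operand stores the quotient bits
   and whose value is the remainder.  */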

/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
   resulting value as a tree with type TYPE.  The mpfr precision is
   set to the precision of TYPE.  We assume that this mpfr function
   returns zero if the result could be calculated exactly within the
   requested precision.  In addition, the int pointed to by ARG_SG
   will be set to the appropriate signgam value (-1 or 1).  */

static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
	 negative integer.  */
      if (real_isfinite (ra)
	  && ra->cl != rvc_zero
	  && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  int inexact, sg;
	  mpfr_t m;
	  tree result_lg;

	  mpfr_init2 (m, prec);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  mpfr_clear_flags ();
	  inexact = mpfr_lgamma (m, &sg, m, rnd);
	  result_lg = do_mpfr_ckconv (m, type, inexact);
	  mpfr_clear (m);
	  if (result_lg)
	    {
	      tree result_sg;

	      /* Dereference the arg_sg pointer argument.  */
	      arg_sg = build_fold_indirect_ref (arg_sg);
	      /* Assign the signgam value into *arg_sg.  */
	      result_sg = fold_build2 (MODIFY_EXPR,
				       TREE_TYPE (arg_sg), arg_sg,
				       build_int_cst (TREE_TYPE (arg_sg), sg));
	      TREE_SIDE_EFFECTS (result_sg) = 1;
	      /* Combine the signgam assignment with the lgamma result.  */
	      result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						result_sg, result_lg));
	    }
	}
    }

  return result;
}
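
/* Illustrative note (not from the original sources): lgamma_r (x, &sg)
   computes log (|gamma (x)|) and stores the sign of gamma (x) in *sg.
   Folding lgamma_r (-2.5, &sg), for example, yields a COMPOUND_EXPR
   equivalent to (*sg = -1, log (|gamma (-2.5)|)), since gamma is
   negative on the interval (-3, -2).  */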

/* If argument ARG is a COMPLEX_CST, call the one-argument mpc
   function FUNC on it and return the resulting value as a tree with
   type TYPE.  The mpfr precision is set to the precision of TYPE.  We
   assume that function FUNC returns zero if the result could be
   calculated exactly within the requested precision.  */

static tree
do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
      const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));

      if (real_isfinite (re) && real_isfinite (im))
	{
	  const struct real_format *const fmt
	    = REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
	  int inexact;
	  mpc_t m;

	  mpc_init2 (m, prec);
	  mpfr_from_real (mpc_realref (m), re, rnd);
	  mpfr_from_real (mpc_imagref (m), im, rnd);
	  mpfr_clear_flags ();
	  inexact = func (m, m, crnd);
	  result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
	  mpc_clear (m);
	}
    }

  return result;
}
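
/* Usage sketch (hypothetical caller, for illustration): a folder for
   csin (z) can simply pass the matching mpc entry point, e.g.

     folded = do_mpc_arg1 (arg, type, mpc_sin);

   mpc_sin, mpc_cos, mpc_exp, mpc_log and friends all share the
   int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t) signature expected here.  */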

/* If arguments ARG0 and ARG1 are COMPLEX_CSTs, call the two-argument
   mpc function FUNC on them and return the resulting value as a tree
   with type TYPE.  The mpfr precision is set to the precision of
   TYPE.  We assume that function FUNC returns zero if the result
   could be calculated exactly within the requested precision.  If
   DO_NONFINITE is true, then fold expressions containing Inf or NaN
   in the arguments and/or results.  */

tree
do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
	     int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
      && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
      const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
      const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
      const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));

      if (do_nonfinite
	  || (real_isfinite (re0) && real_isfinite (im0)
	      && real_isfinite (re1) && real_isfinite (im1)))
	{
	  const struct real_format *const fmt
	    = REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
	  int inexact;
	  mpc_t m0, m1;

	  mpc_init2 (m0, prec);
	  mpc_init2 (m1, prec);
	  mpfr_from_real (mpc_realref (m0), re0, rnd);
	  mpfr_from_real (mpc_imagref (m0), im0, rnd);
	  mpfr_from_real (mpc_realref (m1), re1, rnd);
	  mpfr_from_real (mpc_imagref (m1), im1, rnd);
	  mpfr_clear_flags ();
	  inexact = func (m0, m0, m1, crnd);
	  result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
	  mpc_clear (m0);
	  mpc_clear (m1);
	}
    }

  return result;
}
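
/* Usage sketch (hypothetical callers, for illustration): folding
   cpow (z0, z1) can pass mpc_pow with DO_NONFINITE false, while a
   caller that must honor Inf/NaN semantics for complex arithmetic
   can pass mpc_mul or mpc_div with DO_NONFINITE true:

     folded = do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0,
			   mpc_pow);

   mpc_pow, mpc_mul and mpc_div share the four-argument signature
   expected by FUNC above.  */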

/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node before the warning is generated.  */

tree
fold_call_stmt (gcall *stmt, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = gimple_call_fndecl (stmt);
  location_t loc = gimple_location (stmt);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      && !gimple_call_va_arg_pack_p (stmt))
    {
      int nargs = gimple_call_num_args (stmt);
      tree *args = (nargs > 0
		    ? gimple_call_arg_ptr (stmt, 0)
		    : &error_mark_node);

      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	return targetm.fold_builtin (fndecl, nargs, args, ignore);
      else
	{
	  ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
	  if (ret)
	    {
	      /* Propagate location information from the original call
		 to the expansion of the builtin.  Otherwise things like
		 maybe_emit_chk_warning, which operate on the expansion
		 of a builtin, will use the wrong location information.  */
	      if (gimple_has_location (stmt))
		{
		  tree realret = ret;
		  if (TREE_CODE (ret) == NOP_EXPR)
		    realret = TREE_OPERAND (ret, 0);
		  if (CAN_HAVE_LOCATION_P (realret)
		      && !EXPR_HAS_LOCATION (realret))
		    SET_EXPR_LOCATION (realret, loc);
		  return realret;
		}
	      return ret;
	    }
	}
    }
  return NULL_TREE;
}
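
/* Illustrative note (not from the original sources): given a gimple
   statement for, say, "tmp = sqrt (4.0);", fold_call_stmt returns the
   REAL_CST 2.0, carrying over the location of the original call so
   that later diagnostics point at the right source line.  */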

/* Look up the explicit builtin declaration that corresponds to DECL
   and set ASMSPEC as its user assembler name.  DECL must be a
   function decl that declares a builtin.  */

void
set_builtin_user_assembler_name (tree decl, const char *asmspec)
{
  tree builtin;
  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
	      && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
	      && asmspec != 0);

  builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
  set_user_assembler_name (builtin, asmspec);
  switch (DECL_FUNCTION_CODE (decl))
    {
    case BUILT_IN_MEMCPY:
      init_block_move_fn (asmspec);
      memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
      break;
    case BUILT_IN_MEMSET:
      init_block_clear_fn (asmspec);
      memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
      break;
    case BUILT_IN_MEMMOVE:
      memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
      break;
    case BUILT_IN_MEMCMP:
      memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
      break;
    case BUILT_IN_ABORT:
      abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
      break;
    case BUILT_IN_FFS:
      if (INT_TYPE_SIZE < BITS_PER_WORD)
	{
	  set_user_assembler_libfunc ("ffs", asmspec);
	  set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
						       MODE_INT, 0), "ffs");
	}
      break;
    default:
      break;
    }
}
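
/* Illustrative example (not part of the original file): a user who
   redirects memcpy at the source level with

     extern void *memcpy (void *, const void *, __SIZE_TYPE__)
       __asm__ ("xmemcpy");

   causes the front end to call the function above; afterwards both
   expanded block moves and library fallbacks emit calls to the
   hypothetical xmemcpy instead of memcpy.  */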

/* Return true if DECL is a builtin that expands to a constant or
   similarly simple code.  */

bool
is_simple_builtin (tree decl)
{
  if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
	/* Builtins that expand to constants.  */
      case BUILT_IN_CONSTANT_P:
      case BUILT_IN_EXPECT:
      case BUILT_IN_OBJECT_SIZE:
      case BUILT_IN_UNREACHABLE:
	/* Simple register moves or loads from stack.  */
      case BUILT_IN_ASSUME_ALIGNED:
      case BUILT_IN_RETURN_ADDRESS:
      case BUILT_IN_EXTRACT_RETURN_ADDR:
      case BUILT_IN_FROB_RETURN_ADDR:
      case BUILT_IN_RETURN:
      case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_VA_END:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
	/* Builtins that return the exception state or copy values
	   between exception regions.  */
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_COPY_VALUES:
	return true;

      default:
	return false;
      }

  return false;
}
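
/* Illustrative note (not from the original sources): cost heuristics
   can use this predicate to treat calls such as
   __builtin_expect (x, 1) or __builtin_return_address (0) as
   essentially free when estimating the size of a function.  */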

/* Return true if DECL is a builtin that is not expensive, i.e., one
   that is most probably expanded inline into reasonably simple code.
   This is a superset of is_simple_builtin.  */

bool
is_inexpensive_builtin (tree decl)
{
  if (!decl)
    return false;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
    return true;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
      case BUILT_IN_ABS:
      case BUILT_IN_ALLOCA:
      case BUILT_IN_ALLOCA_WITH_ALIGN:
      case BUILT_IN_BSWAP16:
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
      case BUILT_IN_CLZ:
      case BUILT_IN_CLZIMAX:
      case BUILT_IN_CLZL:
      case BUILT_IN_CLZLL:
      case BUILT_IN_CTZ:
      case BUILT_IN_CTZIMAX:
      case BUILT_IN_CTZL:
      case BUILT_IN_CTZLL:
      case BUILT_IN_FFS:
      case BUILT_IN_FFSIMAX:
      case BUILT_IN_FFSL:
      case BUILT_IN_FFSLL:
      case BUILT_IN_IMAXABS:
      case BUILT_IN_FINITE:
      case BUILT_IN_FINITEF:
      case BUILT_IN_FINITEL:
      case BUILT_IN_FINITED32:
      case BUILT_IN_FINITED64:
      case BUILT_IN_FINITED128:
      case BUILT_IN_FPCLASSIFY:
      case BUILT_IN_ISFINITE:
      case BUILT_IN_ISINF_SIGN:
      case BUILT_IN_ISINF:
      case BUILT_IN_ISINFF:
      case BUILT_IN_ISINFL:
      case BUILT_IN_ISINFD32:
      case BUILT_IN_ISINFD64:
      case BUILT_IN_ISINFD128:
      case BUILT_IN_ISNAN:
      case BUILT_IN_ISNANF:
      case BUILT_IN_ISNANL:
      case BUILT_IN_ISNAND32:
      case BUILT_IN_ISNAND64:
      case BUILT_IN_ISNAND128:
      case BUILT_IN_ISNORMAL:
      case BUILT_IN_ISGREATER:
      case BUILT_IN_ISGREATEREQUAL:
      case BUILT_IN_ISLESS:
      case BUILT_IN_ISLESSEQUAL:
      case BUILT_IN_ISLESSGREATER:
      case BUILT_IN_ISUNORDERED:
      case BUILT_IN_VA_ARG_PACK:
      case BUILT_IN_VA_ARG_PACK_LEN:
      case BUILT_IN_VA_COPY:
      case BUILT_IN_TRAP:
      case BUILT_IN_SAVEREGS:
      case BUILT_IN_POPCOUNTL:
      case BUILT_IN_POPCOUNTLL:
      case BUILT_IN_POPCOUNTIMAX:
      case BUILT_IN_POPCOUNT:
      case BUILT_IN_PARITYL:
      case BUILT_IN_PARITYLL:
      case BUILT_IN_PARITYIMAX:
      case BUILT_IN_PARITY:
      case BUILT_IN_LABS:
      case BUILT_IN_LLABS:
      case BUILT_IN_PREFETCH:
      case BUILT_IN_ACC_ON_DEVICE:
	return true;

      default:
	return is_simple_builtin (decl);
      }

  return false;
}
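
/* Illustrative note (not from the original sources): unlike
   is_simple_builtin, this predicate also accepts builtins such as
   __builtin_popcount or __builtin_bswap32 that typically expand to a
   single instruction or a short libcall, so callers estimating call
   cost can prefer it when deciding whether a call is cheap.  */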