1 /* Expand builtin functions.
2 Copyright (C) 1988-2015 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "machmode.h"
25 #include "rtl.h"
26 #include "hash-set.h"
27 #include "vec.h"
28 #include "double-int.h"
29 #include "input.h"
30 #include "alias.h"
31 #include "symtab.h"
32 #include "wide-int.h"
33 #include "inchash.h"
34 #include "tree.h"
35 #include "fold-const.h"
36 #include "stringpool.h"
37 #include "stor-layout.h"
38 #include "calls.h"
39 #include "varasm.h"
40 #include "tree-object-size.h"
41 #include "realmpfr.h"
42 #include "predict.h"
43 #include "hashtab.h"
44 #include "hard-reg-set.h"
45 #include "function.h"
46 #include "cfgrtl.h"
47 #include "basic-block.h"
48 #include "tree-ssa-alias.h"
49 #include "internal-fn.h"
50 #include "gimple-expr.h"
51 #include "is-a.h"
52 #include "gimple.h"
53 #include "flags.h"
54 #include "regs.h"
55 #include "except.h"
56 #include "insn-config.h"
57 #include "statistics.h"
58 #include "real.h"
59 #include "fixed-value.h"
60 #include "expmed.h"
61 #include "dojump.h"
62 #include "explow.h"
63 #include "emit-rtl.h"
64 #include "stmt.h"
65 #include "expr.h"
66 #include "insn-codes.h"
67 #include "optabs.h"
68 #include "libfuncs.h"
69 #include "recog.h"
70 #include "output.h"
71 #include "typeclass.h"
72 #include "tm_p.h"
73 #include "target.h"
74 #include "langhooks.h"
75 #include "tree-ssanames.h"
76 #include "tree-dfa.h"
77 #include "value-prof.h"
78 #include "diagnostic-core.h"
79 #include "builtins.h"
80 #include "asan.h"
81 #include "cilk.h"
82 #include "ipa-ref.h"
83 #include "lto-streamer.h"
84 #include "cgraph.h"
85 #include "tree-chkp.h"
86 #include "rtl-chkp.h"
87 #include "gomp-constants.h"
88
89
90 static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));
91
92 struct target_builtins default_target_builtins;
93 #if SWITCHABLE_TARGET
94 struct target_builtins *this_target_builtins = &default_target_builtins;
95 #endif
96
97 /* Define the names of the builtin function types and codes. */
98 const char *const built_in_class_names[BUILT_IN_LAST]
99 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
100
101 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
102 const char * built_in_names[(int) END_BUILTINS] =
103 {
104 #include "builtins.def"
105 };
106 #undef DEF_BUILTIN
107
 108 /* Set up an array of builtin_info_type; make sure each element's decl is
 109    initialized to NULL_TREE.  */
110 builtin_info_type builtin_info[(int)END_BUILTINS];
111
112 /* Non-zero if __builtin_constant_p should be folded right away. */
113 bool force_folding_builtin_constant_p;
114
115 static rtx c_readstr (const char *, machine_mode);
116 static int target_char_cast (tree, char *);
117 static rtx get_memory_rtx (tree, tree);
118 static int apply_args_size (void);
119 static int apply_result_size (void);
120 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
121 static rtx result_vector (int, rtx);
122 #endif
123 static void expand_builtin_prefetch (tree);
124 static rtx expand_builtin_apply_args (void);
125 static rtx expand_builtin_apply_args_1 (void);
126 static rtx expand_builtin_apply (rtx, rtx, rtx);
127 static void expand_builtin_return (rtx);
128 static enum type_class type_to_class (tree);
129 static rtx expand_builtin_classify_type (tree);
130 static void expand_errno_check (tree, rtx);
131 static rtx expand_builtin_mathfn (tree, rtx, rtx);
132 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
133 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
134 static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
135 static rtx expand_builtin_interclass_mathfn (tree, rtx);
136 static rtx expand_builtin_sincos (tree);
137 static rtx expand_builtin_cexpi (tree, rtx);
138 static rtx expand_builtin_int_roundingfn (tree, rtx);
139 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
140 static rtx expand_builtin_next_arg (void);
141 static rtx expand_builtin_va_start (tree);
142 static rtx expand_builtin_va_end (tree);
143 static rtx expand_builtin_va_copy (tree);
144 static rtx expand_builtin_memcmp (tree, rtx, machine_mode);
145 static rtx expand_builtin_strcmp (tree, rtx);
146 static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
147 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, machine_mode);
148 static rtx expand_builtin_memcpy (tree, rtx);
149 static rtx expand_builtin_memcpy_with_bounds (tree, rtx);
150 static rtx expand_builtin_memcpy_args (tree, tree, tree, rtx, tree);
151 static rtx expand_builtin_mempcpy (tree, rtx, machine_mode);
152 static rtx expand_builtin_mempcpy_with_bounds (tree, rtx, machine_mode);
153 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
154 machine_mode, int, tree);
155 static rtx expand_builtin_strcpy (tree, rtx);
156 static rtx expand_builtin_strcpy_args (tree, tree, rtx);
157 static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
158 static rtx expand_builtin_strncpy (tree, rtx);
159 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, machine_mode);
160 static rtx expand_builtin_memset (tree, rtx, machine_mode);
161 static rtx expand_builtin_memset_with_bounds (tree, rtx, machine_mode);
162 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
163 static rtx expand_builtin_bzero (tree);
164 static rtx expand_builtin_strlen (tree, rtx, machine_mode);
165 static rtx expand_builtin_alloca (tree, bool);
166 static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
167 static rtx expand_builtin_frame_address (tree, tree);
168 static tree stabilize_va_list_loc (location_t, tree, int);
169 static rtx expand_builtin_expect (tree, rtx);
170 static tree fold_builtin_constant_p (tree);
171 static tree fold_builtin_classify_type (tree);
172 static tree fold_builtin_strlen (location_t, tree, tree);
173 static tree fold_builtin_inf (location_t, tree, int);
174 static tree fold_builtin_nan (tree, tree, int);
175 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
176 static bool validate_arg (const_tree, enum tree_code code);
177 static bool integer_valued_real_p (tree);
178 static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
179 static rtx expand_builtin_fabs (tree, rtx, rtx);
180 static rtx expand_builtin_signbit (tree, rtx);
181 static tree fold_builtin_sqrt (location_t, tree, tree);
182 static tree fold_builtin_cbrt (location_t, tree, tree);
183 static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
184 static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
185 static tree fold_builtin_cos (location_t, tree, tree, tree);
186 static tree fold_builtin_cosh (location_t, tree, tree, tree);
187 static tree fold_builtin_tan (tree, tree);
188 static tree fold_builtin_trunc (location_t, tree, tree);
189 static tree fold_builtin_floor (location_t, tree, tree);
190 static tree fold_builtin_ceil (location_t, tree, tree);
191 static tree fold_builtin_round (location_t, tree, tree);
192 static tree fold_builtin_int_roundingfn (location_t, tree, tree);
193 static tree fold_builtin_bitop (tree, tree);
194 static tree fold_builtin_strchr (location_t, tree, tree, tree);
195 static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
196 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
197 static tree fold_builtin_strcmp (location_t, tree, tree);
198 static tree fold_builtin_strncmp (location_t, tree, tree, tree);
199 static tree fold_builtin_signbit (location_t, tree, tree);
200 static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
201 static tree fold_builtin_isascii (location_t, tree);
202 static tree fold_builtin_toascii (location_t, tree);
203 static tree fold_builtin_isdigit (location_t, tree);
204 static tree fold_builtin_fabs (location_t, tree, tree);
205 static tree fold_builtin_abs (location_t, tree, tree);
206 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
207 enum tree_code);
208 static tree fold_builtin_0 (location_t, tree);
209 static tree fold_builtin_1 (location_t, tree, tree);
210 static tree fold_builtin_2 (location_t, tree, tree, tree);
211 static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
212 static tree fold_builtin_varargs (location_t, tree, tree*, int);
213
214 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
215 static tree fold_builtin_strstr (location_t, tree, tree, tree);
216 static tree fold_builtin_strrchr (location_t, tree, tree, tree);
217 static tree fold_builtin_strspn (location_t, tree, tree);
218 static tree fold_builtin_strcspn (location_t, tree, tree);
219
220 static rtx expand_builtin_object_size (tree);
221 static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
222 enum built_in_function);
223 static void maybe_emit_chk_warning (tree, enum built_in_function);
224 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
225 static void maybe_emit_free_warning (tree);
226 static tree fold_builtin_object_size (tree, tree);
227
228 unsigned HOST_WIDE_INT target_newline;
229 unsigned HOST_WIDE_INT target_percent;
230 static unsigned HOST_WIDE_INT target_c;
231 static unsigned HOST_WIDE_INT target_s;
232 char target_percent_c[3];
233 char target_percent_s[3];
234 char target_percent_s_newline[4];
235 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
236 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
237 static tree do_mpfr_arg2 (tree, tree, tree,
238 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
239 static tree do_mpfr_arg3 (tree, tree, tree, tree,
240 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
241 static tree do_mpfr_sincos (tree, tree, tree);
242 static tree do_mpfr_bessel_n (tree, tree, tree,
243 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
244 const REAL_VALUE_TYPE *, bool);
245 static tree do_mpfr_remquo (tree, tree, tree);
246 static tree do_mpfr_lgamma_r (tree, tree, tree);
247 static void expand_builtin_sync_synchronize (void);
248
 249 /* Return true if NAME starts with __builtin_, __sync_ or __atomic_.  */
250
251 static bool
252 is_builtin_name (const char *name)
253 {
254 if (strncmp (name, "__builtin_", 10) == 0)
255 return true;
256 if (strncmp (name, "__sync_", 7) == 0)
257 return true;
258 if (strncmp (name, "__atomic_", 9) == 0)
259 return true;
260 if (flag_cilkplus
261 && (!strcmp (name, "__cilkrts_detach")
262 || !strcmp (name, "__cilkrts_pop_frame")))
263 return true;
264 return false;
265 }
266
267
268 /* Return true if DECL is a function symbol representing a built-in. */
269
270 bool
271 is_builtin_fn (tree decl)
272 {
273 return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
274 }
275
 276 /* Return true if NODE should be considered for inline expansion regardless
 277    of the optimization level.  This is the case whenever the function is
 278    called by its "internal" name, which normally contains the prefix "__builtin".  */
279
280 static bool
281 called_as_built_in (tree node)
282 {
283 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
284 we want the name used to call the function, not the name it
285 will have. */
286 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
287 return is_builtin_name (name);
288 }
289
290 /* Compute values M and N such that M divides (address of EXP - N) and such
 291    that N < M.  If these numbers can be determined, store M in *ALIGNP and N in
 292    *BITPOSP and return true.  Otherwise return false and store BITS_PER_UNIT in
 293    *ALIGNP and any bit-offset in *BITPOSP.
294
295 Note that the address (and thus the alignment) computed here is based
296 on the address to which a symbol resolves, whereas DECL_ALIGN is based
297 on the address at which an object is actually located. These two
298 addresses are not always the same. For example, on ARM targets,
299 the address &foo of a Thumb function foo() has the lowest bit set,
300 whereas foo() itself starts on an even address.
301
302 If ADDR_P is true we are taking the address of the memory reference EXP
303 and thus cannot rely on the access taking place. */
304
305 static bool
306 get_object_alignment_2 (tree exp, unsigned int *alignp,
307 unsigned HOST_WIDE_INT *bitposp, bool addr_p)
308 {
309 HOST_WIDE_INT bitsize, bitpos;
310 tree offset;
311 machine_mode mode;
312 int unsignedp, volatilep;
313 unsigned int align = BITS_PER_UNIT;
314 bool known_alignment = false;
315
316 /* Get the innermost object and the constant (bitpos) and possibly
317 variable (offset) offset of the access. */
318 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
319 &mode, &unsignedp, &volatilep, true);
320
321 /* Extract alignment information from the innermost object and
322 possibly adjust bitpos and offset. */
323 if (TREE_CODE (exp) == FUNCTION_DECL)
324 {
325 /* Function addresses can encode extra information besides their
326 alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
327 allows the low bit to be used as a virtual bit, we know
328 that the address itself must be at least 2-byte aligned. */
329 if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
330 align = 2 * BITS_PER_UNIT;
331 }
332 else if (TREE_CODE (exp) == LABEL_DECL)
333 ;
334 else if (TREE_CODE (exp) == CONST_DECL)
335 {
336 /* The alignment of a CONST_DECL is determined by its initializer. */
337 exp = DECL_INITIAL (exp);
338 align = TYPE_ALIGN (TREE_TYPE (exp));
339 #ifdef CONSTANT_ALIGNMENT
340 if (CONSTANT_CLASS_P (exp))
341 align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
342 #endif
343 known_alignment = true;
344 }
345 else if (DECL_P (exp))
346 {
347 align = DECL_ALIGN (exp);
348 known_alignment = true;
349 }
350 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
351 {
352 align = TYPE_ALIGN (TREE_TYPE (exp));
353 }
354 else if (TREE_CODE (exp) == INDIRECT_REF
355 || TREE_CODE (exp) == MEM_REF
356 || TREE_CODE (exp) == TARGET_MEM_REF)
357 {
358 tree addr = TREE_OPERAND (exp, 0);
359 unsigned ptr_align;
360 unsigned HOST_WIDE_INT ptr_bitpos;
361 unsigned HOST_WIDE_INT ptr_bitmask = ~0;
362
 363 	  /* If the address is explicitly aligned, handle that.  */
364 if (TREE_CODE (addr) == BIT_AND_EXPR
365 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
366 {
367 ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
368 ptr_bitmask *= BITS_PER_UNIT;
369 align = ptr_bitmask & -ptr_bitmask;
370 addr = TREE_OPERAND (addr, 0);
371 }
372
373 known_alignment
374 = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
375 align = MAX (ptr_align, align);
376
377 /* Re-apply explicit alignment to the bitpos. */
378 ptr_bitpos &= ptr_bitmask;
379
380 /* The alignment of the pointer operand in a TARGET_MEM_REF
381 has to take the variable offset parts into account. */
382 if (TREE_CODE (exp) == TARGET_MEM_REF)
383 {
384 if (TMR_INDEX (exp))
385 {
386 unsigned HOST_WIDE_INT step = 1;
387 if (TMR_STEP (exp))
388 step = TREE_INT_CST_LOW (TMR_STEP (exp));
389 align = MIN (align, (step & -step) * BITS_PER_UNIT);
390 }
391 if (TMR_INDEX2 (exp))
392 align = BITS_PER_UNIT;
393 known_alignment = false;
394 }
395
396 /* When EXP is an actual memory reference then we can use
397 TYPE_ALIGN of a pointer indirection to derive alignment.
398 Do so only if get_pointer_alignment_1 did not reveal absolute
399 alignment knowledge and if using that alignment would
400 improve the situation. */
401 if (!addr_p && !known_alignment
402 && TYPE_ALIGN (TREE_TYPE (exp)) > align)
403 align = TYPE_ALIGN (TREE_TYPE (exp));
404 else
405 {
406 /* Else adjust bitpos accordingly. */
407 bitpos += ptr_bitpos;
408 if (TREE_CODE (exp) == MEM_REF
409 || TREE_CODE (exp) == TARGET_MEM_REF)
410 bitpos += mem_ref_offset (exp).to_short_addr () * BITS_PER_UNIT;
411 }
412 }
413 else if (TREE_CODE (exp) == STRING_CST)
414 {
 415 /* STRING_CSTs are the only constant objects we allow not to be
 416 	 wrapped inside a CONST_DECL.  */
417 align = TYPE_ALIGN (TREE_TYPE (exp));
418 #ifdef CONSTANT_ALIGNMENT
419 if (CONSTANT_CLASS_P (exp))
420 align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
421 #endif
422 known_alignment = true;
423 }
424
425 /* If there is a non-constant offset part extract the maximum
426 alignment that can prevail. */
427 if (offset)
428 {
429 unsigned int trailing_zeros = tree_ctz (offset);
430 if (trailing_zeros < HOST_BITS_PER_INT)
431 {
432 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
433 if (inner)
434 align = MIN (align, inner);
435 }
436 }
437
438 *alignp = align;
439 *bitposp = bitpos & (*alignp - 1);
440 return known_alignment;
441 }
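/* Editorial example (not part of the original sources): for an access
   such as ((struct S *) p)->c, where the pointer P is known to be
   8-byte aligned and the field C sits at byte offset 2, the function
   above would report *ALIGNP == 64 and *BITPOSP == 16; that is,
   M == 64 divides (&EXP - 16) and N == 16 < M.  The struct S, field C
   and pointer P are hypothetical and exist only for this sketch.  */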
442
443 /* For a memory reference expression EXP compute values M and N such that M
444 divides (&EXP - N) and such that N < M. If these numbers can be determined,
 445    store M in *ALIGNP and N in *BITPOSP and return true.  Otherwise return false
 446    and store BITS_PER_UNIT in *ALIGNP and any bit-offset in *BITPOSP.  */
447
448 bool
449 get_object_alignment_1 (tree exp, unsigned int *alignp,
450 unsigned HOST_WIDE_INT *bitposp)
451 {
452 return get_object_alignment_2 (exp, alignp, bitposp, false);
453 }
454
455 /* Return the alignment in bits of EXP, an object. */
456
457 unsigned int
458 get_object_alignment (tree exp)
459 {
460 unsigned HOST_WIDE_INT bitpos = 0;
461 unsigned int align;
462
463 get_object_alignment_1 (exp, &align, &bitpos);
464
465 /* align and bitpos now specify known low bits of the pointer.
466 ptr & (align - 1) == bitpos. */
467
468 if (bitpos != 0)
469 align = (bitpos & -bitpos);
470 return align;
471 }
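/* Editorial usage sketch: callers typically compare the result against
   the alignment required by the mode they are about to use, roughly

     unsigned int align = get_object_alignment (exp);
     if (align >= GET_MODE_ALIGNMENT (mode))
       ...   an aligned access is known to be safe

   where EXP and MODE stand for whatever the caller is expanding; the
   snippet is an illustration, not code from this file.  */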
472
473 /* For a pointer valued expression EXP compute values M and N such that M
474 divides (EXP - N) and such that N < M. If these numbers can be determined,
 475    store M in *ALIGNP and N in *BITPOSP and return true.  Return false if
476 the results are just a conservative approximation.
477
478 If EXP is not a pointer, false is returned too. */
479
480 bool
481 get_pointer_alignment_1 (tree exp, unsigned int *alignp,
482 unsigned HOST_WIDE_INT *bitposp)
483 {
484 STRIP_NOPS (exp);
485
486 if (TREE_CODE (exp) == ADDR_EXPR)
487 return get_object_alignment_2 (TREE_OPERAND (exp, 0),
488 alignp, bitposp, true);
489 else if (TREE_CODE (exp) == SSA_NAME
490 && POINTER_TYPE_P (TREE_TYPE (exp)))
491 {
492 unsigned int ptr_align, ptr_misalign;
493 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
494
495 if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
496 {
497 *bitposp = ptr_misalign * BITS_PER_UNIT;
498 *alignp = ptr_align * BITS_PER_UNIT;
499 /* We cannot really tell whether this result is an approximation. */
500 return true;
501 }
502 else
503 {
504 *bitposp = 0;
505 *alignp = BITS_PER_UNIT;
506 return false;
507 }
508 }
509 else if (TREE_CODE (exp) == INTEGER_CST)
510 {
511 *alignp = BIGGEST_ALIGNMENT;
512 *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
513 & (BIGGEST_ALIGNMENT - 1));
514 return true;
515 }
516
517 *bitposp = 0;
518 *alignp = BITS_PER_UNIT;
519 return false;
520 }
521
522 /* Return the alignment in bits of EXP, a pointer valued expression.
523 The alignment returned is, by default, the alignment of the thing that
524 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
525
526 Otherwise, look at the expression to see if we can do better, i.e., if the
527 expression is actually pointing at an object whose alignment is tighter. */
528
529 unsigned int
530 get_pointer_alignment (tree exp)
531 {
532 unsigned HOST_WIDE_INT bitpos = 0;
533 unsigned int align;
534
535 get_pointer_alignment_1 (exp, &align, &bitpos);
536
537 /* align and bitpos now specify known low bits of the pointer.
538 ptr & (align - 1) == bitpos. */
539
540 if (bitpos != 0)
541 align = (bitpos & -bitpos);
542
543 return align;
544 }
545
 546 /* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
 547    way to do it, because the string could contain a zero byte in the middle;
 548    TREE_STRING_LENGTH is the size of the character array, not the string.
549
550 ONLY_VALUE should be nonzero if the result is not going to be emitted
551 into the instruction stream and zero if it is going to be expanded.
552 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
553 is returned, otherwise NULL, since
554 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
555 evaluate the side-effects.
556
 557    If ONLY_VALUE is two then we do not emit warnings about out-of-bounds
558 accesses. Note that this implies the result is not going to be emitted
559 into the instruction stream.
560
561 The value returned is of type `ssizetype'.
562
563 Unfortunately, string_constant can't access the values of const char
564 arrays with initializers, so neither can we do so here. */
565
566 tree
567 c_strlen (tree src, int only_value)
568 {
569 tree offset_node;
570 HOST_WIDE_INT offset;
571 int max;
572 const char *ptr;
573 location_t loc;
574
575 STRIP_NOPS (src);
576 if (TREE_CODE (src) == COND_EXPR
577 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
578 {
579 tree len1, len2;
580
581 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
582 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
583 if (tree_int_cst_equal (len1, len2))
584 return len1;
585 }
586
587 if (TREE_CODE (src) == COMPOUND_EXPR
588 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
589 return c_strlen (TREE_OPERAND (src, 1), only_value);
590
591 loc = EXPR_LOC_OR_LOC (src, input_location);
592
593 src = string_constant (src, &offset_node);
594 if (src == 0)
595 return NULL_TREE;
596
597 max = TREE_STRING_LENGTH (src) - 1;
598 ptr = TREE_STRING_POINTER (src);
599
600 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
601 {
602 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
603 compute the offset to the following null if we don't know where to
604 start searching for it. */
605 int i;
606
607 for (i = 0; i < max; i++)
608 if (ptr[i] == 0)
609 return NULL_TREE;
610
611 /* We don't know the starting offset, but we do know that the string
612 has no internal zero bytes. We can assume that the offset falls
613 within the bounds of the string; otherwise, the programmer deserves
614 what he gets. Subtract the offset from the length of the string,
615 and return that. This would perhaps not be valid if we were dealing
616 with named arrays in addition to literal string constants. */
617
618 return size_diffop_loc (loc, size_int (max), offset_node);
619 }
620
621 /* We have a known offset into the string. Start searching there for
622 a null character if we can represent it as a single HOST_WIDE_INT. */
623 if (offset_node == 0)
624 offset = 0;
625 else if (! tree_fits_shwi_p (offset_node))
626 offset = -1;
627 else
628 offset = tree_to_shwi (offset_node);
629
630 /* If the offset is known to be out of bounds, warn, and call strlen at
631 runtime. */
632 if (offset < 0 || offset > max)
633 {
634 /* Suppress multiple warnings for propagated constant strings. */
635 if (only_value != 2
636 && !TREE_NO_WARNING (src))
637 {
638 warning_at (loc, 0, "offset outside bounds of constant string");
639 TREE_NO_WARNING (src) = 1;
640 }
641 return NULL_TREE;
642 }
643
644 /* Use strlen to search for the first zero byte. Since any strings
645 constructed with build_string will have nulls appended, we win even
646 if we get handed something like (char[4])"abcd".
647
648 Since OFFSET is our starting index into the string, no further
649 calculation is needed. */
650 return ssize_int (strlen (ptr + offset));
651 }
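/* Editorial examples of the above: for SRC == &"hello"[0] the result is
   ssize_int (5).  For SRC == &"foo\0bar"[i] with a non-constant index I
   the result is NULL_TREE, because the distance to the first zero byte
   depends on where the offset lands relative to the embedded '\0'.  */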
652
653 /* Return a char pointer for a C string if it is a string constant
654 or sum of string constant and integer constant. */
655
656 const char *
657 c_getstr (tree src)
658 {
659 tree offset_node;
660
661 src = string_constant (src, &offset_node);
662 if (src == 0)
663 return 0;
664
665 if (offset_node == 0)
666 return TREE_STRING_POINTER (src);
667 else if (!tree_fits_uhwi_p (offset_node)
668 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
669 return 0;
670
671 return TREE_STRING_POINTER (src) + tree_to_uhwi (offset_node);
672 }
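/* Editorial example: for SRC representing "hello" + 2 this returns a
   pointer to "llo"; if the offset is not a constant, or points past the
   end of the string, the function returns 0 instead.  */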
673
674 /* Return a constant integer corresponding to target reading
675 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
676
677 static rtx
678 c_readstr (const char *str, machine_mode mode)
679 {
680 HOST_WIDE_INT ch;
681 unsigned int i, j;
682 HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];
683
684 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
685 unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
686 / HOST_BITS_PER_WIDE_INT;
687
688 gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
689 for (i = 0; i < len; i++)
690 tmp[i] = 0;
691
692 ch = 1;
693 for (i = 0; i < GET_MODE_SIZE (mode); i++)
694 {
695 j = i;
696 if (WORDS_BIG_ENDIAN)
697 j = GET_MODE_SIZE (mode) - i - 1;
698 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
699 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
700 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
701 j *= BITS_PER_UNIT;
702
703 if (ch)
704 ch = (unsigned char) str[i];
705 tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
706 }
707
708 wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
709 return immed_wide_int_const (c, mode);
710 }
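/* Editorial example: on a little-endian target with 32-bit SImode,
   c_readstr ("abcd", SImode) packs the bytes as 0x64636261 ('a' in the
   least significant byte); a big-endian target yields 0x61626364.  A
   terminating '\0' in STR zero-fills the remaining bytes.  */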
711
 712 /* Cast a target constant CST to target CHAR and, if that value fits into
 713    the host char type, return zero and put that value into the variable
 714    pointed to by P.  */
715
716 static int
717 target_char_cast (tree cst, char *p)
718 {
719 unsigned HOST_WIDE_INT val, hostval;
720
721 if (TREE_CODE (cst) != INTEGER_CST
722 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
723 return 1;
724
725 /* Do not care if it fits or not right here. */
726 val = TREE_INT_CST_LOW (cst);
727
728 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
729 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
730
731 hostval = val;
732 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
733 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
734
735 if (val != hostval)
736 return 1;
737
738 *p = hostval;
739 return 0;
740 }
741
742 /* Similar to save_expr, but assumes that arbitrary code is not executed
743 in between the multiple evaluations. In particular, we assume that a
744 non-addressable local variable will not be modified. */
745
746 static tree
747 builtin_save_expr (tree exp)
748 {
749 if (TREE_CODE (exp) == SSA_NAME
750 || (TREE_ADDRESSABLE (exp) == 0
751 && (TREE_CODE (exp) == PARM_DECL
752 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
753 return exp;
754
755 return save_expr (exp);
756 }
757
758 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
759 times to get the address of either a higher stack frame, or a return
760 address located within it (depending on FNDECL_CODE). */
761
762 static rtx
763 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
764 {
765 int i;
766
767 #ifdef INITIAL_FRAME_ADDRESS_RTX
768 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
769 #else
770 rtx tem;
771
772 /* For a zero count with __builtin_return_address, we don't care what
773 frame address we return, because target-specific definitions will
774 override us. Therefore frame pointer elimination is OK, and using
775 the soft frame pointer is OK.
776
777 For a nonzero count, or a zero count with __builtin_frame_address,
778 we require a stable offset from the current frame pointer to the
779 previous one, so we must use the hard frame pointer, and
780 we must disable frame pointer elimination. */
781 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
782 tem = frame_pointer_rtx;
783 else
784 {
785 tem = hard_frame_pointer_rtx;
786
787 /* Tell reload not to eliminate the frame pointer. */
788 crtl->accesses_prior_frames = 1;
789 }
790 #endif
791
792 /* Some machines need special handling before we can access
793 arbitrary frames. For example, on the SPARC, we must first flush
794 all register windows to the stack. */
795 #ifdef SETUP_FRAME_ADDRESSES
796 if (count > 0)
797 SETUP_FRAME_ADDRESSES ();
798 #endif
799
800 /* On the SPARC, the return address is not in the frame, it is in a
801 register. There is no way to access it off of the current frame
802 pointer, but it can be accessed off the previous frame pointer by
803 reading the value from the register window save area. */
804 if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
805 count--;
806
807 /* Scan back COUNT frames to the specified frame. */
808 for (i = 0; i < count; i++)
809 {
810 /* Assume the dynamic chain pointer is in the word that the
811 frame address points to, unless otherwise specified. */
812 #ifdef DYNAMIC_CHAIN_ADDRESS
813 tem = DYNAMIC_CHAIN_ADDRESS (tem);
814 #endif
815 tem = memory_address (Pmode, tem);
816 tem = gen_frame_mem (Pmode, tem);
817 tem = copy_to_reg (tem);
818 }
819
820 /* For __builtin_frame_address, return what we've got. But, on
821 the SPARC for example, we may have to add a bias. */
822 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
823 #ifdef FRAME_ADDR_RTX
824 return FRAME_ADDR_RTX (tem);
825 #else
826 return tem;
827 #endif
828
829 /* For __builtin_return_address, get the return address from that frame. */
830 #ifdef RETURN_ADDR_RTX
831 tem = RETURN_ADDR_RTX (count, tem);
832 #else
833 tem = memory_address (Pmode,
834 plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
835 tem = gen_frame_mem (Pmode, tem);
836 #endif
837 return tem;
838 }
839
840 /* Alias set used for setjmp buffer. */
841 static alias_set_type setjmp_alias_set = -1;
842
843 /* Construct the leading half of a __builtin_setjmp call. Control will
844 return to RECEIVER_LABEL. This is also called directly by the SJLJ
845 exception handling code. */
846
847 void
848 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
849 {
850 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
851 rtx stack_save;
852 rtx mem;
853
854 if (setjmp_alias_set == -1)
855 setjmp_alias_set = new_alias_set ();
856
857 buf_addr = convert_memory_address (Pmode, buf_addr);
858
859 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
860
861 /* We store the frame pointer and the address of receiver_label in
862 the buffer and use the rest of it for the stack save area, which
863 is machine-dependent. */
864
865 mem = gen_rtx_MEM (Pmode, buf_addr);
866 set_mem_alias_set (mem, setjmp_alias_set);
867 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
868
869 mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
870 GET_MODE_SIZE (Pmode))),
871 set_mem_alias_set (mem, setjmp_alias_set);
872
873 emit_move_insn (validize_mem (mem),
874 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
875
876 stack_save = gen_rtx_MEM (sa_mode,
877 plus_constant (Pmode, buf_addr,
878 2 * GET_MODE_SIZE (Pmode)));
879 set_mem_alias_set (stack_save, setjmp_alias_set);
880 emit_stack_save (SAVE_NONLOCAL, &stack_save);
881
882 /* If there is further processing to do, do it. */
883 #ifdef HAVE_builtin_setjmp_setup
884 if (HAVE_builtin_setjmp_setup)
885 emit_insn (gen_builtin_setjmp_setup (buf_addr));
886 #endif
887
888 /* We have a nonlocal label. */
889 cfun->has_nonlocal_label = 1;
890 }
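/* Editorial summary of the buffer layout written above: word 0 holds
   the frame pointer value, word 1 the address of RECEIVER_LABEL, and
   the area starting at offset 2 * GET_MODE_SIZE (Pmode) is the
   machine-dependent stack save area filled by emit_stack_save.  */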
891
892 /* Construct the trailing part of a __builtin_setjmp call. This is
893 also called directly by the SJLJ exception handling code.
 894    If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */
895
896 void
897 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
898 {
899 rtx chain;
900
901 /* Mark the FP as used when we get here, so we have to make sure it's
902 marked as used by this function. */
903 emit_use (hard_frame_pointer_rtx);
904
905 /* Mark the static chain as clobbered here so life information
906 doesn't get messed up for it. */
907 chain = targetm.calls.static_chain (current_function_decl, true);
908 if (chain && REG_P (chain))
909 emit_clobber (chain);
910
911 /* Now put in the code to restore the frame pointer, and argument
912 pointer, if needed. */
913 #ifdef HAVE_nonlocal_goto
914 if (! HAVE_nonlocal_goto)
915 #endif
916 {
917 /* First adjust our frame pointer to its actual value. It was
918 previously set to the start of the virtual area corresponding to
919 the stacked variables when we branched here and now needs to be
920 adjusted to the actual hardware fp value.
921
922 Assignments to virtual registers are converted by
923 instantiate_virtual_regs into the corresponding assignment
924 to the underlying register (fp in this case) that makes
925 the original assignment true.
926 So the following insn will actually be decrementing fp by
927 STARTING_FRAME_OFFSET. */
928 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
929
930 /* Restoring the frame pointer also modifies the hard frame pointer.
931 Mark it used (so that the previous assignment remains live once
932 the frame pointer is eliminated) and clobbered (to represent the
933 implicit update from the assignment). */
934 emit_use (hard_frame_pointer_rtx);
935 emit_clobber (hard_frame_pointer_rtx);
936 }
937
938 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
939 if (fixed_regs[ARG_POINTER_REGNUM])
940 {
941 #ifdef ELIMINABLE_REGS
942 /* If the argument pointer can be eliminated in favor of the
943 frame pointer, we don't need to restore it. We assume here
944 that if such an elimination is present, it can always be used.
945 This is the case on all known machines; if we don't make this
946 assumption, we do unnecessary saving on many machines. */
947 size_t i;
948 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
949
950 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
951 if (elim_regs[i].from == ARG_POINTER_REGNUM
952 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
953 break;
954
955 if (i == ARRAY_SIZE (elim_regs))
956 #endif
957 {
958 /* Now restore our arg pointer from the address at which it
959 was saved in our stack frame. */
960 emit_move_insn (crtl->args.internal_arg_pointer,
961 copy_to_reg (get_arg_pointer_save_area ()));
962 }
963 }
964 #endif
965
966 #ifdef HAVE_builtin_setjmp_receiver
967 if (receiver_label != NULL && HAVE_builtin_setjmp_receiver)
968 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
969 else
970 #endif
971 #ifdef HAVE_nonlocal_goto_receiver
972 if (HAVE_nonlocal_goto_receiver)
973 emit_insn (gen_nonlocal_goto_receiver ());
974 else
975 #endif
976 { /* Nothing */ }
977
978 /* We must not allow the code we just generated to be reordered by
979 scheduling. Specifically, the update of the frame pointer must
980 happen immediately, not later. */
981 emit_insn (gen_blockage ());
982 }
983
984 /* __builtin_longjmp is passed a pointer to an array of five words (not
985 all will be used on all machines). It operates similarly to the C
986 library function of the same name, but is more efficient. Much of
987 the code below is copied from the handling of non-local gotos. */
988
989 static void
990 expand_builtin_longjmp (rtx buf_addr, rtx value)
991 {
992 rtx fp, lab, stack;
993 rtx_insn *insn, *last;
994 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
995
 996   /* DRAP is needed for stack realignment if longjmp is expanded in the
 997      current function.  */
998 if (SUPPORTS_STACK_ALIGNMENT)
999 crtl->need_drap = true;
1000
1001 if (setjmp_alias_set == -1)
1002 setjmp_alias_set = new_alias_set ();
1003
1004 buf_addr = convert_memory_address (Pmode, buf_addr);
1005
1006 buf_addr = force_reg (Pmode, buf_addr);
1007
 1008   /* We require the user to pass a second argument of 1, because
 1009      that is what builtin_setjmp will return.  */
1010 gcc_assert (value == const1_rtx);
1011
1012 last = get_last_insn ();
1013 #ifdef HAVE_builtin_longjmp
1014 if (HAVE_builtin_longjmp)
1015 emit_insn (gen_builtin_longjmp (buf_addr));
1016 else
1017 #endif
1018 {
1019 fp = gen_rtx_MEM (Pmode, buf_addr);
1020 lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
1021 GET_MODE_SIZE (Pmode)));
1022
1023 stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
1024 2 * GET_MODE_SIZE (Pmode)));
1025 set_mem_alias_set (fp, setjmp_alias_set);
1026 set_mem_alias_set (lab, setjmp_alias_set);
1027 set_mem_alias_set (stack, setjmp_alias_set);
1028
1029 /* Pick up FP, label, and SP from the block and jump. This code is
1030 from expand_goto in stmt.c; see there for detailed comments. */
1031 #ifdef HAVE_nonlocal_goto
1032 if (HAVE_nonlocal_goto)
1033 /* We have to pass a value to the nonlocal_goto pattern that will
1034 get copied into the static_chain pointer, but it does not matter
1035 what that value is, because builtin_setjmp does not use it. */
1036 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
1037 else
1038 #endif
1039 {
1040 lab = copy_to_reg (lab);
1041
1042 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1043 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1044
1045 emit_move_insn (hard_frame_pointer_rtx, fp);
1046 emit_stack_restore (SAVE_NONLOCAL, stack);
1047
1048 emit_use (hard_frame_pointer_rtx);
1049 emit_use (stack_pointer_rtx);
1050 emit_indirect_jump (lab);
1051 }
1052 }
1053
1054 /* Search backwards and mark the jump insn as a non-local goto.
1055 Note that this precludes the use of __builtin_longjmp to a
1056 __builtin_setjmp target in the same function. However, we've
1057 already cautioned the user that these functions are for
1058 internal exception handling use only. */
1059 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1060 {
1061 gcc_assert (insn != last);
1062
1063 if (JUMP_P (insn))
1064 {
1065 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1066 break;
1067 }
1068 else if (CALL_P (insn))
1069 break;
1070 }
1071 }
1072
1073 static inline bool
1074 more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
1075 {
1076 return (iter->i < iter->n);
1077 }
1078
1079 /* This function validates the types of a function call argument list
 1080    against a specified list of tree_codes.  If the last specifier is a 0,
 1081    it represents an ellipsis; otherwise the last specifier must be a
 1082    VOID_TYPE.  */
1083
1084 static bool
1085 validate_arglist (const_tree callexpr, ...)
1086 {
1087 enum tree_code code;
1088 bool res = 0;
1089 va_list ap;
1090 const_call_expr_arg_iterator iter;
1091 const_tree arg;
1092
1093 va_start (ap, callexpr);
1094 init_const_call_expr_arg_iterator (callexpr, &iter);
1095
1096 do
1097 {
1098 code = (enum tree_code) va_arg (ap, int);
1099 switch (code)
1100 {
1101 case 0:
 1102 	  /* This signifies an ellipsis; any further arguments are all OK.  */
1103 res = true;
1104 goto end;
1105 case VOID_TYPE:
 1106 	  /* This signifies an endlink; if no arguments remain, return
 1107 	     true, otherwise return false.  */
1108 res = !more_const_call_expr_args_p (&iter);
1109 goto end;
1110 default:
1111 /* If no parameters remain or the parameter's code does not
1112 match the specified code, return false. Otherwise continue
1113 checking any remaining arguments. */
1114 arg = next_const_call_expr_arg (&iter);
1115 if (!validate_arg (arg, code))
1116 goto end;
1117 break;
1118 }
1119 }
1120 while (1);
1121
1122 /* We need gotos here since we can only have one VA_CLOSE in a
1123 function. */
1124 end: ;
1125 va_end (ap);
1126
1127 return res;
1128 }
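/* Editorial usage sketch: a builtin taking two pointers is checked with

     validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE);

   while a builtin that accepts trailing variable arguments ends the
   list with 0 instead of VOID_TYPE, for example

     validate_arglist (exp, POINTER_TYPE, 0);

   Both forms appear in the expanders below.  */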
1129
1130 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
1131 and the address of the save area. */
1132
1133 static rtx
1134 expand_builtin_nonlocal_goto (tree exp)
1135 {
1136 tree t_label, t_save_area;
1137 rtx r_label, r_save_area, r_fp, r_sp;
1138 rtx_insn *insn;
1139
1140 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
1141 return NULL_RTX;
1142
1143 t_label = CALL_EXPR_ARG (exp, 0);
1144 t_save_area = CALL_EXPR_ARG (exp, 1);
1145
1146 r_label = expand_normal (t_label);
1147 r_label = convert_memory_address (Pmode, r_label);
1148 r_save_area = expand_normal (t_save_area);
1149 r_save_area = convert_memory_address (Pmode, r_save_area);
1150 /* Copy the address of the save location to a register just in case it was
1151 based on the frame pointer. */
1152 r_save_area = copy_to_reg (r_save_area);
1153 r_fp = gen_rtx_MEM (Pmode, r_save_area);
1154 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
1155 plus_constant (Pmode, r_save_area,
1156 GET_MODE_SIZE (Pmode)));
1157
1158 crtl->has_nonlocal_goto = 1;
1159
1160 #ifdef HAVE_nonlocal_goto
1161 /* ??? We no longer need to pass the static chain value, afaik. */
1162 if (HAVE_nonlocal_goto)
1163 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
1164 else
1165 #endif
1166 {
1167 r_label = copy_to_reg (r_label);
1168
1169 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1170 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1171
1172 /* Restore frame pointer for containing function. */
1173 emit_move_insn (hard_frame_pointer_rtx, r_fp);
1174 emit_stack_restore (SAVE_NONLOCAL, r_sp);
1175
1176 /* USE of hard_frame_pointer_rtx added for consistency;
1177 not clear if really needed. */
1178 emit_use (hard_frame_pointer_rtx);
1179 emit_use (stack_pointer_rtx);
1180
1181 /* If the architecture is using a GP register, we must
1182 conservatively assume that the target function makes use of it.
1183 The prologue of functions with nonlocal gotos must therefore
1184 initialize the GP register to the appropriate value, and we
1185 must then make sure that this value is live at the point
1186 of the jump. (Note that this doesn't necessarily apply
1187 to targets with a nonlocal_goto pattern; they are free
1188 to implement it in their own way. Note also that this is
1189 a no-op if the GP register is a global invariant.) */
1190 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
1191 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
1192 emit_use (pic_offset_table_rtx);
1193
1194 emit_indirect_jump (r_label);
1195 }
1196
1197 /* Search backwards to the jump insn and mark it as a
1198 non-local goto. */
1199 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1200 {
1201 if (JUMP_P (insn))
1202 {
1203 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1204 break;
1205 }
1206 else if (CALL_P (insn))
1207 break;
1208 }
1209
1210 return const0_rtx;
1211 }
1212
1213 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1214 (not all will be used on all machines) that was passed to __builtin_setjmp.
1215 It updates the stack pointer in that block to the current value. This is
1216 also called directly by the SJLJ exception handling code. */
1217
1218 void
1219 expand_builtin_update_setjmp_buf (rtx buf_addr)
1220 {
1221 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1222 rtx stack_save
1223 = gen_rtx_MEM (sa_mode,
1224 memory_address
1225 (sa_mode,
1226 plus_constant (Pmode, buf_addr,
1227 2 * GET_MODE_SIZE (Pmode))));
1228
1229 emit_stack_save (SAVE_NONLOCAL, &stack_save);
1230 }
1231
1232 /* Expand a call to __builtin_prefetch. For a target that does not support
1233 data prefetch, evaluate the memory address argument in case it has side
1234 effects. */
1235
1236 static void
1237 expand_builtin_prefetch (tree exp)
1238 {
1239 tree arg0, arg1, arg2;
1240 int nargs;
1241 rtx op0, op1, op2;
1242
1243 if (!validate_arglist (exp, POINTER_TYPE, 0))
1244 return;
1245
1246 arg0 = CALL_EXPR_ARG (exp, 0);
1247
1248 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1249 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1250 locality). */
1251 nargs = call_expr_nargs (exp);
1252 if (nargs > 1)
1253 arg1 = CALL_EXPR_ARG (exp, 1);
1254 else
1255 arg1 = integer_zero_node;
1256 if (nargs > 2)
1257 arg2 = CALL_EXPR_ARG (exp, 2);
1258 else
1259 arg2 = integer_three_node;
1260
1261 /* Argument 0 is an address. */
1262 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1263
1264 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1265 if (TREE_CODE (arg1) != INTEGER_CST)
1266 {
1267 error ("second argument to %<__builtin_prefetch%> must be a constant");
1268 arg1 = integer_zero_node;
1269 }
1270 op1 = expand_normal (arg1);
1271 /* Argument 1 must be either zero or one. */
1272 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1273 {
1274 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1275 " using zero");
1276 op1 = const0_rtx;
1277 }
1278
1279 /* Argument 2 (locality) must be a compile-time constant int. */
1280 if (TREE_CODE (arg2) != INTEGER_CST)
1281 {
1282 error ("third argument to %<__builtin_prefetch%> must be a constant");
1283 arg2 = integer_zero_node;
1284 }
1285 op2 = expand_normal (arg2);
1286 /* Argument 2 must be 0, 1, 2, or 3. */
1287 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1288 {
1289 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1290 op2 = const0_rtx;
1291 }
1292
1293 #ifdef HAVE_prefetch
1294 if (HAVE_prefetch)
1295 {
1296 struct expand_operand ops[3];
1297
1298 create_address_operand (&ops[0], op0);
1299 create_integer_operand (&ops[1], INTVAL (op1));
1300 create_integer_operand (&ops[2], INTVAL (op2));
1301 if (maybe_expand_insn (CODE_FOR_prefetch, 3, ops))
1302 return;
1303 }
1304 #endif
1305
1306 /* Don't do anything with direct references to volatile memory, but
1307 generate code to handle other side effects. */
1308 if (!MEM_P (op0) && side_effects_p (op0))
1309 emit_insn (op0);
1310 }
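/* Editorial source-level example: the call

     __builtin_prefetch (p + 4, 1, 3);

   requests a prefetch of P + 4 for writing with maximal temporal
   locality; when the second and third arguments are omitted they
   default to 0 (read) and 3, as described above.  On targets without
   a prefetch pattern only the side effects of the address expression
   are expanded.  */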
1311
1312 /* Get a MEM rtx for expression EXP which is the address of an operand
1313 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1314 the maximum length of the block of memory that might be accessed or
1315 NULL if unknown. */
1316
1317 static rtx
1318 get_memory_rtx (tree exp, tree len)
1319 {
1320 tree orig_exp = exp;
1321 rtx addr, mem;
1322
 1323   /* When EXP is an unresolved SAVE_EXPR, MEM_ATTRS can still be derived
 1324      from its expression; for expr->a.b only <variable>.a.b is recorded.  */
1325 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1326 exp = TREE_OPERAND (exp, 0);
1327
1328 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1329 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1330
1331 /* Get an expression we can use to find the attributes to assign to MEM.
1332 First remove any nops. */
1333 while (CONVERT_EXPR_P (exp)
1334 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1335 exp = TREE_OPERAND (exp, 0);
1336
 1337   /* Build a MEM_REF representing the whole accessed area as a byte blob
 1338      (as builtin stringops may alias with anything).  */
1339 exp = fold_build2 (MEM_REF,
1340 build_array_type (char_type_node,
1341 build_range_type (sizetype,
1342 size_one_node, len)),
1343 exp, build_int_cst (ptr_type_node, 0));
1344
1345 /* If the MEM_REF has no acceptable address, try to get the base object
1346 from the original address we got, and build an all-aliasing
1347 unknown-sized access to that one. */
1348 if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
1349 set_mem_attributes (mem, exp, 0);
1350 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1351 && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
1352 0))))
1353 {
1354 exp = build_fold_addr_expr (exp);
1355 exp = fold_build2 (MEM_REF,
1356 build_array_type (char_type_node,
1357 build_range_type (sizetype,
1358 size_zero_node,
1359 NULL)),
1360 exp, build_int_cst (ptr_type_node, 0));
1361 set_mem_attributes (mem, exp, 0);
1362 }
1363 set_mem_alias_set (mem, 0);
1364 return mem;
1365 }
1366 \f
1367 /* Built-in functions to perform an untyped call and return. */
1368
1369 #define apply_args_mode \
1370 (this_target_builtins->x_apply_args_mode)
1371 #define apply_result_mode \
1372 (this_target_builtins->x_apply_result_mode)
1373
1374 /* Return the size required for the block returned by __builtin_apply_args,
1375 and initialize apply_args_mode. */
1376
1377 static int
1378 apply_args_size (void)
1379 {
1380 static int size = -1;
1381 int align;
1382 unsigned int regno;
1383 machine_mode mode;
1384
1385 /* The values computed by this function never change. */
1386 if (size < 0)
1387 {
1388 /* The first value is the incoming arg-pointer. */
1389 size = GET_MODE_SIZE (Pmode);
1390
1391 /* The second value is the structure value address unless this is
1392 passed as an "invisible" first argument. */
1393 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1394 size += GET_MODE_SIZE (Pmode);
1395
1396 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1397 if (FUNCTION_ARG_REGNO_P (regno))
1398 {
1399 mode = targetm.calls.get_raw_arg_mode (regno);
1400
1401 gcc_assert (mode != VOIDmode);
1402
1403 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1404 if (size % align != 0)
1405 size = CEIL (size, align) * align;
1406 size += GET_MODE_SIZE (mode);
1407 apply_args_mode[regno] = mode;
1408 }
1409 else
1410 {
1411 apply_args_mode[regno] = VOIDmode;
1412 }
1413 }
1414 return size;
1415 }
1416
1417 /* Return the size required for the block returned by __builtin_apply,
1418 and initialize apply_result_mode. */
1419
1420 static int
1421 apply_result_size (void)
1422 {
1423 static int size = -1;
1424 int align, regno;
1425 machine_mode mode;
1426
1427 /* The values computed by this function never change. */
1428 if (size < 0)
1429 {
1430 size = 0;
1431
1432 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1433 if (targetm.calls.function_value_regno_p (regno))
1434 {
1435 mode = targetm.calls.get_raw_result_mode (regno);
1436
1437 gcc_assert (mode != VOIDmode);
1438
1439 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1440 if (size % align != 0)
1441 size = CEIL (size, align) * align;
1442 size += GET_MODE_SIZE (mode);
1443 apply_result_mode[regno] = mode;
1444 }
1445 else
1446 apply_result_mode[regno] = VOIDmode;
1447
1448 /* Allow targets that use untyped_call and untyped_return to override
1449 the size so that machine-specific information can be stored here. */
1450 #ifdef APPLY_RESULT_SIZE
1451 size = APPLY_RESULT_SIZE;
1452 #endif
1453 }
1454 return size;
1455 }
1456
1457 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1458 /* Create a vector describing the result block RESULT. If SAVEP is true,
1459 the result block is used to save the values; otherwise it is used to
1460 restore the values. */
1461
1462 static rtx
1463 result_vector (int savep, rtx result)
1464 {
1465 int regno, size, align, nelts;
1466 machine_mode mode;
1467 rtx reg, mem;
1468 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1469
1470 size = nelts = 0;
1471 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1472 if ((mode = apply_result_mode[regno]) != VOIDmode)
1473 {
1474 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1475 if (size % align != 0)
1476 size = CEIL (size, align) * align;
1477 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1478 mem = adjust_address (result, mode, size);
1479 savevec[nelts++] = (savep
1480 ? gen_rtx_SET (mem, reg)
1481 : gen_rtx_SET (reg, mem));
1482 size += GET_MODE_SIZE (mode);
1483 }
1484 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1485 }
1486 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1487
1488 /* Save the state required to perform an untyped call with the same
1489 arguments as were passed to the current function. */
1490
1491 static rtx
1492 expand_builtin_apply_args_1 (void)
1493 {
1494 rtx registers, tem;
1495 int size, align, regno;
1496 machine_mode mode;
1497 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1498
1499 /* Create a block where the arg-pointer, structure value address,
1500 and argument registers can be saved. */
1501 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1502
1503 /* Walk past the arg-pointer and structure value address. */
1504 size = GET_MODE_SIZE (Pmode);
1505 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1506 size += GET_MODE_SIZE (Pmode);
1507
1508 /* Save each register used in calling a function to the block. */
1509 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1510 if ((mode = apply_args_mode[regno]) != VOIDmode)
1511 {
1512 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1513 if (size % align != 0)
1514 size = CEIL (size, align) * align;
1515
1516 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1517
1518 emit_move_insn (adjust_address (registers, mode, size), tem);
1519 size += GET_MODE_SIZE (mode);
1520 }
1521
1522 /* Save the arg pointer to the block. */
1523 tem = copy_to_reg (crtl->args.internal_arg_pointer);
 1524   /* We need the pointer as the caller actually passed it to us, not
 1525      as we might have pretended it was passed.  Make sure it's a valid
 1526      operand, as emit_move_insn isn't expected to handle a PLUS.  */
1527 if (STACK_GROWS_DOWNWARD)
1528 tem
1529 = force_operand (plus_constant (Pmode, tem,
1530 crtl->args.pretend_args_size),
1531 NULL_RTX);
1532 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1533
1534 size = GET_MODE_SIZE (Pmode);
1535
1536 /* Save the structure value address unless this is passed as an
1537 "invisible" first argument. */
1538 if (struct_incoming_value)
1539 {
1540 emit_move_insn (adjust_address (registers, Pmode, size),
1541 copy_to_reg (struct_incoming_value));
1542 size += GET_MODE_SIZE (Pmode);
1543 }
1544
1545 /* Return the address of the block. */
1546 return copy_addr_to_reg (XEXP (registers, 0));
1547 }
1548
1549 /* __builtin_apply_args returns block of memory allocated on
1550 the stack into which is stored the arg pointer, structure
1551 value address, static chain, and all the registers that might
1552 possibly be used in performing a function call. The code is
1553 moved to the start of the function so the incoming values are
1554 saved. */
1555
1556 static rtx
1557 expand_builtin_apply_args (void)
1558 {
1559 /* Don't do __builtin_apply_args more than once in a function.
1560 Save the result of the first call and reuse it. */
1561 if (apply_args_value != 0)
1562 return apply_args_value;
1563 {
1564 /* When this function is called, it means that registers must be
1565 saved on entry to this function. So we migrate the
1566 call to the first insn of this function. */
1567 rtx temp;
1568 rtx seq;
1569
1570 start_sequence ();
1571 temp = expand_builtin_apply_args_1 ();
1572 seq = get_insns ();
1573 end_sequence ();
1574
1575 apply_args_value = temp;
1576
1577 /* Put the insns after the NOTE that starts the function.
1578 If this is inside a start_sequence, make the outer-level insn
1579 chain current, so the code is placed at the start of the
1580 function. If internal_arg_pointer is a non-virtual pseudo,
1581 it needs to be placed after the function that initializes
1582 that pseudo. */
1583 push_topmost_sequence ();
1584 if (REG_P (crtl->args.internal_arg_pointer)
1585 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1586 emit_insn_before (seq, parm_birth_insn);
1587 else
1588 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1589 pop_topmost_sequence ();
1590 return temp;
1591 }
1592 }
1593
1594 /* Perform an untyped call and save the state required to perform an
1595 untyped return of whatever value was returned by the given function. */
1596
1597 static rtx
1598 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1599 {
1600 int size, align, regno;
1601 machine_mode mode;
1602 rtx incoming_args, result, reg, dest, src;
1603 rtx_call_insn *call_insn;
1604 rtx old_stack_level = 0;
1605 rtx call_fusage = 0;
1606 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1607
1608 arguments = convert_memory_address (Pmode, arguments);
1609
1610 /* Create a block where the return registers can be saved. */
1611 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1612
1613 /* Fetch the arg pointer from the ARGUMENTS block. */
1614 incoming_args = gen_reg_rtx (Pmode);
1615 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1616 if (!STACK_GROWS_DOWNWARD)
1617 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1618 incoming_args, 0, OPTAB_LIB_WIDEN);
1619
1620 /* Push a new argument block and copy the arguments. Do not allow
1621 the (potential) memcpy call below to interfere with our stack
1622 manipulations. */
1623 do_pending_stack_adjust ();
1624 NO_DEFER_POP;
1625
1626 /* Save the stack with nonlocal if available. */
1627 #ifdef HAVE_save_stack_nonlocal
1628 if (HAVE_save_stack_nonlocal)
1629 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1630 else
1631 #endif
1632 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1633
1634 /* Allocate a block of memory onto the stack and copy the memory
1635 arguments to the outgoing arguments address. We can pass TRUE
1636 as the 4th argument because we just saved the stack pointer
1637 and will restore it right after the call. */
1638 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);
1639
1640 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1641 may have already set current_function_calls_alloca to true.
1642 current_function_calls_alloca won't be set if argsize is zero,
1643 so we have to guarantee need_drap is true here. */
1644 if (SUPPORTS_STACK_ALIGNMENT)
1645 crtl->need_drap = true;
1646
1647 dest = virtual_outgoing_args_rtx;
1648 if (!STACK_GROWS_DOWNWARD)
1649 {
1650 if (CONST_INT_P (argsize))
1651 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1652 else
1653 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1654 }
1655 dest = gen_rtx_MEM (BLKmode, dest);
1656 set_mem_align (dest, PARM_BOUNDARY);
1657 src = gen_rtx_MEM (BLKmode, incoming_args);
1658 set_mem_align (src, PARM_BOUNDARY);
1659 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1660
1661 /* Refer to the argument block. */
1662 apply_args_size ();
1663 arguments = gen_rtx_MEM (BLKmode, arguments);
1664 set_mem_align (arguments, PARM_BOUNDARY);
1665
1666 /* Walk past the arg-pointer and structure value address. */
1667 size = GET_MODE_SIZE (Pmode);
1668 if (struct_value)
1669 size += GET_MODE_SIZE (Pmode);
1670
1671 /* Restore each of the registers previously saved. Make USE insns
1672 for each of these registers for use in making the call. */
1673 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1674 if ((mode = apply_args_mode[regno]) != VOIDmode)
1675 {
1676 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1677 if (size % align != 0)
1678 size = CEIL (size, align) * align;
1679 reg = gen_rtx_REG (mode, regno);
1680 emit_move_insn (reg, adjust_address (arguments, mode, size));
1681 use_reg (&call_fusage, reg);
1682 size += GET_MODE_SIZE (mode);
1683 }
1684
1685 /* Restore the structure value address unless this is passed as an
1686 "invisible" first argument. */
1687 size = GET_MODE_SIZE (Pmode);
1688 if (struct_value)
1689 {
1690 rtx value = gen_reg_rtx (Pmode);
1691 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1692 emit_move_insn (struct_value, value);
1693 if (REG_P (struct_value))
1694 use_reg (&call_fusage, struct_value);
1695 size += GET_MODE_SIZE (Pmode);
1696 }
1697
1698 /* All arguments and registers used for the call are set up by now! */
1699 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1700
1701 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1702 and we don't want to load it into a register as an optimization,
1703 because prepare_call_address already did it if it should be done. */
1704 if (GET_CODE (function) != SYMBOL_REF)
1705 function = memory_address (FUNCTION_MODE, function);
1706
1707 /* Generate the actual call instruction and save the return value. */
1708 #ifdef HAVE_untyped_call
1709 if (HAVE_untyped_call)
1710 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1711 result, result_vector (1, result)));
1712 else
1713 #endif
1714 #ifdef HAVE_call_value
1715 if (HAVE_call_value)
1716 {
1717 rtx valreg = 0;
1718
1719 /* Locate the unique return register. It is not possible to
1720 express a call that sets more than one return register using
1721 call_value; use untyped_call for that. In fact, untyped_call
1722 only needs to save the return registers in the given block. */
1723 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1724 if ((mode = apply_result_mode[regno]) != VOIDmode)
1725 {
1726 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1727
1728 valreg = gen_rtx_REG (mode, regno);
1729 }
1730
1731 emit_call_insn (GEN_CALL_VALUE (valreg,
1732 gen_rtx_MEM (FUNCTION_MODE, function),
1733 const0_rtx, NULL_RTX, const0_rtx));
1734
1735 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1736 }
1737 else
1738 #endif
1739 gcc_unreachable ();
1740
1741 /* Find the CALL insn we just emitted, and attach the register usage
1742 information. */
1743 call_insn = last_call_insn ();
1744 add_function_usage_to (call_insn, call_fusage);
1745
1746 /* Restore the stack. */
1747 #ifdef HAVE_save_stack_nonlocal
1748 if (HAVE_save_stack_nonlocal)
1749 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1750 else
1751 #endif
1752 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1753 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1754
1755 OK_DEFER_POP;
1756
1757 /* Return the address of the result block. */
1758 result = copy_addr_to_reg (XEXP (result, 0));
1759 return convert_memory_address (ptr_mode, result);
1760 }
1761
1762 /* Perform an untyped return. */
1763
1764 static void
1765 expand_builtin_return (rtx result)
1766 {
1767 int size, align, regno;
1768 machine_mode mode;
1769 rtx reg;
1770 rtx_insn *call_fusage = 0;
1771
1772 result = convert_memory_address (Pmode, result);
1773
1774 apply_result_size ();
1775 result = gen_rtx_MEM (BLKmode, result);
1776
1777 #ifdef HAVE_untyped_return
1778 if (HAVE_untyped_return)
1779 {
1780 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1781 emit_barrier ();
1782 return;
1783 }
1784 #endif
1785
1786 /* Restore the return value and note that each value is used. */
1787 size = 0;
1788 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1789 if ((mode = apply_result_mode[regno]) != VOIDmode)
1790 {
1791 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1792 if (size % align != 0)
1793 size = CEIL (size, align) * align;
1794 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1795 emit_move_insn (reg, adjust_address (result, mode, size));
1796
1797 push_to_sequence (call_fusage);
1798 emit_use (reg);
1799 call_fusage = get_insns ();
1800 end_sequence ();
1801 size += GET_MODE_SIZE (mode);
1802 }
1803
1804 /* Put the USE insns before the return. */
1805 emit_insn (call_fusage);
1806
1807 /* Return whatever value was restored by jumping directly to the end
1808 of the function. */
1809 expand_naked_return ();
1810 }
1811
1812 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1813
1814 static enum type_class
1815 type_to_class (tree type)
1816 {
1817 switch (TREE_CODE (type))
1818 {
1819 case VOID_TYPE: return void_type_class;
1820 case INTEGER_TYPE: return integer_type_class;
1821 case ENUMERAL_TYPE: return enumeral_type_class;
1822 case BOOLEAN_TYPE: return boolean_type_class;
1823 case POINTER_TYPE: return pointer_type_class;
1824 case REFERENCE_TYPE: return reference_type_class;
1825 case OFFSET_TYPE: return offset_type_class;
1826 case REAL_TYPE: return real_type_class;
1827 case COMPLEX_TYPE: return complex_type_class;
1828 case FUNCTION_TYPE: return function_type_class;
1829 case METHOD_TYPE: return method_type_class;
1830 case RECORD_TYPE: return record_type_class;
1831 case UNION_TYPE:
1832 case QUAL_UNION_TYPE: return union_type_class;
1833 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1834 ? string_type_class : array_type_class);
1835 case LANG_TYPE: return lang_type_class;
1836 default: return no_type_class;
1837 }
1838 }
1839
1840 /* Expand a call EXP to __builtin_classify_type. */
1841
1842 static rtx
1843 expand_builtin_classify_type (tree exp)
1844 {
1845 if (call_expr_nargs (exp))
1846 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1847 return GEN_INT (no_type_class);
1848 }
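/* For example, a call such as __builtin_classify_type (1.5) expands to the
   constant real_type_class, while a call with no arguments yields
   no_type_class.  */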
1849
1850 /* This helper macro, meant to be used in mathfn_built_in below,
1851 determines which among a set of three builtin math functions is
1852 appropriate for a given type mode. The `F' and `L' cases are
1853 automatically generated from the `double' case. */
1854 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1855 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1856 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1857 fcodel = BUILT_IN_MATHFN##L ; break;
1858 /* Similar to above, but appends _R after any F/L suffix. */
1859 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1860 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1861 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1862 fcodel = BUILT_IN_MATHFN##L_R ; break;
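/* For example, CASE_MATHFN (BUILT_IN_SQRT) expands to the three case labels
   BUILT_IN_SQRT, BUILT_IN_SQRTF and BUILT_IN_SQRTL and records those codes
   in fcode, fcodef and fcodel respectively.  */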
1863
1864 /* Return the mathematical function equivalent to FN, but operating directly on
1865 TYPE, if available. If IMPLICIT_P is true use the implicit builtin declaration,
1866 otherwise use the explicit declaration. If we can't do the conversion,
1867 return zero. */
1868
1869 static tree
1870 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit_p)
1871 {
1872 enum built_in_function fcode, fcodef, fcodel, fcode2;
1873
1874 switch (fn)
1875 {
1876 CASE_MATHFN (BUILT_IN_ACOS)
1877 CASE_MATHFN (BUILT_IN_ACOSH)
1878 CASE_MATHFN (BUILT_IN_ASIN)
1879 CASE_MATHFN (BUILT_IN_ASINH)
1880 CASE_MATHFN (BUILT_IN_ATAN)
1881 CASE_MATHFN (BUILT_IN_ATAN2)
1882 CASE_MATHFN (BUILT_IN_ATANH)
1883 CASE_MATHFN (BUILT_IN_CBRT)
1884 CASE_MATHFN (BUILT_IN_CEIL)
1885 CASE_MATHFN (BUILT_IN_CEXPI)
1886 CASE_MATHFN (BUILT_IN_COPYSIGN)
1887 CASE_MATHFN (BUILT_IN_COS)
1888 CASE_MATHFN (BUILT_IN_COSH)
1889 CASE_MATHFN (BUILT_IN_DREM)
1890 CASE_MATHFN (BUILT_IN_ERF)
1891 CASE_MATHFN (BUILT_IN_ERFC)
1892 CASE_MATHFN (BUILT_IN_EXP)
1893 CASE_MATHFN (BUILT_IN_EXP10)
1894 CASE_MATHFN (BUILT_IN_EXP2)
1895 CASE_MATHFN (BUILT_IN_EXPM1)
1896 CASE_MATHFN (BUILT_IN_FABS)
1897 CASE_MATHFN (BUILT_IN_FDIM)
1898 CASE_MATHFN (BUILT_IN_FLOOR)
1899 CASE_MATHFN (BUILT_IN_FMA)
1900 CASE_MATHFN (BUILT_IN_FMAX)
1901 CASE_MATHFN (BUILT_IN_FMIN)
1902 CASE_MATHFN (BUILT_IN_FMOD)
1903 CASE_MATHFN (BUILT_IN_FREXP)
1904 CASE_MATHFN (BUILT_IN_GAMMA)
1905 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1906 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1907 CASE_MATHFN (BUILT_IN_HYPOT)
1908 CASE_MATHFN (BUILT_IN_ILOGB)
1909 CASE_MATHFN (BUILT_IN_ICEIL)
1910 CASE_MATHFN (BUILT_IN_IFLOOR)
1911 CASE_MATHFN (BUILT_IN_INF)
1912 CASE_MATHFN (BUILT_IN_IRINT)
1913 CASE_MATHFN (BUILT_IN_IROUND)
1914 CASE_MATHFN (BUILT_IN_ISINF)
1915 CASE_MATHFN (BUILT_IN_J0)
1916 CASE_MATHFN (BUILT_IN_J1)
1917 CASE_MATHFN (BUILT_IN_JN)
1918 CASE_MATHFN (BUILT_IN_LCEIL)
1919 CASE_MATHFN (BUILT_IN_LDEXP)
1920 CASE_MATHFN (BUILT_IN_LFLOOR)
1921 CASE_MATHFN (BUILT_IN_LGAMMA)
1922 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1923 CASE_MATHFN (BUILT_IN_LLCEIL)
1924 CASE_MATHFN (BUILT_IN_LLFLOOR)
1925 CASE_MATHFN (BUILT_IN_LLRINT)
1926 CASE_MATHFN (BUILT_IN_LLROUND)
1927 CASE_MATHFN (BUILT_IN_LOG)
1928 CASE_MATHFN (BUILT_IN_LOG10)
1929 CASE_MATHFN (BUILT_IN_LOG1P)
1930 CASE_MATHFN (BUILT_IN_LOG2)
1931 CASE_MATHFN (BUILT_IN_LOGB)
1932 CASE_MATHFN (BUILT_IN_LRINT)
1933 CASE_MATHFN (BUILT_IN_LROUND)
1934 CASE_MATHFN (BUILT_IN_MODF)
1935 CASE_MATHFN (BUILT_IN_NAN)
1936 CASE_MATHFN (BUILT_IN_NANS)
1937 CASE_MATHFN (BUILT_IN_NEARBYINT)
1938 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1939 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1940 CASE_MATHFN (BUILT_IN_POW)
1941 CASE_MATHFN (BUILT_IN_POWI)
1942 CASE_MATHFN (BUILT_IN_POW10)
1943 CASE_MATHFN (BUILT_IN_REMAINDER)
1944 CASE_MATHFN (BUILT_IN_REMQUO)
1945 CASE_MATHFN (BUILT_IN_RINT)
1946 CASE_MATHFN (BUILT_IN_ROUND)
1947 CASE_MATHFN (BUILT_IN_SCALB)
1948 CASE_MATHFN (BUILT_IN_SCALBLN)
1949 CASE_MATHFN (BUILT_IN_SCALBN)
1950 CASE_MATHFN (BUILT_IN_SIGNBIT)
1951 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1952 CASE_MATHFN (BUILT_IN_SIN)
1953 CASE_MATHFN (BUILT_IN_SINCOS)
1954 CASE_MATHFN (BUILT_IN_SINH)
1955 CASE_MATHFN (BUILT_IN_SQRT)
1956 CASE_MATHFN (BUILT_IN_TAN)
1957 CASE_MATHFN (BUILT_IN_TANH)
1958 CASE_MATHFN (BUILT_IN_TGAMMA)
1959 CASE_MATHFN (BUILT_IN_TRUNC)
1960 CASE_MATHFN (BUILT_IN_Y0)
1961 CASE_MATHFN (BUILT_IN_Y1)
1962 CASE_MATHFN (BUILT_IN_YN)
1963
1964 default:
1965 return NULL_TREE;
1966 }
1967
1968 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1969 fcode2 = fcode;
1970 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1971 fcode2 = fcodef;
1972 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1973 fcode2 = fcodel;
1974 else
1975 return NULL_TREE;
1976
1977 if (implicit_p && !builtin_decl_implicit_p (fcode2))
1978 return NULL_TREE;
1979
1980 return builtin_decl_explicit (fcode2);
1981 }
1982
1983 /* Like mathfn_built_in_1(), but always use the implicit builtin declarations. */
1984
1985 tree
1986 mathfn_built_in (tree type, enum built_in_function fn)
1987 {
1988 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1989 }
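/* For example, mathfn_built_in (float_type_node, BUILT_IN_SIN) returns the
   declaration of BUILT_IN_SINF, provided sinf is implicitly available for
   the target.  */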
1990
1991 /* If errno must be maintained, expand the RTL to check if the result,
1992 TARGET, of a built-in function call, EXP, is NaN, and if so set
1993 errno to EDOM. */
1994
1995 static void
1996 expand_errno_check (tree exp, rtx target)
1997 {
1998 rtx_code_label *lab = gen_label_rtx ();
1999
2000 /* Test the result; if it is NaN, set errno=EDOM because
2001 the argument was not in the domain. */
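/* A value compares equal to itself unless it is a NaN, so the branch to
   LAB below is taken exactly when the result is not a NaN.  */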
2002 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
2003 NULL_RTX, NULL, lab,
2004 /* The jump is very likely. */
2005 REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));
2006
2007 #ifdef TARGET_EDOM
2008 /* If this built-in doesn't throw an exception, set errno directly. */
2009 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
2010 {
2011 #ifdef GEN_ERRNO_RTX
2012 rtx errno_rtx = GEN_ERRNO_RTX;
2013 #else
2014 rtx errno_rtx
2015 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
2016 #endif
2017 emit_move_insn (errno_rtx,
2018 gen_int_mode (TARGET_EDOM, GET_MODE (errno_rtx)));
2019 emit_label (lab);
2020 return;
2021 }
2022 #endif
2023
2024 /* Make sure the library call isn't expanded as a tail call. */
2025 CALL_EXPR_TAILCALL (exp) = 0;
2026
2027 /* We can't set errno=EDOM directly; let the library call do it.
2028 Pop the arguments right away in case the call gets deleted. */
2029 NO_DEFER_POP;
2030 expand_call (exp, target, 0);
2031 OK_DEFER_POP;
2032 emit_label (lab);
2033 }
2034
2035 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
2036 Return NULL_RTX if a normal call should be emitted rather than expanding
2037 the function in-line. EXP is the expression that is a call to the builtin
2038 function; if convenient, the result should be placed in TARGET.
2039 SUBTARGET may be used as the target for computing one of EXP's operands. */
2040
2041 static rtx
2042 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
2043 {
2044 optab builtin_optab;
2045 rtx op0;
2046 rtx_insn *insns;
2047 tree fndecl = get_callee_fndecl (exp);
2048 machine_mode mode;
2049 bool errno_set = false;
2050 bool try_widening = false;
2051 tree arg;
2052
2053 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2054 return NULL_RTX;
2055
2056 arg = CALL_EXPR_ARG (exp, 0);
2057
2058 switch (DECL_FUNCTION_CODE (fndecl))
2059 {
2060 CASE_FLT_FN (BUILT_IN_SQRT):
2061 errno_set = ! tree_expr_nonnegative_p (arg);
2062 try_widening = true;
2063 builtin_optab = sqrt_optab;
2064 break;
2065 CASE_FLT_FN (BUILT_IN_EXP):
2066 errno_set = true; builtin_optab = exp_optab; break;
2067 CASE_FLT_FN (BUILT_IN_EXP10):
2068 CASE_FLT_FN (BUILT_IN_POW10):
2069 errno_set = true; builtin_optab = exp10_optab; break;
2070 CASE_FLT_FN (BUILT_IN_EXP2):
2071 errno_set = true; builtin_optab = exp2_optab; break;
2072 CASE_FLT_FN (BUILT_IN_EXPM1):
2073 errno_set = true; builtin_optab = expm1_optab; break;
2074 CASE_FLT_FN (BUILT_IN_LOGB):
2075 errno_set = true; builtin_optab = logb_optab; break;
2076 CASE_FLT_FN (BUILT_IN_LOG):
2077 errno_set = true; builtin_optab = log_optab; break;
2078 CASE_FLT_FN (BUILT_IN_LOG10):
2079 errno_set = true; builtin_optab = log10_optab; break;
2080 CASE_FLT_FN (BUILT_IN_LOG2):
2081 errno_set = true; builtin_optab = log2_optab; break;
2082 CASE_FLT_FN (BUILT_IN_LOG1P):
2083 errno_set = true; builtin_optab = log1p_optab; break;
2084 CASE_FLT_FN (BUILT_IN_ASIN):
2085 builtin_optab = asin_optab; break;
2086 CASE_FLT_FN (BUILT_IN_ACOS):
2087 builtin_optab = acos_optab; break;
2088 CASE_FLT_FN (BUILT_IN_TAN):
2089 builtin_optab = tan_optab; break;
2090 CASE_FLT_FN (BUILT_IN_ATAN):
2091 builtin_optab = atan_optab; break;
2092 CASE_FLT_FN (BUILT_IN_FLOOR):
2093 builtin_optab = floor_optab; break;
2094 CASE_FLT_FN (BUILT_IN_CEIL):
2095 builtin_optab = ceil_optab; break;
2096 CASE_FLT_FN (BUILT_IN_TRUNC):
2097 builtin_optab = btrunc_optab; break;
2098 CASE_FLT_FN (BUILT_IN_ROUND):
2099 builtin_optab = round_optab; break;
2100 CASE_FLT_FN (BUILT_IN_NEARBYINT):
2101 builtin_optab = nearbyint_optab;
2102 if (flag_trapping_math)
2103 break;
2104 /* Else fallthrough and expand as rint. */
2105 CASE_FLT_FN (BUILT_IN_RINT):
2106 builtin_optab = rint_optab; break;
2107 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
2108 builtin_optab = significand_optab; break;
2109 default:
2110 gcc_unreachable ();
2111 }
2112
2113 /* Make a suitable register to place result in. */
2114 mode = TYPE_MODE (TREE_TYPE (exp));
2115
2116 if (! flag_errno_math || ! HONOR_NANS (mode))
2117 errno_set = false;
2118
2119 /* Before working hard, check whether the instruction is available, but try
2120 to widen the mode for specific operations. */
2121 if ((optab_handler (builtin_optab, mode) != CODE_FOR_nothing
2122 || (try_widening && !excess_precision_type (TREE_TYPE (exp))))
2123 && (!errno_set || !optimize_insn_for_size_p ()))
2124 {
2125 rtx result = gen_reg_rtx (mode);
2126
2127 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2128 need to expand the argument again. This way, we will not perform
2129 side-effects more than once. */
2130 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2131
2132 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2133
2134 start_sequence ();
2135
2136 /* Compute into RESULT.
2137 Set RESULT to wherever the result comes back. */
2138 result = expand_unop (mode, builtin_optab, op0, result, 0);
2139
2140 if (result != 0)
2141 {
2142 if (errno_set)
2143 expand_errno_check (exp, result);
2144
2145 /* Output the entire sequence. */
2146 insns = get_insns ();
2147 end_sequence ();
2148 emit_insn (insns);
2149 return result;
2150 }
2151
2152 /* If we were unable to expand via the builtin, stop the sequence
2153 (without outputting the insns) and call to the library function
2154 with the stabilized argument list. */
2155 end_sequence ();
2156 }
2157
2158 return expand_call (exp, target, target == const0_rtx);
2159 }
2160
2161 /* Expand a call to the builtin binary math functions (pow and atan2).
2162 Return NULL_RTX if a normal call should be emitted rather than expanding the
2163 function in-line. EXP is the expression that is a call to the builtin
2164 function; if convenient, the result should be placed in TARGET.
2165 SUBTARGET may be used as the target for computing one of EXP's
2166 operands. */
2167
2168 static rtx
2169 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2170 {
2171 optab builtin_optab;
2172 rtx op0, op1, result;
2173 rtx_insn *insns;
2174 int op1_type = REAL_TYPE;
2175 tree fndecl = get_callee_fndecl (exp);
2176 tree arg0, arg1;
2177 machine_mode mode;
2178 bool errno_set = true;
2179
2180 switch (DECL_FUNCTION_CODE (fndecl))
2181 {
2182 CASE_FLT_FN (BUILT_IN_SCALBN):
2183 CASE_FLT_FN (BUILT_IN_SCALBLN):
2184 CASE_FLT_FN (BUILT_IN_LDEXP):
2185 op1_type = INTEGER_TYPE;
2186 default:
2187 break;
2188 }
2189
2190 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2191 return NULL_RTX;
2192
2193 arg0 = CALL_EXPR_ARG (exp, 0);
2194 arg1 = CALL_EXPR_ARG (exp, 1);
2195
2196 switch (DECL_FUNCTION_CODE (fndecl))
2197 {
2198 CASE_FLT_FN (BUILT_IN_POW):
2199 builtin_optab = pow_optab; break;
2200 CASE_FLT_FN (BUILT_IN_ATAN2):
2201 builtin_optab = atan2_optab; break;
2202 CASE_FLT_FN (BUILT_IN_SCALB):
2203 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2204 return 0;
2205 builtin_optab = scalb_optab; break;
2206 CASE_FLT_FN (BUILT_IN_SCALBN):
2207 CASE_FLT_FN (BUILT_IN_SCALBLN):
2208 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2209 return 0;
2210 /* Fall through... */
2211 CASE_FLT_FN (BUILT_IN_LDEXP):
2212 builtin_optab = ldexp_optab; break;
2213 CASE_FLT_FN (BUILT_IN_FMOD):
2214 builtin_optab = fmod_optab; break;
2215 CASE_FLT_FN (BUILT_IN_REMAINDER):
2216 CASE_FLT_FN (BUILT_IN_DREM):
2217 builtin_optab = remainder_optab; break;
2218 default:
2219 gcc_unreachable ();
2220 }
2221
2222 /* Make a suitable register to place result in. */
2223 mode = TYPE_MODE (TREE_TYPE (exp));
2224
2225 /* Before working hard, check whether the instruction is available. */
2226 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2227 return NULL_RTX;
2228
2229 result = gen_reg_rtx (mode);
2230
2231 if (! flag_errno_math || ! HONOR_NANS (mode))
2232 errno_set = false;
2233
2234 if (errno_set && optimize_insn_for_size_p ())
2235 return 0;
2236
2237 /* Always stabilize the argument list. */
2238 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2239 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2240
2241 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2242 op1 = expand_normal (arg1);
2243
2244 start_sequence ();
2245
2246 /* Compute into RESULT.
2247 Set RESULT to wherever the result comes back. */
2248 result = expand_binop (mode, builtin_optab, op0, op1,
2249 result, 0, OPTAB_DIRECT);
2250
2251 /* If we were unable to expand via the builtin, stop the sequence
2252 (without outputting the insns) and call to the library function
2253 with the stabilized argument list. */
2254 if (result == 0)
2255 {
2256 end_sequence ();
2257 return expand_call (exp, target, target == const0_rtx);
2258 }
2259
2260 if (errno_set)
2261 expand_errno_check (exp, result);
2262
2263 /* Output the entire sequence. */
2264 insns = get_insns ();
2265 end_sequence ();
2266 emit_insn (insns);
2267
2268 return result;
2269 }
2270
2271 /* Expand a call to the builtin trinary math functions (fma).
2272 Return NULL_RTX if a normal call should be emitted rather than expanding the
2273 function in-line. EXP is the expression that is a call to the builtin
2274 function; if convenient, the result should be placed in TARGET.
2275 SUBTARGET may be used as the target for computing one of EXP's
2276 operands. */
2277
2278 static rtx
2279 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2280 {
2281 optab builtin_optab;
2282 rtx op0, op1, op2, result;
2283 rtx_insn *insns;
2284 tree fndecl = get_callee_fndecl (exp);
2285 tree arg0, arg1, arg2;
2286 machine_mode mode;
2287
2288 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2289 return NULL_RTX;
2290
2291 arg0 = CALL_EXPR_ARG (exp, 0);
2292 arg1 = CALL_EXPR_ARG (exp, 1);
2293 arg2 = CALL_EXPR_ARG (exp, 2);
2294
2295 switch (DECL_FUNCTION_CODE (fndecl))
2296 {
2297 CASE_FLT_FN (BUILT_IN_FMA):
2298 builtin_optab = fma_optab; break;
2299 default:
2300 gcc_unreachable ();
2301 }
2302
2303 /* Make a suitable register to place result in. */
2304 mode = TYPE_MODE (TREE_TYPE (exp));
2305
2306 /* Before working hard, check whether the instruction is available. */
2307 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2308 return NULL_RTX;
2309
2310 result = gen_reg_rtx (mode);
2311
2312 /* Always stabilize the argument list. */
2313 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2314 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2315 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2316
2317 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2318 op1 = expand_normal (arg1);
2319 op2 = expand_normal (arg2);
2320
2321 start_sequence ();
2322
2323 /* Compute into RESULT.
2324 Set RESULT to wherever the result comes back. */
2325 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2326 result, 0);
2327
2328 /* If we were unable to expand via the builtin, stop the sequence
2329 (without outputting the insns) and call to the library function
2330 with the stabilized argument list. */
2331 if (result == 0)
2332 {
2333 end_sequence ();
2334 return expand_call (exp, target, target == const0_rtx);
2335 }
2336
2337 /* Output the entire sequence. */
2338 insns = get_insns ();
2339 end_sequence ();
2340 emit_insn (insns);
2341
2342 return result;
2343 }
2344
2345 /* Expand a call to the builtin sin and cos math functions.
2346 Return NULL_RTX if a normal call should be emitted rather than expanding the
2347 function in-line. EXP is the expression that is a call to the builtin
2348 function; if convenient, the result should be placed in TARGET.
2349 SUBTARGET may be used as the target for computing one of EXP's
2350 operands. */
2351
2352 static rtx
2353 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2354 {
2355 optab builtin_optab;
2356 rtx op0;
2357 rtx_insn *insns;
2358 tree fndecl = get_callee_fndecl (exp);
2359 machine_mode mode;
2360 tree arg;
2361
2362 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2363 return NULL_RTX;
2364
2365 arg = CALL_EXPR_ARG (exp, 0);
2366
2367 switch (DECL_FUNCTION_CODE (fndecl))
2368 {
2369 CASE_FLT_FN (BUILT_IN_SIN):
2370 CASE_FLT_FN (BUILT_IN_COS):
2371 builtin_optab = sincos_optab; break;
2372 default:
2373 gcc_unreachable ();
2374 }
2375
2376 /* Make a suitable register to place result in. */
2377 mode = TYPE_MODE (TREE_TYPE (exp));
2378
2379 /* Check if the sincos insn is available; otherwise fall back
2380 to the sin or cos insn. */
2381 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2382 switch (DECL_FUNCTION_CODE (fndecl))
2383 {
2384 CASE_FLT_FN (BUILT_IN_SIN):
2385 builtin_optab = sin_optab; break;
2386 CASE_FLT_FN (BUILT_IN_COS):
2387 builtin_optab = cos_optab; break;
2388 default:
2389 gcc_unreachable ();
2390 }
2391
2392 /* Before working hard, check whether the instruction is available. */
2393 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2394 {
2395 rtx result = gen_reg_rtx (mode);
2396
2397 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2398 need to expand the argument again. This way, we will not perform
2399 side-effects more than once. */
2400 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2401
2402 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2403
2404 start_sequence ();
2405
2406 /* Compute into RESULT.
2407 Set RESULT to wherever the result comes back. */
2408 if (builtin_optab == sincos_optab)
2409 {
2410 int ok;
2411
2412 switch (DECL_FUNCTION_CODE (fndecl))
2413 {
2414 CASE_FLT_FN (BUILT_IN_SIN):
2415 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2416 break;
2417 CASE_FLT_FN (BUILT_IN_COS):
2418 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2419 break;
2420 default:
2421 gcc_unreachable ();
2422 }
2423 gcc_assert (ok);
2424 }
2425 else
2426 result = expand_unop (mode, builtin_optab, op0, result, 0);
2427
2428 if (result != 0)
2429 {
2430 /* Output the entire sequence. */
2431 insns = get_insns ();
2432 end_sequence ();
2433 emit_insn (insns);
2434 return result;
2435 }
2436
2437 /* If we were unable to expand via the builtin, stop the sequence
2438 (without outputting the insns) and call to the library function
2439 with the stabilized argument list. */
2440 end_sequence ();
2441 }
2442
2443 return expand_call (exp, target, target == const0_rtx);
2444 }
2445
2446 /* Given an interclass math builtin decl FNDECL and its argument ARG,
2447 return an RTL instruction code that implements the functionality.
2448 If that isn't possible or available return CODE_FOR_nothing. */
2449
2450 static enum insn_code
2451 interclass_mathfn_icode (tree arg, tree fndecl)
2452 {
2453 bool errno_set = false;
2454 optab builtin_optab = unknown_optab;
2455 machine_mode mode;
2456
2457 switch (DECL_FUNCTION_CODE (fndecl))
2458 {
2459 CASE_FLT_FN (BUILT_IN_ILOGB):
2460 errno_set = true; builtin_optab = ilogb_optab; break;
2461 CASE_FLT_FN (BUILT_IN_ISINF):
2462 builtin_optab = isinf_optab; break;
2463 case BUILT_IN_ISNORMAL:
2464 case BUILT_IN_ISFINITE:
2465 CASE_FLT_FN (BUILT_IN_FINITE):
2466 case BUILT_IN_FINITED32:
2467 case BUILT_IN_FINITED64:
2468 case BUILT_IN_FINITED128:
2469 case BUILT_IN_ISINFD32:
2470 case BUILT_IN_ISINFD64:
2471 case BUILT_IN_ISINFD128:
2472 /* These builtins have no optabs (yet). */
2473 break;
2474 default:
2475 gcc_unreachable ();
2476 }
2477
2478 /* There's no easy way to detect the case we need to set EDOM. */
2479 if (flag_errno_math && errno_set)
2480 return CODE_FOR_nothing;
2481
2482 /* Optab mode depends on the mode of the input argument. */
2483 mode = TYPE_MODE (TREE_TYPE (arg));
2484
2485 if (builtin_optab)
2486 return optab_handler (builtin_optab, mode);
2487 return CODE_FOR_nothing;
2488 }
2489
2490 /* Expand a call to one of the builtin math functions that operate on
2491 a floating point argument and produce an integer result (ilogb, isinf,
2492 isnan, etc.).
2493 Return 0 if a normal call should be emitted rather than expanding the
2494 function in-line. EXP is the expression that is a call to the builtin
2495 function; if convenient, the result should be placed in TARGET. */
2496
2497 static rtx
2498 expand_builtin_interclass_mathfn (tree exp, rtx target)
2499 {
2500 enum insn_code icode = CODE_FOR_nothing;
2501 rtx op0;
2502 tree fndecl = get_callee_fndecl (exp);
2503 machine_mode mode;
2504 tree arg;
2505
2506 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2507 return NULL_RTX;
2508
2509 arg = CALL_EXPR_ARG (exp, 0);
2510 icode = interclass_mathfn_icode (arg, fndecl);
2511 mode = TYPE_MODE (TREE_TYPE (arg));
2512
2513 if (icode != CODE_FOR_nothing)
2514 {
2515 struct expand_operand ops[1];
2516 rtx_insn *last = get_last_insn ();
2517 tree orig_arg = arg;
2518
2519 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2520 need to expand the argument again. This way, we will not perform
2521 side-effects more than once. */
2522 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2523
2524 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2525
2526 if (mode != GET_MODE (op0))
2527 op0 = convert_to_mode (mode, op0, 0);
2528
2529 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2530 if (maybe_legitimize_operands (icode, 0, 1, ops)
2531 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2532 return ops[0].value;
2533
2534 delete_insns_since (last);
2535 CALL_EXPR_ARG (exp, 0) = orig_arg;
2536 }
2537
2538 return NULL_RTX;
2539 }
2540
2541 /* Expand a call to the builtin sincos math function.
2542 Return NULL_RTX if a normal call should be emitted rather than expanding the
2543 function in-line. EXP is the expression that is a call to the builtin
2544 function. */
2545
2546 static rtx
2547 expand_builtin_sincos (tree exp)
2548 {
2549 rtx op0, op1, op2, target1, target2;
2550 machine_mode mode;
2551 tree arg, sinp, cosp;
2552 int result;
2553 location_t loc = EXPR_LOCATION (exp);
2554 tree alias_type, alias_off;
2555
2556 if (!validate_arglist (exp, REAL_TYPE,
2557 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2558 return NULL_RTX;
2559
2560 arg = CALL_EXPR_ARG (exp, 0);
2561 sinp = CALL_EXPR_ARG (exp, 1);
2562 cosp = CALL_EXPR_ARG (exp, 2);
2563
2564 /* Make a suitable register to place result in. */
2565 mode = TYPE_MODE (TREE_TYPE (arg));
2566
2567 /* Check if sincos insn is available, otherwise emit the call. */
2568 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2569 return NULL_RTX;
2570
2571 target1 = gen_reg_rtx (mode);
2572 target2 = gen_reg_rtx (mode);
2573
2574 op0 = expand_normal (arg);
2575 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2576 alias_off = build_int_cst (alias_type, 0);
2577 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2578 sinp, alias_off));
2579 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2580 cosp, alias_off));
2581
2582 /* Compute into target1 and target2.
2583 Set TARGET to wherever the result comes back. */
2584 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2585 gcc_assert (result);
2586
2587 /* Move target1 and target2 to the memory locations indicated
2588 by op1 and op2. */
2589 emit_move_insn (op1, target1);
2590 emit_move_insn (op2, target2);
2591
2592 return const0_rtx;
2593 }
2594
2595 /* Expand a call to the internal cexpi builtin to the sincos math function.
2596 EXP is the expression that is a call to the builtin function; if convenient,
2597 the result should be placed in TARGET. */
2598
2599 static rtx
2600 expand_builtin_cexpi (tree exp, rtx target)
2601 {
2602 tree fndecl = get_callee_fndecl (exp);
2603 tree arg, type;
2604 machine_mode mode;
2605 rtx op0, op1, op2;
2606 location_t loc = EXPR_LOCATION (exp);
2607
2608 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2609 return NULL_RTX;
2610
2611 arg = CALL_EXPR_ARG (exp, 0);
2612 type = TREE_TYPE (arg);
2613 mode = TYPE_MODE (TREE_TYPE (arg));
2614
2615 /* Try expanding via a sincos optab, fall back to emitting a libcall
2616 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2617 is only generated from sincos or cexp, or when one of them is available. */
2618 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2619 {
2620 op1 = gen_reg_rtx (mode);
2621 op2 = gen_reg_rtx (mode);
2622
2623 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2624
2625 /* Compute into op1 and op2. */
2626 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
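/* OP2 and OP1 now hold the two results of the sincos expansion; below they
   become the real and imaginary parts respectively of the returned complex
   value, matching cexpi (x) == cos (x) + i * sin (x).  */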
2627 }
2628 else if (targetm.libc_has_function (function_sincos))
2629 {
2630 tree call, fn = NULL_TREE;
2631 tree top1, top2;
2632 rtx op1a, op2a;
2633
2634 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2635 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2636 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2637 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2638 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2639 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2640 else
2641 gcc_unreachable ();
2642
2643 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2644 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2645 op1a = copy_addr_to_reg (XEXP (op1, 0));
2646 op2a = copy_addr_to_reg (XEXP (op2, 0));
2647 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2648 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2649
2650 /* Make sure not to fold the sincos call again. */
2651 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2652 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2653 call, 3, arg, top1, top2));
2654 }
2655 else
2656 {
2657 tree call, fn = NULL_TREE, narg;
2658 tree ctype = build_complex_type (type);
2659
2660 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2661 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2662 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2663 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2664 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2665 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2666 else
2667 gcc_unreachable ();
2668
2669 /* If we don't have a decl for cexp create one. This is the
2670 friendliest fallback if the user calls __builtin_cexpi on a
2671 target without full C99 function support. */
2672 if (fn == NULL_TREE)
2673 {
2674 tree fntype;
2675 const char *name = NULL;
2676
2677 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2678 name = "cexpf";
2679 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2680 name = "cexp";
2681 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2682 name = "cexpl";
2683
2684 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2685 fn = build_fn_decl (name, fntype);
2686 }
2687
2688 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2689 build_real (type, dconst0), arg);
2690
2691 /* Make sure not to fold the cexp call again. */
2692 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2693 return expand_expr (build_call_nary (ctype, call, 1, narg),
2694 target, VOIDmode, EXPAND_NORMAL);
2695 }
2696
2697 /* Now build the proper return type. */
2698 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2699 make_tree (TREE_TYPE (arg), op2),
2700 make_tree (TREE_TYPE (arg), op1)),
2701 target, VOIDmode, EXPAND_NORMAL);
2702 }
2703
2704 /* Conveniently construct a function call expression. FNDECL names the
2705 function to be called, N is the number of arguments, and the "..."
2706 parameters are the argument expressions. Unlike build_call_expr
2707 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2708
2709 static tree
2710 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2711 {
2712 va_list ap;
2713 tree fntype = TREE_TYPE (fndecl);
2714 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2715
2716 va_start (ap, n);
2717 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2718 va_end (ap);
2719 SET_EXPR_LOCATION (fn, loc);
2720 return fn;
2721 }
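/* For example, build_call_nofold_loc (loc, fndecl, 2, a, b) yields an
   unfolded CALL_EXPR equivalent to fndecl (a, b) at location LOC.  */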
2722
2723 /* Expand a call to one of the builtin rounding functions gcc defines
2724 as an extension (lfloor and lceil). As these are gcc extensions we
2725 do not need to worry about setting errno to EDOM.
2726 If expanding via optab fails, lower expression to (int)(floor(x)).
2727 EXP is the expression that is a call to the builtin function;
2728 if convenient, the result should be placed in TARGET. */
2729
2730 static rtx
2731 expand_builtin_int_roundingfn (tree exp, rtx target)
2732 {
2733 convert_optab builtin_optab;
2734 rtx op0, tmp;
2735 rtx_insn *insns;
2736 tree fndecl = get_callee_fndecl (exp);
2737 enum built_in_function fallback_fn;
2738 tree fallback_fndecl;
2739 machine_mode mode;
2740 tree arg;
2741
2742 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2743 gcc_unreachable ();
2744
2745 arg = CALL_EXPR_ARG (exp, 0);
2746
2747 switch (DECL_FUNCTION_CODE (fndecl))
2748 {
2749 CASE_FLT_FN (BUILT_IN_ICEIL):
2750 CASE_FLT_FN (BUILT_IN_LCEIL):
2751 CASE_FLT_FN (BUILT_IN_LLCEIL):
2752 builtin_optab = lceil_optab;
2753 fallback_fn = BUILT_IN_CEIL;
2754 break;
2755
2756 CASE_FLT_FN (BUILT_IN_IFLOOR):
2757 CASE_FLT_FN (BUILT_IN_LFLOOR):
2758 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2759 builtin_optab = lfloor_optab;
2760 fallback_fn = BUILT_IN_FLOOR;
2761 break;
2762
2763 default:
2764 gcc_unreachable ();
2765 }
2766
2767 /* Make a suitable register to place result in. */
2768 mode = TYPE_MODE (TREE_TYPE (exp));
2769
2770 target = gen_reg_rtx (mode);
2771
2772 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2773 need to expand the argument again. This way, we will not perform
2774 side-effects more than once. */
2775 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2776
2777 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2778
2779 start_sequence ();
2780
2781 /* Compute into TARGET. */
2782 if (expand_sfix_optab (target, op0, builtin_optab))
2783 {
2784 /* Output the entire sequence. */
2785 insns = get_insns ();
2786 end_sequence ();
2787 emit_insn (insns);
2788 return target;
2789 }
2790
2791 /* If we were unable to expand via the builtin, stop the sequence
2792 (without outputting the insns). */
2793 end_sequence ();
2794
2795 /* Fall back to floating point rounding optab. */
2796 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2797
2798 /* For non-C99 targets we may end up without a fallback fndecl here
2799 if the user called __builtin_lfloor directly. In this case emit
2800 a call to the floor/ceil variants nevertheless. This should result
2801 in the best user experience for targets lacking full C99 support. */
2802 if (fallback_fndecl == NULL_TREE)
2803 {
2804 tree fntype;
2805 const char *name = NULL;
2806
2807 switch (DECL_FUNCTION_CODE (fndecl))
2808 {
2809 case BUILT_IN_ICEIL:
2810 case BUILT_IN_LCEIL:
2811 case BUILT_IN_LLCEIL:
2812 name = "ceil";
2813 break;
2814 case BUILT_IN_ICEILF:
2815 case BUILT_IN_LCEILF:
2816 case BUILT_IN_LLCEILF:
2817 name = "ceilf";
2818 break;
2819 case BUILT_IN_ICEILL:
2820 case BUILT_IN_LCEILL:
2821 case BUILT_IN_LLCEILL:
2822 name = "ceill";
2823 break;
2824 case BUILT_IN_IFLOOR:
2825 case BUILT_IN_LFLOOR:
2826 case BUILT_IN_LLFLOOR:
2827 name = "floor";
2828 break;
2829 case BUILT_IN_IFLOORF:
2830 case BUILT_IN_LFLOORF:
2831 case BUILT_IN_LLFLOORF:
2832 name = "floorf";
2833 break;
2834 case BUILT_IN_IFLOORL:
2835 case BUILT_IN_LFLOORL:
2836 case BUILT_IN_LLFLOORL:
2837 name = "floorl";
2838 break;
2839 default:
2840 gcc_unreachable ();
2841 }
2842
2843 fntype = build_function_type_list (TREE_TYPE (arg),
2844 TREE_TYPE (arg), NULL_TREE);
2845 fallback_fndecl = build_fn_decl (name, fntype);
2846 }
2847
2848 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2849
2850 tmp = expand_normal (exp);
2851 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2852
2853 /* Truncate the result of floating point optab to integer
2854 via expand_fix (). */
2855 target = gen_reg_rtx (mode);
2856 expand_fix (target, tmp, 0);
2857
2858 return target;
2859 }
2860
2861 /* Expand a call to one of the builtin math functions doing integer
2862 conversion (lrint).
2863 Return 0 if a normal call should be emitted rather than expanding the
2864 function in-line. EXP is the expression that is a call to the builtin
2865 function; if convenient, the result should be placed in TARGET. */
2866
2867 static rtx
2868 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2869 {
2870 convert_optab builtin_optab;
2871 rtx op0;
2872 rtx_insn *insns;
2873 tree fndecl = get_callee_fndecl (exp);
2874 tree arg;
2875 machine_mode mode;
2876 enum built_in_function fallback_fn = BUILT_IN_NONE;
2877
2878 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2879 gcc_unreachable ();
2880
2881 arg = CALL_EXPR_ARG (exp, 0);
2882
2883 switch (DECL_FUNCTION_CODE (fndecl))
2884 {
2885 CASE_FLT_FN (BUILT_IN_IRINT):
2886 fallback_fn = BUILT_IN_LRINT;
2887 /* FALLTHRU */
2888 CASE_FLT_FN (BUILT_IN_LRINT):
2889 CASE_FLT_FN (BUILT_IN_LLRINT):
2890 builtin_optab = lrint_optab;
2891 break;
2892
2893 CASE_FLT_FN (BUILT_IN_IROUND):
2894 fallback_fn = BUILT_IN_LROUND;
2895 /* FALLTHRU */
2896 CASE_FLT_FN (BUILT_IN_LROUND):
2897 CASE_FLT_FN (BUILT_IN_LLROUND):
2898 builtin_optab = lround_optab;
2899 break;
2900
2901 default:
2902 gcc_unreachable ();
2903 }
2904
2905 /* There's no easy way to detect the case we need to set EDOM. */
2906 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2907 return NULL_RTX;
2908
2909 /* Make a suitable register to place result in. */
2910 mode = TYPE_MODE (TREE_TYPE (exp));
2911
2912 /* There's no easy way to detect the case we need to set EDOM. */
2913 if (!flag_errno_math)
2914 {
2915 rtx result = gen_reg_rtx (mode);
2916
2917 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2918 need to expand the argument again. This way, we will not perform
2919 side-effects more than once. */
2920 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2921
2922 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2923
2924 start_sequence ();
2925
2926 if (expand_sfix_optab (result, op0, builtin_optab))
2927 {
2928 /* Output the entire sequence. */
2929 insns = get_insns ();
2930 end_sequence ();
2931 emit_insn (insns);
2932 return result;
2933 }
2934
2935 /* If we were unable to expand via the builtin, stop the sequence
2936 (without outputting the insns) and call to the library function
2937 with the stabilized argument list. */
2938 end_sequence ();
2939 }
2940
2941 if (fallback_fn != BUILT_IN_NONE)
2942 {
2943 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2944 targets, (int) round (x) should never be transformed into
2945 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2946 a call to lround in the hope that the target provides at least some
2947 C99 functions. This should result in the best user experience for
2948 targets lacking full C99 support. */
2949 tree fallback_fndecl = mathfn_built_in_1 (TREE_TYPE (arg),
2950 fallback_fn, 0);
2951
2952 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2953 fallback_fndecl, 1, arg);
2954
2955 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2956 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2957 return convert_to_mode (mode, target, 0);
2958 }
2959
2960 return expand_call (exp, target, target == const0_rtx);
2961 }
2962
2963 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2964 a normal call should be emitted rather than expanding the function
2965 in-line. EXP is the expression that is a call to the builtin
2966 function; if convenient, the result should be placed in TARGET. */
2967
2968 static rtx
2969 expand_builtin_powi (tree exp, rtx target)
2970 {
2971 tree arg0, arg1;
2972 rtx op0, op1;
2973 machine_mode mode;
2974 machine_mode mode2;
2975
2976 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2977 return NULL_RTX;
2978
2979 arg0 = CALL_EXPR_ARG (exp, 0);
2980 arg1 = CALL_EXPR_ARG (exp, 1);
2981 mode = TYPE_MODE (TREE_TYPE (exp));
2982
2983 /* Emit a libcall to libgcc. */
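/* The call is lowered to the library helper registered for powi_optab in
   MODE (typically the libgcc routines such as __powidf2 for DFmode).  */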
2984
2985 /* Mode of the 2nd argument must match that of an int. */
2986 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2987
2988 if (target == NULL_RTX)
2989 target = gen_reg_rtx (mode);
2990
2991 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2992 if (GET_MODE (op0) != mode)
2993 op0 = convert_to_mode (mode, op0, 0);
2994 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2995 if (GET_MODE (op1) != mode2)
2996 op1 = convert_to_mode (mode2, op1, 0);
2997
2998 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2999 target, LCT_CONST, mode, 2,
3000 op0, mode, op1, mode2);
3001
3002 return target;
3003 }
3004
3005 /* Expand expression EXP which is a call to the strlen builtin. Return
3006 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
3007 try to get the result in TARGET, if convenient. */
3008
3009 static rtx
3010 expand_builtin_strlen (tree exp, rtx target,
3011 machine_mode target_mode)
3012 {
3013 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3014 return NULL_RTX;
3015 else
3016 {
3017 struct expand_operand ops[4];
3018 rtx pat;
3019 tree len;
3020 tree src = CALL_EXPR_ARG (exp, 0);
3021 rtx src_reg;
3022 rtx_insn *before_strlen;
3023 machine_mode insn_mode = target_mode;
3024 enum insn_code icode = CODE_FOR_nothing;
3025 unsigned int align;
3026
3027 /* If the length can be computed at compile-time, return it. */
3028 len = c_strlen (src, 0);
3029 if (len)
3030 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3031
3032 /* If the length can be computed at compile-time and is a constant
3033 integer, but there are side-effects in src, evaluate
3034 src for side-effects, then return len.
3035 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3036 can be optimized into: i++; x = 3; */
3037 len = c_strlen (src, 1);
3038 if (len && TREE_CODE (len) == INTEGER_CST)
3039 {
3040 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3041 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3042 }
3043
3044 align = get_pointer_alignment (src) / BITS_PER_UNIT;
3045
3046 /* If SRC is not a pointer type, don't do this operation inline. */
3047 if (align == 0)
3048 return NULL_RTX;
3049
3050 /* Bail out if we can't compute strlen in the right mode. */
3051 while (insn_mode != VOIDmode)
3052 {
3053 icode = optab_handler (strlen_optab, insn_mode);
3054 if (icode != CODE_FOR_nothing)
3055 break;
3056
3057 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3058 }
3059 if (insn_mode == VOIDmode)
3060 return NULL_RTX;
3061
3062 /* Make a place to hold the source address. We will not expand
3063 the actual source until we are sure that the expansion will
3064 not fail -- there are trees that cannot be expanded twice. */
3065 src_reg = gen_reg_rtx (Pmode);
3066
3067 /* Mark the beginning of the strlen sequence so we can emit the
3068 source operand later. */
3069 before_strlen = get_last_insn ();
3070
3071 create_output_operand (&ops[0], target, insn_mode);
3072 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3073 create_integer_operand (&ops[2], 0);
3074 create_integer_operand (&ops[3], align);
3075 if (!maybe_expand_insn (icode, 4, ops))
3076 return NULL_RTX;
3077
3078 /* Now that we are assured of success, expand the source. */
3079 start_sequence ();
3080 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3081 if (pat != src_reg)
3082 {
3083 #ifdef POINTERS_EXTEND_UNSIGNED
3084 if (GET_MODE (pat) != Pmode)
3085 pat = convert_to_mode (Pmode, pat,
3086 POINTERS_EXTEND_UNSIGNED);
3087 #endif
3088 emit_move_insn (src_reg, pat);
3089 }
3090 pat = get_insns ();
3091 end_sequence ();
3092
3093 if (before_strlen)
3094 emit_insn_after (pat, before_strlen);
3095 else
3096 emit_insn_before (pat, get_insns ());
3097
3098 /* Return the value in the proper mode for this function. */
3099 if (GET_MODE (ops[0].value) == target_mode)
3100 target = ops[0].value;
3101 else if (target != 0)
3102 convert_move (target, ops[0].value, 0);
3103 else
3104 target = convert_to_mode (target_mode, ops[0].value, 0);
3105
3106 return target;
3107 }
3108 }
3109
3110 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3111 bytes from constant string DATA + OFFSET and return it as target
3112 constant. */
3113
3114 static rtx
3115 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3116 machine_mode mode)
3117 {
3118 const char *str = (const char *) data;
3119
3120 gcc_assert (offset >= 0
3121 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3122 <= strlen (str) + 1));
3123
3124 return c_readstr (str + offset, mode);
3125 }
3126
3127 /* LEN specifies the length of the block for the memcpy/memset operation.
3128 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
3129 In some cases we can make a very likely guess about the maximum size,
3130 which we then store into PROBABLE_MAX_SIZE. */
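/* For example, if VRP has recorded that the SSA_NAME LEN lies in the range
   [4, 32], both *MIN_SIZE and *MAX_SIZE reflect those bounds, which lets
   the block-move expansion pick a fixed-size code path.  */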
3131
3132 static void
3133 determine_block_size (tree len, rtx len_rtx,
3134 unsigned HOST_WIDE_INT *min_size,
3135 unsigned HOST_WIDE_INT *max_size,
3136 unsigned HOST_WIDE_INT *probable_max_size)
3137 {
3138 if (CONST_INT_P (len_rtx))
3139 {
3140 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3141 return;
3142 }
3143 else
3144 {
3145 wide_int min, max;
3146 enum value_range_type range_type = VR_UNDEFINED;
3147
3148 /* Determine bounds from the type. */
3149 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3150 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3151 else
3152 *min_size = 0;
3153 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3154 *probable_max_size = *max_size
3155 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3156 else
3157 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3158
3159 if (TREE_CODE (len) == SSA_NAME)
3160 range_type = get_range_info (len, &min, &max);
3161 if (range_type == VR_RANGE)
3162 {
3163 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3164 *min_size = min.to_uhwi ();
3165 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3166 *probable_max_size = *max_size = max.to_uhwi ();
3167 }
3168 else if (range_type == VR_ANTI_RANGE)
3169 {
3170 /* An anti range 0...N lets us determine that the minimal size is N+1. */
3171 if (min == 0)
3172 {
3173 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
3174 *min_size = max.to_uhwi () + 1;
3175 }
3176 /* Code like
3177
3178 int n;
3179 if (n < 100)
3180 memcpy (a, b, n)
3181
3182 produces an anti range allowing negative values of N. We can
3183 still use this information to guess that N is not negative.
3184 */
3185 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3186 *probable_max_size = min.to_uhwi () - 1;
3187 }
3188 }
3189 gcc_checking_assert (*max_size <=
3190 (unsigned HOST_WIDE_INT)
3191 GET_MODE_MASK (GET_MODE (len_rtx)));
3192 }
3193
3194 /* Helper function to do the actual work for expand_builtin_memcpy. */
3195
3196 static rtx
3197 expand_builtin_memcpy_args (tree dest, tree src, tree len, rtx target, tree exp)
3198 {
3199 const char *src_str;
3200 unsigned int src_align = get_pointer_alignment (src);
3201 unsigned int dest_align = get_pointer_alignment (dest);
3202 rtx dest_mem, src_mem, dest_addr, len_rtx;
3203 HOST_WIDE_INT expected_size = -1;
3204 unsigned int expected_align = 0;
3205 unsigned HOST_WIDE_INT min_size;
3206 unsigned HOST_WIDE_INT max_size;
3207 unsigned HOST_WIDE_INT probable_max_size;
3208
3209 /* If DEST is not a pointer type, call the normal function. */
3210 if (dest_align == 0)
3211 return NULL_RTX;
3212
3213 /* If SRC is not a pointer type, don't do this
3214 operation in-line. */
3215 if (src_align == 0)
3216 return NULL_RTX;
3217
3218 if (currently_expanding_gimple_stmt)
3219 stringop_block_profile (currently_expanding_gimple_stmt,
3220 &expected_align, &expected_size);
3221
3222 if (expected_align < dest_align)
3223 expected_align = dest_align;
3224 dest_mem = get_memory_rtx (dest, len);
3225 set_mem_align (dest_mem, dest_align);
3226 len_rtx = expand_normal (len);
3227 determine_block_size (len, len_rtx, &min_size, &max_size,
3228 &probable_max_size);
3229 src_str = c_getstr (src);
3230
3231 /* If SRC is a string constant and block move would be done
3232 by pieces, we can avoid loading the string from memory
3233 and only store the computed constants. */
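/* For example, when alignment permits, memcpy (dst, "ab", 3) can be
   expanded into stores of constants computed directly from the string
   literal, without emitting a load from the literal's memory.  */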
3234 if (src_str
3235 && CONST_INT_P (len_rtx)
3236 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3237 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3238 CONST_CAST (char *, src_str),
3239 dest_align, false))
3240 {
3241 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3242 builtin_memcpy_read_str,
3243 CONST_CAST (char *, src_str),
3244 dest_align, false, 0);
3245 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3246 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3247 return dest_mem;
3248 }
3249
3250 src_mem = get_memory_rtx (src, len);
3251 set_mem_align (src_mem, src_align);
3252
3253 /* Copy word part most expediently. */
3254 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3255 CALL_EXPR_TAILCALL (exp)
3256 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3257 expected_align, expected_size,
3258 min_size, max_size, probable_max_size);
3259
3260 if (dest_addr == 0)
3261 {
3262 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3263 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3264 }
3265
3266 return dest_addr;
3267 }
3268
3269 /* Expand a call EXP to the memcpy builtin.
3270 Return NULL_RTX if we failed, the caller should emit a normal call,
3271 otherwise try to get the result in TARGET, if convenient (and in
3272 mode MODE if that's convenient). */
3273
3274 static rtx
3275 expand_builtin_memcpy (tree exp, rtx target)
3276 {
3277 if (!validate_arglist (exp,
3278 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3279 return NULL_RTX;
3280 else
3281 {
3282 tree dest = CALL_EXPR_ARG (exp, 0);
3283 tree src = CALL_EXPR_ARG (exp, 1);
3284 tree len = CALL_EXPR_ARG (exp, 2);
3285 return expand_builtin_memcpy_args (dest, src, len, target, exp);
3286 }
3287 }
3288
3289 /* Expand an instrumented call EXP to the memcpy builtin.
3290 Return NULL_RTX if we failed, the caller should emit a normal call,
3291 otherwise try to get the result in TARGET, if convenient (and in
3292 mode MODE if that's convenient). */
3293
3294 static rtx
3295 expand_builtin_memcpy_with_bounds (tree exp, rtx target)
3296 {
3297 if (!validate_arglist (exp,
3298 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3299 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3300 INTEGER_TYPE, VOID_TYPE))
3301 return NULL_RTX;
3302 else
3303 {
3304 tree dest = CALL_EXPR_ARG (exp, 0);
3305 tree src = CALL_EXPR_ARG (exp, 2);
3306 tree len = CALL_EXPR_ARG (exp, 4);
3307 rtx res = expand_builtin_memcpy_args (dest, src, len, target, exp);
3308
3309 /* Return src bounds with the result. */
3310 if (res)
3311 {
3312 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3313 expand_normal (CALL_EXPR_ARG (exp, 1)));
3314 res = chkp_join_splitted_slot (res, bnd);
3315 }
3316 return res;
3317 }
3318 }
3319
3320 /* Expand a call EXP to the mempcpy builtin.
3321 Return NULL_RTX if we failed; the caller should emit a normal call,
3322 otherwise try to get the result in TARGET, if convenient (and in
3323 mode MODE if that's convenient). If ENDP is 0 return the
3324 destination pointer, if ENDP is 1 return the end pointer ala
3325 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3326 stpcpy. */
3327
3328 static rtx
3329 expand_builtin_mempcpy (tree exp, rtx target, machine_mode mode)
3330 {
3331 if (!validate_arglist (exp,
3332 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3333 return NULL_RTX;
3334 else
3335 {
3336 tree dest = CALL_EXPR_ARG (exp, 0);
3337 tree src = CALL_EXPR_ARG (exp, 1);
3338 tree len = CALL_EXPR_ARG (exp, 2);
3339 return expand_builtin_mempcpy_args (dest, src, len,
3340 target, mode, /*endp=*/ 1,
3341 exp);
3342 }
3343 }
3344
3345 /* Expand an instrumented call EXP to the mempcpy builtin.
3346 	   Return NULL_RTX if we failed; the caller should emit a normal call,
3347 otherwise try to get the result in TARGET, if convenient (and in
3348 mode MODE if that's convenient). */
3349
3350 static rtx
3351 expand_builtin_mempcpy_with_bounds (tree exp, rtx target, machine_mode mode)
3352 {
3353 if (!validate_arglist (exp,
3354 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3355 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3356 INTEGER_TYPE, VOID_TYPE))
3357 return NULL_RTX;
3358 else
3359 {
3360 tree dest = CALL_EXPR_ARG (exp, 0);
3361 tree src = CALL_EXPR_ARG (exp, 2);
3362 tree len = CALL_EXPR_ARG (exp, 4);
3363 rtx res = expand_builtin_mempcpy_args (dest, src, len, target,
3364 mode, 1, exp);
3365
3366 	      /* Return DEST bounds with the result.  */
3367 if (res)
3368 {
3369 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3370 expand_normal (CALL_EXPR_ARG (exp, 1)));
3371 res = chkp_join_splitted_slot (res, bnd);
3372 }
3373 return res;
3374 }
3375 }
3376
3377 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3378 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3379 so that this can also be called without constructing an actual CALL_EXPR.
3380 The other arguments and return value are the same as for
3381 expand_builtin_mempcpy. */
3382
3383 static rtx
3384 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3385 rtx target, machine_mode mode, int endp,
3386 tree orig_exp)
3387 {
3388 tree fndecl = get_callee_fndecl (orig_exp);
3389
3390 /* If return value is ignored, transform mempcpy into memcpy. */
3391 if (target == const0_rtx
3392 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP
3393 && builtin_decl_implicit_p (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP))
3394 {
3395 tree fn = builtin_decl_implicit (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP);
3396 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3397 dest, src, len);
3398 return expand_expr (result, target, mode, EXPAND_NORMAL);
3399 }
3400 else if (target == const0_rtx
3401 && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
3402 {
3403 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3404 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3405 dest, src, len);
3406 return expand_expr (result, target, mode, EXPAND_NORMAL);
3407 }
3408 else
3409 {
3410 const char *src_str;
3411 unsigned int src_align = get_pointer_alignment (src);
3412 unsigned int dest_align = get_pointer_alignment (dest);
3413 rtx dest_mem, src_mem, len_rtx;
3414
3415 /* If either SRC or DEST is not a pointer type, don't do this
3416 operation in-line. */
3417 if (dest_align == 0 || src_align == 0)
3418 return NULL_RTX;
3419
3420 /* If LEN is not constant, call the normal function. */
3421 if (! tree_fits_uhwi_p (len))
3422 return NULL_RTX;
3423
3424 len_rtx = expand_normal (len);
3425 src_str = c_getstr (src);
3426
3427 /* If SRC is a string constant and block move would be done
3428 by pieces, we can avoid loading the string from memory
3429 	 and only store the computed constants.  */
3430 if (src_str
3431 && CONST_INT_P (len_rtx)
3432 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3433 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3434 CONST_CAST (char *, src_str),
3435 dest_align, false))
3436 {
3437 dest_mem = get_memory_rtx (dest, len);
3438 set_mem_align (dest_mem, dest_align);
3439 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3440 builtin_memcpy_read_str,
3441 CONST_CAST (char *, src_str),
3442 dest_align, false, endp);
3443 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3444 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3445 return dest_mem;
3446 }
3447
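	      /* Otherwise, if the length is a constant small enough to be moved
	         by pieces, do the move inline and return the appropriate end
	         address.  */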
3448 if (CONST_INT_P (len_rtx)
3449 && can_move_by_pieces (INTVAL (len_rtx),
3450 MIN (dest_align, src_align)))
3451 {
3452 dest_mem = get_memory_rtx (dest, len);
3453 set_mem_align (dest_mem, dest_align);
3454 src_mem = get_memory_rtx (src, len);
3455 set_mem_align (src_mem, src_align);
3456 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3457 MIN (dest_align, src_align), endp);
3458 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3459 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3460 return dest_mem;
3461 }
3462
3463 return NULL_RTX;
3464 }
3465 }
3466
3467 #ifndef HAVE_movstr
3468 # define HAVE_movstr 0
3469 # define CODE_FOR_movstr CODE_FOR_nothing
3470 #endif
3471
3472 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3473 we failed, the caller should emit a normal call, otherwise try to
3474 get the result in TARGET, if convenient. If ENDP is 0 return the
3475 destination pointer, if ENDP is 1 return the end pointer ala
3476 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3477 stpcpy. */
3478
3479 static rtx
3480 expand_movstr (tree dest, tree src, rtx target, int endp)
3481 {
3482 struct expand_operand ops[3];
3483 rtx dest_mem;
3484 rtx src_mem;
3485
3486 if (!HAVE_movstr)
3487 return NULL_RTX;
3488
3489 dest_mem = get_memory_rtx (dest, NULL);
3490 src_mem = get_memory_rtx (src, NULL);
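	  /* If a strcpy-style return value was requested, keep the original
	     destination address in a register so it can be returned unchanged.  */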
3491 if (!endp)
3492 {
3493 target = force_reg (Pmode, XEXP (dest_mem, 0));
3494 dest_mem = replace_equiv_address (dest_mem, target);
3495 }
3496
3497 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3498 create_fixed_operand (&ops[1], dest_mem);
3499 create_fixed_operand (&ops[2], src_mem);
3500 if (!maybe_expand_insn (CODE_FOR_movstr, 3, ops))
3501 return NULL_RTX;
3502
3503 if (endp && target != const0_rtx)
3504 {
3505 target = ops[0].value;
3506 /* movstr is supposed to set end to the address of the NUL
3507 terminator. If the caller requested a mempcpy-like return value,
3508 adjust it. */
3509 if (endp == 1)
3510 {
3511 rtx tem = plus_constant (GET_MODE (target),
3512 gen_lowpart (GET_MODE (target), target), 1);
3513 emit_move_insn (target, force_operand (tem, NULL_RTX));
3514 }
3515 }
3516 return target;
3517 }
3518
3519 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3520 	   NULL_RTX if we failed; the caller should emit a normal call, otherwise
3521 	   try to get the result in TARGET, if that is
3522 	   convenient.  */
3523
3524 static rtx
3525 expand_builtin_strcpy (tree exp, rtx target)
3526 {
3527 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3528 {
3529 tree dest = CALL_EXPR_ARG (exp, 0);
3530 tree src = CALL_EXPR_ARG (exp, 1);
3531 return expand_builtin_strcpy_args (dest, src, target);
3532 }
3533 return NULL_RTX;
3534 }
3535
3536 /* Helper function to do the actual work for expand_builtin_strcpy. The
3537 arguments to the builtin_strcpy call DEST and SRC are broken out
3538 so that this can also be called without constructing an actual CALL_EXPR.
3539 The other arguments and return value are the same as for
3540 expand_builtin_strcpy. */
3541
3542 static rtx
3543 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3544 {
3545 return expand_movstr (dest, src, target, /*endp=*/0);
3546 }
3547
3548 /* Expand a call EXP to the stpcpy builtin.
3549 	   Return NULL_RTX if we failed; the caller should emit a normal call,
3550 otherwise try to get the result in TARGET, if convenient (and in
3551 mode MODE if that's convenient). */
3552
3553 static rtx
3554 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3555 {
3556 tree dst, src;
3557 location_t loc = EXPR_LOCATION (exp);
3558
3559 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3560 return NULL_RTX;
3561
3562 dst = CALL_EXPR_ARG (exp, 0);
3563 src = CALL_EXPR_ARG (exp, 1);
3564
3565 /* If return value is ignored, transform stpcpy into strcpy. */
3566 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3567 {
3568 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3569 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3570 return expand_expr (result, target, mode, EXPAND_NORMAL);
3571 }
3572 else
3573 {
3574 tree len, lenp1;
3575 rtx ret;
3576
3577 /* Ensure we get an actual string whose length can be evaluated at
3578 compile-time, not an expression containing a string. This is
3579 because the latter will potentially produce pessimized code
3580 when used to produce the return value. */
3581 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3582 return expand_movstr (dst, src, target, /*endp=*/2);
3583
3584 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3585 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3586 target, mode, /*endp=*/2,
3587 exp);
3588
3589 if (ret)
3590 return ret;
3591
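	      /* If the source length is a compile-time constant, expand as strcpy
	         and add LEN to the returned destination to get a pointer to the
	         terminating NUL, which is what stpcpy returns.  */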
3592 if (TREE_CODE (len) == INTEGER_CST)
3593 {
3594 rtx len_rtx = expand_normal (len);
3595
3596 if (CONST_INT_P (len_rtx))
3597 {
3598 ret = expand_builtin_strcpy_args (dst, src, target);
3599
3600 if (ret)
3601 {
3602 if (! target)
3603 {
3604 if (mode != VOIDmode)
3605 target = gen_reg_rtx (mode);
3606 else
3607 target = gen_reg_rtx (GET_MODE (ret));
3608 }
3609 if (GET_MODE (target) != GET_MODE (ret))
3610 ret = gen_lowpart (GET_MODE (target), ret);
3611
3612 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3613 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3614 gcc_assert (ret);
3615
3616 return target;
3617 }
3618 }
3619 }
3620
3621 return expand_movstr (dst, src, target, /*endp=*/2);
3622 }
3623 }
3624
3625 	/* Callback routine for store_by_pieces.  Read GET_MODE_SIZE (MODE)
3626 bytes from constant string DATA + OFFSET and return it as target
3627 constant. */
3628
3629 rtx
3630 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3631 machine_mode mode)
3632 {
3633 const char *str = (const char *) data;
3634
3635 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3636 return const0_rtx;
3637
3638 return c_readstr (str + offset, mode);
3639 }
3640
3641 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3642 	   NULL_RTX if we failed; the caller should emit a normal call.  */
3643
3644 static rtx
3645 expand_builtin_strncpy (tree exp, rtx target)
3646 {
3647 location_t loc = EXPR_LOCATION (exp);
3648
3649 if (validate_arglist (exp,
3650 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3651 {
3652 tree dest = CALL_EXPR_ARG (exp, 0);
3653 tree src = CALL_EXPR_ARG (exp, 1);
3654 tree len = CALL_EXPR_ARG (exp, 2);
3655 tree slen = c_strlen (src, 1);
3656
3657 /* We must be passed a constant len and src parameter. */
3658 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
3659 return NULL_RTX;
3660
3661 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3662
3663 /* We're required to pad with trailing zeros if the requested
3664 len is greater than strlen(s2)+1. In that case try to
3665 	         use store_by_pieces; if that fails, punt.  */
3666 if (tree_int_cst_lt (slen, len))
3667 {
3668 unsigned int dest_align = get_pointer_alignment (dest);
3669 const char *p = c_getstr (src);
3670 rtx dest_mem;
3671
3672 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
3673 || !can_store_by_pieces (tree_to_uhwi (len),
3674 builtin_strncpy_read_str,
3675 CONST_CAST (char *, p),
3676 dest_align, false))
3677 return NULL_RTX;
3678
3679 dest_mem = get_memory_rtx (dest, len);
3680 store_by_pieces (dest_mem, tree_to_uhwi (len),
3681 builtin_strncpy_read_str,
3682 CONST_CAST (char *, p), dest_align, false, 0);
3683 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3684 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3685 return dest_mem;
3686 }
3687 }
3688 return NULL_RTX;
3689 }
3690
3691 	/* Callback routine for store_by_pieces.  Return a target constant
3692 	   made up of GET_MODE_SIZE (MODE) copies of the single byte pointed
3693 	   to by DATA; OFFSET is ignored.  */
3694
3695 rtx
3696 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3697 machine_mode mode)
3698 {
3699 const char *c = (const char *) data;
3700 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3701
3702 memset (p, *c, GET_MODE_SIZE (mode));
3703
3704 return c_readstr (p, mode);
3705 }
3706
3707 /* Callback routine for store_by_pieces. Return the RTL of a register
3708 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3709 char value given in the RTL register data. For example, if mode is
3710 4 bytes wide, return the RTL for 0x01010101*data. */
3711
3712 static rtx
3713 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3714 machine_mode mode)
3715 {
3716 rtx target, coeff;
3717 size_t size;
3718 char *p;
3719
3720 size = GET_MODE_SIZE (mode);
3721 if (size == 1)
3722 return (rtx) data;
3723
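	  /* Otherwise replicate the byte across the mode by multiplying it
	     with the constant 0x01...01 of the mode's width.  */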
3724 p = XALLOCAVEC (char, size);
3725 memset (p, 1, size);
3726 coeff = c_readstr (p, mode);
3727
3728 target = convert_to_mode (mode, (rtx) data, 1);
3729 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3730 return force_reg (mode, target);
3731 }
3732
3733 /* Expand expression EXP, which is a call to the memset builtin. Return
3734 	   NULL_RTX if we failed; the caller should emit a normal call, otherwise
3735 try to get the result in TARGET, if convenient (and in mode MODE if that's
3736 convenient). */
3737
3738 static rtx
3739 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
3740 {
3741 if (!validate_arglist (exp,
3742 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3743 return NULL_RTX;
3744 else
3745 {
3746 tree dest = CALL_EXPR_ARG (exp, 0);
3747 tree val = CALL_EXPR_ARG (exp, 1);
3748 tree len = CALL_EXPR_ARG (exp, 2);
3749 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3750 }
3751 }
3752
3753 /* Expand expression EXP, which is an instrumented call to the memset builtin.
3754 	   Return NULL_RTX if we failed; the caller should emit a normal call, otherwise
3755 try to get the result in TARGET, if convenient (and in mode MODE if that's
3756 convenient). */
3757
3758 static rtx
3759 expand_builtin_memset_with_bounds (tree exp, rtx target, machine_mode mode)
3760 {
3761 if (!validate_arglist (exp,
3762 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3763 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3764 return NULL_RTX;
3765 else
3766 {
3767 tree dest = CALL_EXPR_ARG (exp, 0);
3768 tree val = CALL_EXPR_ARG (exp, 2);
3769 tree len = CALL_EXPR_ARG (exp, 3);
3770 rtx res = expand_builtin_memset_args (dest, val, len, target, mode, exp);
3771
3772 	      /* Return DEST bounds with the result.  */
3773 if (res)
3774 {
3775 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3776 expand_normal (CALL_EXPR_ARG (exp, 1)));
3777 res = chkp_join_splitted_slot (res, bnd);
3778 }
3779 return res;
3780 }
3781 }
3782
3783 /* Helper function to do the actual work for expand_builtin_memset. The
3784 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3785 so that this can also be called without constructing an actual CALL_EXPR.
3786 The other arguments and return value are the same as for
3787 expand_builtin_memset. */
3788
3789 static rtx
3790 expand_builtin_memset_args (tree dest, tree val, tree len,
3791 rtx target, machine_mode mode, tree orig_exp)
3792 {
3793 tree fndecl, fn;
3794 enum built_in_function fcode;
3795 machine_mode val_mode;
3796 char c;
3797 unsigned int dest_align;
3798 rtx dest_mem, dest_addr, len_rtx;
3799 HOST_WIDE_INT expected_size = -1;
3800 unsigned int expected_align = 0;
3801 unsigned HOST_WIDE_INT min_size;
3802 unsigned HOST_WIDE_INT max_size;
3803 unsigned HOST_WIDE_INT probable_max_size;
3804
3805 dest_align = get_pointer_alignment (dest);
3806
3807 /* If DEST is not a pointer type, don't do this operation in-line. */
3808 if (dest_align == 0)
3809 return NULL_RTX;
3810
3811 if (currently_expanding_gimple_stmt)
3812 stringop_block_profile (currently_expanding_gimple_stmt,
3813 &expected_align, &expected_size);
3814
3815 if (expected_align < dest_align)
3816 expected_align = dest_align;
3817
3818 /* If the LEN parameter is zero, return DEST. */
3819 if (integer_zerop (len))
3820 {
3821 /* Evaluate and ignore VAL in case it has side-effects. */
3822 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3823 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3824 }
3825
3826 /* Stabilize the arguments in case we fail. */
3827 dest = builtin_save_expr (dest);
3828 val = builtin_save_expr (val);
3829 len = builtin_save_expr (len);
3830
3831 len_rtx = expand_normal (len);
3832 determine_block_size (len, len_rtx, &min_size, &max_size,
3833 &probable_max_size);
3834 dest_mem = get_memory_rtx (dest, len);
3835 val_mode = TYPE_MODE (unsigned_char_type_node);
3836
3837 if (TREE_CODE (val) != INTEGER_CST)
3838 {
3839 rtx val_rtx;
3840
3841 val_rtx = expand_normal (val);
3842 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
3843
3844 /* Assume that we can memset by pieces if we can store
3845 	         the coefficients by pieces (in the required modes).
3846 	         We can't pass builtin_memset_gen_str as that emits RTL.  */
3847 c = 1;
3848 if (tree_fits_uhwi_p (len)
3849 && can_store_by_pieces (tree_to_uhwi (len),
3850 builtin_memset_read_str, &c, dest_align,
3851 true))
3852 {
3853 val_rtx = force_reg (val_mode, val_rtx);
3854 store_by_pieces (dest_mem, tree_to_uhwi (len),
3855 builtin_memset_gen_str, val_rtx, dest_align,
3856 true, 0);
3857 }
3858 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3859 dest_align, expected_align,
3860 expected_size, min_size, max_size,
3861 probable_max_size))
3862 goto do_libcall;
3863
3864 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3865 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3866 return dest_mem;
3867 }
3868
3869 if (target_char_cast (val, &c))
3870 goto do_libcall;
3871
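	  /* A nonzero constant byte: store by pieces if possible, otherwise try
	     the setmem pattern, and fall back to a library call.  */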
3872 if (c)
3873 {
3874 if (tree_fits_uhwi_p (len)
3875 && can_store_by_pieces (tree_to_uhwi (len),
3876 builtin_memset_read_str, &c, dest_align,
3877 true))
3878 store_by_pieces (dest_mem, tree_to_uhwi (len),
3879 builtin_memset_read_str, &c, dest_align, true, 0);
3880 else if (!set_storage_via_setmem (dest_mem, len_rtx,
3881 gen_int_mode (c, val_mode),
3882 dest_align, expected_align,
3883 expected_size, min_size, max_size,
3884 probable_max_size))
3885 goto do_libcall;
3886
3887 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3888 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3889 return dest_mem;
3890 }
3891
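	  /* The constant byte is zero, so clear the block instead.  */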
3892 set_mem_align (dest_mem, dest_align);
3893 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3894 CALL_EXPR_TAILCALL (orig_exp)
3895 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3896 expected_align, expected_size,
3897 min_size, max_size,
3898 probable_max_size);
3899
3900 if (dest_addr == 0)
3901 {
3902 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3903 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3904 }
3905
3906 return dest_addr;
3907
3908 do_libcall:
3909 fndecl = get_callee_fndecl (orig_exp);
3910 fcode = DECL_FUNCTION_CODE (fndecl);
3911 if (fcode == BUILT_IN_MEMSET
3912 || fcode == BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP)
3913 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
3914 dest, val, len);
3915 else if (fcode == BUILT_IN_BZERO)
3916 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
3917 dest, len);
3918 else
3919 gcc_unreachable ();
3920 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3921 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3922 return expand_call (fn, target, target == const0_rtx);
3923 }
3924
3925 /* Expand expression EXP, which is a call to the bzero builtin. Return
3926 	   NULL_RTX if we failed; the caller should emit a normal call.  */
3927
3928 static rtx
3929 expand_builtin_bzero (tree exp)
3930 {
3931 tree dest, size;
3932 location_t loc = EXPR_LOCATION (exp);
3933
3934 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3935 return NULL_RTX;
3936
3937 dest = CALL_EXPR_ARG (exp, 0);
3938 size = CALL_EXPR_ARG (exp, 1);
3939
3940 /* New argument list transforming bzero(ptr x, int y) to
3941 memset(ptr x, int 0, size_t y). This is done this way
3942 	     so that if it isn't expanded inline, we fall back to
3943 calling bzero instead of memset. */
3944
3945 return expand_builtin_memset_args (dest, integer_zero_node,
3946 fold_convert_loc (loc,
3947 size_type_node, size),
3948 const0_rtx, VOIDmode, exp);
3949 }
3950
3951 /* Expand expression EXP, which is a call to the memcmp built-in function.
3952 Return NULL_RTX if we failed and the caller should emit a normal call,
3953 otherwise try to get the result in TARGET, if convenient (and in mode
3954 MODE, if that's convenient). */
3955
3956 static rtx
3957 expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3958 ATTRIBUTE_UNUSED machine_mode mode)
3959 {
3960 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
3961
3962 if (!validate_arglist (exp,
3963 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3964 return NULL_RTX;
3965
3966 /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
3967 implementing memcmp because it will stop if it encounters two
3968 zero bytes. */
3969 #if defined HAVE_cmpmemsi
3970 {
3971 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3972 rtx result;
3973 rtx insn;
3974 tree arg1 = CALL_EXPR_ARG (exp, 0);
3975 tree arg2 = CALL_EXPR_ARG (exp, 1);
3976 tree len = CALL_EXPR_ARG (exp, 2);
3977
3978 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3979 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3980 machine_mode insn_mode;
3981
3982 if (HAVE_cmpmemsi)
3983 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
3984 else
3985 return NULL_RTX;
3986
3987 /* If we don't have POINTER_TYPE, call the function. */
3988 if (arg1_align == 0 || arg2_align == 0)
3989 return NULL_RTX;
3990
3991 /* Make a place to write the result of the instruction. */
3992 result = target;
3993 if (! (result != 0
3994 && REG_P (result) && GET_MODE (result) == insn_mode
3995 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3996 result = gen_reg_rtx (insn_mode);
3997
3998 arg1_rtx = get_memory_rtx (arg1, len);
3999 arg2_rtx = get_memory_rtx (arg2, len);
4000 arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
4001
4002 /* Set MEM_SIZE as appropriate. */
4003 if (CONST_INT_P (arg3_rtx))
4004 {
4005 set_mem_size (arg1_rtx, INTVAL (arg3_rtx));
4006 set_mem_size (arg2_rtx, INTVAL (arg3_rtx));
4007 }
4008
4009 if (HAVE_cmpmemsi)
4010 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4011 GEN_INT (MIN (arg1_align, arg2_align)));
4012 else
4013 gcc_unreachable ();
4014
4015 if (insn)
4016 emit_insn (insn);
4017 else
4018 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
4019 TYPE_MODE (integer_type_node), 3,
4020 XEXP (arg1_rtx, 0), Pmode,
4021 XEXP (arg2_rtx, 0), Pmode,
4022 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
4023 TYPE_UNSIGNED (sizetype)),
4024 TYPE_MODE (sizetype));
4025
4026 /* Return the value in the proper mode for this function. */
4027 mode = TYPE_MODE (TREE_TYPE (exp));
4028 if (GET_MODE (result) == mode)
4029 return result;
4030 else if (target != 0)
4031 {
4032 convert_move (target, result, 0);
4033 return target;
4034 }
4035 else
4036 return convert_to_mode (mode, result, 0);
4037 }
4038 #endif /* HAVE_cmpmemsi. */
4039
4040 return NULL_RTX;
4041 }
4042
4043 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4044 	   if we failed; the caller should emit a normal call, otherwise try to get
4045 the result in TARGET, if convenient. */
4046
4047 static rtx
4048 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4049 {
4050 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4051 return NULL_RTX;
4052
4053 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
4054 if (direct_optab_handler (cmpstr_optab, SImode) != CODE_FOR_nothing
4055 || direct_optab_handler (cmpstrn_optab, SImode) != CODE_FOR_nothing)
4056 {
4057 rtx arg1_rtx, arg2_rtx;
4058 rtx result, insn = NULL_RTX;
4059 tree fndecl, fn;
4060 tree arg1 = CALL_EXPR_ARG (exp, 0);
4061 tree arg2 = CALL_EXPR_ARG (exp, 1);
4062
4063 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4064 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4065
4066 /* If we don't have POINTER_TYPE, call the function. */
4067 if (arg1_align == 0 || arg2_align == 0)
4068 return NULL_RTX;
4069
4070 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4071 arg1 = builtin_save_expr (arg1);
4072 arg2 = builtin_save_expr (arg2);
4073
4074 arg1_rtx = get_memory_rtx (arg1, NULL);
4075 arg2_rtx = get_memory_rtx (arg2, NULL);
4076
4077 #ifdef HAVE_cmpstrsi
4078 /* Try to call cmpstrsi. */
4079 if (HAVE_cmpstrsi)
4080 {
4081 machine_mode insn_mode
4082 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
4083
4084 /* Make a place to write the result of the instruction. */
4085 result = target;
4086 if (! (result != 0
4087 && REG_P (result) && GET_MODE (result) == insn_mode
4088 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4089 result = gen_reg_rtx (insn_mode);
4090
4091 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
4092 GEN_INT (MIN (arg1_align, arg2_align)));
4093 }
4094 #endif
4095 #ifdef HAVE_cmpstrnsi
4096 /* Try to determine at least one length and call cmpstrnsi. */
4097 if (!insn && HAVE_cmpstrnsi)
4098 {
4099 tree len;
4100 rtx arg3_rtx;
4101
4102 machine_mode insn_mode
4103 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4104 tree len1 = c_strlen (arg1, 1);
4105 tree len2 = c_strlen (arg2, 1);
4106
4107 if (len1)
4108 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4109 if (len2)
4110 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4111
4112 /* If we don't have a constant length for the first, use the length
4113 of the second, if we know it. We don't require a constant for
4114 this case; some cost analysis could be done if both are available
4115 but neither is constant. For now, assume they're equally cheap,
4116 unless one has side effects. If both strings have constant lengths,
4117 use the smaller. */
4118
4119 if (!len1)
4120 len = len2;
4121 else if (!len2)
4122 len = len1;
4123 else if (TREE_SIDE_EFFECTS (len1))
4124 len = len2;
4125 else if (TREE_SIDE_EFFECTS (len2))
4126 len = len1;
4127 else if (TREE_CODE (len1) != INTEGER_CST)
4128 len = len2;
4129 else if (TREE_CODE (len2) != INTEGER_CST)
4130 len = len1;
4131 else if (tree_int_cst_lt (len1, len2))
4132 len = len1;
4133 else
4134 len = len2;
4135
4136 /* If both arguments have side effects, we cannot optimize. */
4137 if (!len || TREE_SIDE_EFFECTS (len))
4138 goto do_libcall;
4139
4140 arg3_rtx = expand_normal (len);
4141
4142 /* Make a place to write the result of the instruction. */
4143 result = target;
4144 if (! (result != 0
4145 && REG_P (result) && GET_MODE (result) == insn_mode
4146 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4147 result = gen_reg_rtx (insn_mode);
4148
4149 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4150 GEN_INT (MIN (arg1_align, arg2_align)));
4151 }
4152 #endif
4153
4154 if (insn)
4155 {
4156 machine_mode mode;
4157 emit_insn (insn);
4158
4159 /* Return the value in the proper mode for this function. */
4160 mode = TYPE_MODE (TREE_TYPE (exp));
4161 if (GET_MODE (result) == mode)
4162 return result;
4163 if (target == 0)
4164 return convert_to_mode (mode, result, 0);
4165 convert_move (target, result, 0);
4166 return target;
4167 }
4168
4169 /* Expand the library call ourselves using a stabilized argument
4170 list to avoid re-evaluating the function's arguments twice. */
4171 #ifdef HAVE_cmpstrnsi
4172 do_libcall:
4173 #endif
4174 fndecl = get_callee_fndecl (exp);
4175 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4176 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4177 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4178 return expand_call (fn, target, target == const0_rtx);
4179 }
4180 #endif
4181 return NULL_RTX;
4182 }
4183
4184 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4185 	   NULL_RTX if we failed; the caller should emit a normal call, otherwise
4186 	   try to get the result in TARGET, if convenient.  */
4187
4188 static rtx
4189 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4190 ATTRIBUTE_UNUSED machine_mode mode)
4191 {
4192 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
4193
4194 if (!validate_arglist (exp,
4195 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4196 return NULL_RTX;
4197
4198 /* If c_strlen can determine an expression for one of the string
4199 lengths, and it doesn't have side effects, then emit cmpstrnsi
4200 using length MIN(strlen(string)+1, arg3). */
4201 #ifdef HAVE_cmpstrnsi
4202 if (HAVE_cmpstrnsi)
4203 {
4204 tree len, len1, len2;
4205 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4206 rtx result, insn;
4207 tree fndecl, fn;
4208 tree arg1 = CALL_EXPR_ARG (exp, 0);
4209 tree arg2 = CALL_EXPR_ARG (exp, 1);
4210 tree arg3 = CALL_EXPR_ARG (exp, 2);
4211
4212 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4213 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4214 machine_mode insn_mode
4215 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4216
4217 len1 = c_strlen (arg1, 1);
4218 len2 = c_strlen (arg2, 1);
4219
4220 if (len1)
4221 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4222 if (len2)
4223 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4224
4225 /* If we don't have a constant length for the first, use the length
4226 of the second, if we know it. We don't require a constant for
4227 this case; some cost analysis could be done if both are available
4228 but neither is constant. For now, assume they're equally cheap,
4229 unless one has side effects. If both strings have constant lengths,
4230 use the smaller. */
4231
4232 if (!len1)
4233 len = len2;
4234 else if (!len2)
4235 len = len1;
4236 else if (TREE_SIDE_EFFECTS (len1))
4237 len = len2;
4238 else if (TREE_SIDE_EFFECTS (len2))
4239 len = len1;
4240 else if (TREE_CODE (len1) != INTEGER_CST)
4241 len = len2;
4242 else if (TREE_CODE (len2) != INTEGER_CST)
4243 len = len1;
4244 else if (tree_int_cst_lt (len1, len2))
4245 len = len1;
4246 else
4247 len = len2;
4248
4249 /* If both arguments have side effects, we cannot optimize. */
4250 if (!len || TREE_SIDE_EFFECTS (len))
4251 return NULL_RTX;
4252
4253 /* The actual new length parameter is MIN(len,arg3). */
4254 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
4255 fold_convert_loc (loc, TREE_TYPE (len), arg3));
4256
4257 /* If we don't have POINTER_TYPE, call the function. */
4258 if (arg1_align == 0 || arg2_align == 0)
4259 return NULL_RTX;
4260
4261 /* Make a place to write the result of the instruction. */
4262 result = target;
4263 if (! (result != 0
4264 && REG_P (result) && GET_MODE (result) == insn_mode
4265 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4266 result = gen_reg_rtx (insn_mode);
4267
4268 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4269 arg1 = builtin_save_expr (arg1);
4270 arg2 = builtin_save_expr (arg2);
4271 len = builtin_save_expr (len);
4272
4273 arg1_rtx = get_memory_rtx (arg1, len);
4274 arg2_rtx = get_memory_rtx (arg2, len);
4275 arg3_rtx = expand_normal (len);
4276 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4277 GEN_INT (MIN (arg1_align, arg2_align)));
4278 if (insn)
4279 {
4280 emit_insn (insn);
4281
4282 /* Return the value in the proper mode for this function. */
4283 mode = TYPE_MODE (TREE_TYPE (exp));
4284 if (GET_MODE (result) == mode)
4285 return result;
4286 if (target == 0)
4287 return convert_to_mode (mode, result, 0);
4288 convert_move (target, result, 0);
4289 return target;
4290 }
4291
4292 /* Expand the library call ourselves using a stabilized argument
4293 list to avoid re-evaluating the function's arguments twice. */
4294 fndecl = get_callee_fndecl (exp);
4295 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
4296 arg1, arg2, len);
4297 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4298 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4299 return expand_call (fn, target, target == const0_rtx);
4300 }
4301 #endif
4302 return NULL_RTX;
4303 }
4304
4305 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4306 if that's convenient. */
4307
4308 rtx
4309 expand_builtin_saveregs (void)
4310 {
4311 rtx val;
4312 rtx_insn *seq;
4313
4314 /* Don't do __builtin_saveregs more than once in a function.
4315 Save the result of the first call and reuse it. */
4316 if (saveregs_value != 0)
4317 return saveregs_value;
4318
4319 /* When this function is called, it means that registers must be
4320 saved on entry to this function. So we migrate the call to the
4321 first insn of this function. */
4322
4323 start_sequence ();
4324
4325 /* Do whatever the machine needs done in this case. */
4326 val = targetm.calls.expand_builtin_saveregs ();
4327
4328 seq = get_insns ();
4329 end_sequence ();
4330
4331 saveregs_value = val;
4332
4333 /* Put the insns after the NOTE that starts the function. If this
4334 is inside a start_sequence, make the outer-level insn chain current, so
4335 the code is placed at the start of the function. */
4336 push_topmost_sequence ();
4337 emit_insn_after (seq, entry_of_function ());
4338 pop_topmost_sequence ();
4339
4340 return val;
4341 }
4342
4343 /* Expand a call to __builtin_next_arg. */
4344
4345 static rtx
4346 expand_builtin_next_arg (void)
4347 {
4348 /* Checking arguments is already done in fold_builtin_next_arg
4349 that must be called before this function. */
4350 return expand_binop (ptr_mode, add_optab,
4351 crtl->args.internal_arg_pointer,
4352 crtl->args.arg_offset_rtx,
4353 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4354 }
4355
4356 /* Make it easier for the backends by protecting the valist argument
4357 from multiple evaluations. */
4358
4359 static tree
4360 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4361 {
4362 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4363
4364 /* The current way of determining the type of valist is completely
4365 bogus. We should have the information on the va builtin instead. */
4366 if (!vatype)
4367 vatype = targetm.fn_abi_va_list (cfun->decl);
4368
4369 if (TREE_CODE (vatype) == ARRAY_TYPE)
4370 {
4371 if (TREE_SIDE_EFFECTS (valist))
4372 valist = save_expr (valist);
4373
4374 /* For this case, the backends will be expecting a pointer to
4375 vatype, but it's possible we've actually been given an array
4376 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4377 So fix it. */
4378 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4379 {
4380 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4381 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4382 }
4383 }
4384 else
4385 {
4386 tree pt = build_pointer_type (vatype);
4387
4388 if (! needs_lvalue)
4389 {
4390 if (! TREE_SIDE_EFFECTS (valist))
4391 return valist;
4392
4393 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4394 TREE_SIDE_EFFECTS (valist) = 1;
4395 }
4396
4397 if (TREE_SIDE_EFFECTS (valist))
4398 valist = save_expr (valist);
4399 valist = fold_build2_loc (loc, MEM_REF,
4400 vatype, valist, build_int_cst (pt, 0));
4401 }
4402
4403 return valist;
4404 }
4405
4406 /* The "standard" definition of va_list is void*. */
4407
4408 tree
4409 std_build_builtin_va_list (void)
4410 {
4411 return ptr_type_node;
4412 }
4413
4414 /* The "standard" abi va_list is va_list_type_node. */
4415
4416 tree
4417 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4418 {
4419 return va_list_type_node;
4420 }
4421
4422 /* The "standard" type of va_list is va_list_type_node. */
4423
4424 tree
4425 std_canonical_va_list_type (tree type)
4426 {
4427 tree wtype, htype;
4428
4429 if (INDIRECT_REF_P (type))
4430 type = TREE_TYPE (type);
4431 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE (type)))
4432 type = TREE_TYPE (type);
4433 wtype = va_list_type_node;
4434 htype = type;
4435 	  /* Handle structure va_list types.  */
4436 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4437 htype = TREE_TYPE (htype);
4438 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4439 {
4440 /* If va_list is an array type, the argument may have decayed
4441 to a pointer type, e.g. by being passed to another function.
4442 In that case, unwrap both types so that we can compare the
4443 underlying records. */
4444 if (TREE_CODE (htype) == ARRAY_TYPE
4445 || POINTER_TYPE_P (htype))
4446 {
4447 wtype = TREE_TYPE (wtype);
4448 htype = TREE_TYPE (htype);
4449 }
4450 }
4451 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4452 return va_list_type_node;
4453
4454 return NULL_TREE;
4455 }
4456
4457 /* The "standard" implementation of va_start: just assign `nextarg' to
4458 the variable. */
4459
4460 void
4461 std_expand_builtin_va_start (tree valist, rtx nextarg)
4462 {
4463 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4464 convert_move (va_r, nextarg, 0);
4465
4466 /* We do not have any valid bounds for the pointer, so
4467 just store zero bounds for it. */
4468 if (chkp_function_instrumented_p (current_function_decl))
4469 chkp_expand_bounds_reset_for_mem (valist,
4470 make_tree (TREE_TYPE (valist),
4471 nextarg));
4472 }
4473
4474 /* Expand EXP, a call to __builtin_va_start. */
4475
4476 static rtx
4477 expand_builtin_va_start (tree exp)
4478 {
4479 rtx nextarg;
4480 tree valist;
4481 location_t loc = EXPR_LOCATION (exp);
4482
4483 if (call_expr_nargs (exp) < 2)
4484 {
4485 error_at (loc, "too few arguments to function %<va_start%>");
4486 return const0_rtx;
4487 }
4488
4489 if (fold_builtin_next_arg (exp, true))
4490 return const0_rtx;
4491
4492 nextarg = expand_builtin_next_arg ();
4493 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4494
4495 if (targetm.expand_builtin_va_start)
4496 targetm.expand_builtin_va_start (valist, nextarg);
4497 else
4498 std_expand_builtin_va_start (valist, nextarg);
4499
4500 return const0_rtx;
4501 }
4502
4503 /* Expand EXP, a call to __builtin_va_end. */
4504
4505 static rtx
4506 expand_builtin_va_end (tree exp)
4507 {
4508 tree valist = CALL_EXPR_ARG (exp, 0);
4509
4510 /* Evaluate for side effects, if needed. I hate macros that don't
4511 do that. */
4512 if (TREE_SIDE_EFFECTS (valist))
4513 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4514
4515 return const0_rtx;
4516 }
4517
4518 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4519 builtin rather than just as an assignment in stdarg.h because of the
4520 nastiness of array-type va_list types. */
4521
4522 static rtx
4523 expand_builtin_va_copy (tree exp)
4524 {
4525 tree dst, src, t;
4526 location_t loc = EXPR_LOCATION (exp);
4527
4528 dst = CALL_EXPR_ARG (exp, 0);
4529 src = CALL_EXPR_ARG (exp, 1);
4530
4531 dst = stabilize_va_list_loc (loc, dst, 1);
4532 src = stabilize_va_list_loc (loc, src, 0);
4533
4534 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4535
4536 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4537 {
4538 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4539 TREE_SIDE_EFFECTS (t) = 1;
4540 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4541 }
4542 else
4543 {
4544 rtx dstb, srcb, size;
4545
4546 /* Evaluate to pointers. */
4547 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4548 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4549 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4550 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4551
4552 dstb = convert_memory_address (Pmode, dstb);
4553 srcb = convert_memory_address (Pmode, srcb);
4554
4555 /* "Dereference" to BLKmode memories. */
4556 dstb = gen_rtx_MEM (BLKmode, dstb);
4557 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4558 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4559 srcb = gen_rtx_MEM (BLKmode, srcb);
4560 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4561 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4562
4563 /* Copy. */
4564 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4565 }
4566
4567 return const0_rtx;
4568 }
4569
4570 /* Expand a call to one of the builtin functions __builtin_frame_address or
4571 __builtin_return_address. */
4572
4573 static rtx
4574 expand_builtin_frame_address (tree fndecl, tree exp)
4575 {
4576 /* The argument must be a nonnegative integer constant.
4577 It counts the number of frames to scan up the stack.
4578 The value is the return address saved in that frame. */
4579 if (call_expr_nargs (exp) == 0)
4580 /* Warning about missing arg was already issued. */
4581 return const0_rtx;
4582 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
4583 {
4584 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4585 error ("invalid argument to %<__builtin_frame_address%>");
4586 else
4587 error ("invalid argument to %<__builtin_return_address%>");
4588 return const0_rtx;
4589 }
4590 else
4591 {
4592 rtx tem
4593 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
4594 tree_to_uhwi (CALL_EXPR_ARG (exp, 0)));
4595
4596 /* Some ports cannot access arbitrary stack frames. */
4597 if (tem == NULL)
4598 {
4599 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4600 warning (0, "unsupported argument to %<__builtin_frame_address%>");
4601 else
4602 warning (0, "unsupported argument to %<__builtin_return_address%>");
4603 return const0_rtx;
4604 }
4605
4606 /* For __builtin_frame_address, return what we've got. */
4607 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4608 return tem;
4609
4610 if (!REG_P (tem)
4611 && ! CONSTANT_P (tem))
4612 tem = copy_addr_to_reg (tem);
4613 return tem;
4614 }
4615 }
4616
4617 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4618 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4619 is the same as for allocate_dynamic_stack_space. */
4620
4621 static rtx
4622 expand_builtin_alloca (tree exp, bool cannot_accumulate)
4623 {
4624 rtx op0;
4625 rtx result;
4626 bool valid_arglist;
4627 unsigned int align;
4628 bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
4629 == BUILT_IN_ALLOCA_WITH_ALIGN);
4630
4631 valid_arglist
4632 = (alloca_with_align
4633 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4634 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4635
4636 if (!valid_arglist)
4637 return NULL_RTX;
4638
4639 /* Compute the argument. */
4640 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4641
4642 /* Compute the alignment. */
4643 align = (alloca_with_align
4644 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
4645 : BIGGEST_ALIGNMENT);
4646
4647 /* Allocate the desired space. */
4648 result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
4649 result = convert_memory_address (ptr_mode, result);
4650
4651 return result;
4652 }
4653
4654 /* Expand a call to bswap builtin in EXP.
4655 Return NULL_RTX if a normal call should be emitted rather than expanding the
4656 function in-line. If convenient, the result should be placed in TARGET.
4657 SUBTARGET may be used as the target for computing one of EXP's operands. */
4658
4659 static rtx
4660 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
4661 rtx subtarget)
4662 {
4663 tree arg;
4664 rtx op0;
4665
4666 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4667 return NULL_RTX;
4668
4669 arg = CALL_EXPR_ARG (exp, 0);
4670 op0 = expand_expr (arg,
4671 subtarget && GET_MODE (subtarget) == target_mode
4672 ? subtarget : NULL_RTX,
4673 target_mode, EXPAND_NORMAL);
4674 if (GET_MODE (op0) != target_mode)
4675 op0 = convert_to_mode (target_mode, op0, 1);
4676
4677 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
4678
4679 gcc_assert (target);
4680
4681 return convert_to_mode (target_mode, target, 1);
4682 }
4683
4684 /* Expand a call to a unary builtin in EXP.
4685 Return NULL_RTX if a normal call should be emitted rather than expanding the
4686 function in-line. If convenient, the result should be placed in TARGET.
4687 SUBTARGET may be used as the target for computing one of EXP's operands. */
4688
4689 static rtx
4690 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
4691 rtx subtarget, optab op_optab)
4692 {
4693 rtx op0;
4694
4695 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4696 return NULL_RTX;
4697
4698 /* Compute the argument. */
4699 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4700 (subtarget
4701 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4702 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
4703 VOIDmode, EXPAND_NORMAL);
4704 /* Compute op, into TARGET if possible.
4705 Set TARGET to wherever the result comes back. */
4706 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4707 op_optab, op0, target, op_optab != clrsb_optab);
4708 gcc_assert (target);
4709
4710 return convert_to_mode (target_mode, target, 0);
4711 }
4712
4713 /* Expand a call to __builtin_expect. We just return our argument
4714 	   as the builtin_expect semantics should have already been applied
4715 	   by the tree branch prediction pass.  */
4716
4717 static rtx
4718 expand_builtin_expect (tree exp, rtx target)
4719 {
4720 tree arg;
4721
4722 if (call_expr_nargs (exp) < 2)
4723 return const0_rtx;
4724 arg = CALL_EXPR_ARG (exp, 0);
4725
4726 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4727 /* When guessing was done, the hints should be already stripped away. */
4728 gcc_assert (!flag_guess_branch_prob
4729 || optimize == 0 || seen_error ());
4730 return target;
4731 }
4732
4733 /* Expand a call to __builtin_assume_aligned. We just return our first
4734 	   argument as the builtin_assume_aligned semantics should have already
4735 	   been applied by CCP.  */
4736
4737 static rtx
4738 expand_builtin_assume_aligned (tree exp, rtx target)
4739 {
4740 if (call_expr_nargs (exp) < 2)
4741 return const0_rtx;
4742 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4743 EXPAND_NORMAL);
4744 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4745 && (call_expr_nargs (exp) < 3
4746 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4747 return target;
4748 }
4749
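	/* Expand a call to __builtin_trap.  Use the target's trap instruction
	   if it has one; otherwise fall back to a call to abort.  */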
4750 void
4751 expand_builtin_trap (void)
4752 {
4753 #ifdef HAVE_trap
4754 if (HAVE_trap)
4755 {
4756 rtx_insn *insn = emit_insn (gen_trap ());
4757 /* For trap insns when not accumulating outgoing args force
4758 REG_ARGS_SIZE note to prevent crossjumping of calls with
4759 different args sizes. */
4760 if (!ACCUMULATE_OUTGOING_ARGS)
4761 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4762 }
4763 else
4764 #endif
4765 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
4766 emit_barrier ();
4767 }
4768
4769 /* Expand a call to __builtin_unreachable. We do nothing except emit
4770 a barrier saying that control flow will not pass here.
4771
4772 It is the responsibility of the program being compiled to ensure
4773 	   that control flow never reaches __builtin_unreachable.  */
4774 static void
4775 expand_builtin_unreachable (void)
4776 {
4777 emit_barrier ();
4778 }
4779
4780 /* Expand EXP, a call to fabs, fabsf or fabsl.
4781 Return NULL_RTX if a normal call should be emitted rather than expanding
4782 the function inline. If convenient, the result should be placed
4783 in TARGET. SUBTARGET may be used as the target for computing
4784 the operand. */
4785
4786 static rtx
4787 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4788 {
4789 machine_mode mode;
4790 tree arg;
4791 rtx op0;
4792
4793 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4794 return NULL_RTX;
4795
4796 arg = CALL_EXPR_ARG (exp, 0);
4797 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4798 mode = TYPE_MODE (TREE_TYPE (arg));
4799 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4800 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4801 }
4802
4803 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4804 	   Return NULL if a normal call should be emitted rather than expanding the
4805 function inline. If convenient, the result should be placed in TARGET.
4806 SUBTARGET may be used as the target for computing the operand. */
4807
4808 static rtx
4809 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4810 {
4811 rtx op0, op1;
4812 tree arg;
4813
4814 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4815 return NULL_RTX;
4816
4817 arg = CALL_EXPR_ARG (exp, 0);
4818 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4819
4820 arg = CALL_EXPR_ARG (exp, 1);
4821 op1 = expand_normal (arg);
4822
4823 return expand_copysign (op0, op1, target);
4824 }
4825
4826 /* Expand a call to __builtin___clear_cache. */
4827
4828 static rtx
4829 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
4830 {
4831 #ifndef HAVE_clear_cache
4832 #ifdef CLEAR_INSN_CACHE
4833 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4834 does something. Just do the default expansion to a call to
4835 __clear_cache(). */
4836 return NULL_RTX;
4837 #else
4838 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4839 does nothing. There is no need to call it. Do nothing. */
4840 return const0_rtx;
4841 #endif /* CLEAR_INSN_CACHE */
4842 #else
4843 /* We have a "clear_cache" insn, and it will handle everything. */
4844 tree begin, end;
4845 rtx begin_rtx, end_rtx;
4846
4847 /* We must not expand to a library call. If we did, any
4848 fallback library function in libgcc that might contain a call to
4849 __builtin___clear_cache() would recurse infinitely. */
4850 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4851 {
4852 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
4853 return const0_rtx;
4854 }
4855
4856 if (HAVE_clear_cache)
4857 {
4858 struct expand_operand ops[2];
4859
4860 begin = CALL_EXPR_ARG (exp, 0);
4861 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
4862
4863 end = CALL_EXPR_ARG (exp, 1);
4864 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
4865
4866 create_address_operand (&ops[0], begin_rtx);
4867 create_address_operand (&ops[1], end_rtx);
4868 if (maybe_expand_insn (CODE_FOR_clear_cache, 2, ops))
4869 return const0_rtx;
4870 }
4871 return const0_rtx;
4872 #endif /* HAVE_clear_cache */
4873 }
4874
4875 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
4876
4877 static rtx
4878 round_trampoline_addr (rtx tramp)
4879 {
4880 rtx temp, addend, mask;
4881
4882 /* If we don't need too much alignment, we'll have been guaranteed
4883 proper alignment by get_trampoline_type. */
4884 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
4885 return tramp;
4886
4887 /* Round address up to desired boundary. */
4888 temp = gen_reg_rtx (Pmode);
4889 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
4890 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
4891
4892 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
4893 temp, 0, OPTAB_LIB_WIDEN);
4894 tramp = expand_simple_binop (Pmode, AND, temp, mask,
4895 temp, 0, OPTAB_LIB_WIDEN);
4896
4897 return tramp;
4898 }
4899
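	/* Expand a call EXP to __builtin_init_trampoline (ONSTACK true) or
	   __builtin_init_heap_trampoline (ONSTACK false).  The arguments are the
	   trampoline address, the nested function, and the static chain value;
	   let the target emit the instructions that initialize the trampoline.  */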
4900 static rtx
4901 expand_builtin_init_trampoline (tree exp, bool onstack)
4902 {
4903 tree t_tramp, t_func, t_chain;
4904 rtx m_tramp, r_tramp, r_chain, tmp;
4905
4906 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
4907 POINTER_TYPE, VOID_TYPE))
4908 return NULL_RTX;
4909
4910 t_tramp = CALL_EXPR_ARG (exp, 0);
4911 t_func = CALL_EXPR_ARG (exp, 1);
4912 t_chain = CALL_EXPR_ARG (exp, 2);
4913
4914 r_tramp = expand_normal (t_tramp);
4915 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
4916 MEM_NOTRAP_P (m_tramp) = 1;
4917
4918 /* If ONSTACK, the TRAMP argument should be the address of a field
4919 within the local function's FRAME decl. Either way, let's see if
4920 we can fill in the MEM_ATTRs for this memory. */
4921 if (TREE_CODE (t_tramp) == ADDR_EXPR)
4922 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
4923
4924 /* Creator of a heap trampoline is responsible for making sure the
4925 address is aligned to at least STACK_BOUNDARY. Normally malloc
4926 will ensure this anyhow. */
4927 tmp = round_trampoline_addr (r_tramp);
4928 if (tmp != r_tramp)
4929 {
4930 m_tramp = change_address (m_tramp, BLKmode, tmp);
4931 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
4932 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
4933 }
4934
4935 /* The FUNC argument should be the address of the nested function.
4936 Extract the actual function decl to pass to the hook. */
4937 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
4938 t_func = TREE_OPERAND (t_func, 0);
4939 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
4940
4941 r_chain = expand_normal (t_chain);
4942
4943 /* Generate insns to initialize the trampoline. */
4944 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
4945
4946 if (onstack)
4947 {
4948 trampolines_created = 1;
4949
4950 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
4951 "trampoline generated for nested function %qD", t_func);
4952 }
4953
4954 return const0_rtx;
4955 }
4956
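	/* Expand a call EXP to __builtin_adjust_trampoline.  Round the trampoline
	   address and give the target a chance to adjust it into a callable
	   address.  */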
4957 static rtx
4958 expand_builtin_adjust_trampoline (tree exp)
4959 {
4960 rtx tramp;
4961
4962 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4963 return NULL_RTX;
4964
4965 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4966 tramp = round_trampoline_addr (tramp);
4967 if (targetm.calls.trampoline_adjust_address)
4968 tramp = targetm.calls.trampoline_adjust_address (tramp);
4969
4970 return tramp;
4971 }
4972
4973 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
4974 function. The function first checks whether the back end provides
4975 an insn to implement signbit for the respective mode. If not, it
4976 checks whether the floating point format of the value is such that
4977 the sign bit can be extracted. If that is not the case, the
4978 function returns NULL_RTX to indicate that a normal call should be
4979 emitted rather than expanding the function in-line. EXP is the
4980 expression that is a call to the builtin function; if convenient,
4981 the result should be placed in TARGET. */
4982 static rtx
4983 expand_builtin_signbit (tree exp, rtx target)
4984 {
4985 const struct real_format *fmt;
4986 machine_mode fmode, imode, rmode;
4987 tree arg;
4988 int word, bitpos;
4989 enum insn_code icode;
4990 rtx temp;
4991 location_t loc = EXPR_LOCATION (exp);
4992
4993 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4994 return NULL_RTX;
4995
4996 arg = CALL_EXPR_ARG (exp, 0);
4997 fmode = TYPE_MODE (TREE_TYPE (arg));
4998 rmode = TYPE_MODE (TREE_TYPE (exp));
4999 fmt = REAL_MODE_FORMAT (fmode);
5000
5001 arg = builtin_save_expr (arg);
5002
5003 /* Expand the argument yielding a RTX expression. */
5004 temp = expand_normal (arg);
5005
5006 /* Check if the back end provides an insn that handles signbit for the
5007 argument's mode. */
5008 icode = optab_handler (signbit_optab, fmode);
5009 if (icode != CODE_FOR_nothing)
5010 {
5011 rtx_insn *last = get_last_insn ();
5012 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5013 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
5014 return target;
5015 delete_insns_since (last);
5016 }
5017
5018 /* For floating point formats without a sign bit, implement signbit
5019 as "ARG < 0.0". */
5020 bitpos = fmt->signbit_ro;
5021 if (bitpos < 0)
5022 {
5023 /* But we can't do this if the format supports signed zero. */
5024 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
5025 return NULL_RTX;
5026
5027 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5028 build_real (TREE_TYPE (arg), dconst0));
5029 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5030 }
5031
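	  /* View the value as an integer of the same width if it fits in a word;
	     otherwise pick out the word that contains the sign bit.  */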
5032 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5033 {
5034 imode = int_mode_for_mode (fmode);
5035 if (imode == BLKmode)
5036 return NULL_RTX;
5037 temp = gen_lowpart (imode, temp);
5038 }
5039 else
5040 {
5041 imode = word_mode;
5042 /* Handle targets with different FP word orders. */
5043 if (FLOAT_WORDS_BIG_ENDIAN)
5044 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5045 else
5046 word = bitpos / BITS_PER_WORD;
5047 temp = operand_subword_force (temp, word, fmode);
5048 bitpos = bitpos % BITS_PER_WORD;
5049 }
5050
5051 /* Force the intermediate word_mode (or narrower) result into a
5052 register. This avoids attempting to create paradoxical SUBREGs
5053 of floating point modes below. */
5054 temp = force_reg (imode, temp);
5055
5056 /* If the bitpos is within the "result mode" lowpart, the operation
5057 	     can be implemented with a single bitwise AND.  Otherwise, we need
5058 a right shift and an AND. */
5059
5060 if (bitpos < GET_MODE_BITSIZE (rmode))
5061 {
5062 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
5063
5064 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5065 temp = gen_lowpart (rmode, temp);
5066 temp = expand_binop (rmode, and_optab, temp,
5067 immed_wide_int_const (mask, rmode),
5068 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5069 }
5070 else
5071 {
5072 /* Perform a logical right shift to place the signbit in the least
5073 significant bit, then truncate the result to the desired mode
5074 and mask just this bit. */
5075 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
5076 temp = gen_lowpart (rmode, temp);
5077 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5078 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5079 }
5080
5081 return temp;
5082 }
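
/* Illustrative sketch only (not part of the expander): for IEEE double on
   a typical 64-bit target, where the sign occupies bit 63 and the result
   mode is 32-bit int, the shift-and-AND path above corresponds roughly to
   the source-level sequence

     unsigned long long bits;
     __builtin_memcpy (&bits, &x, sizeof bits);
     result = (bits >> 63) & 1;

   When the sign bit already falls inside the result mode's lowpart, only
   the single AND with the mask built above is needed.  */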
5083
5084 /* Expand fork or exec calls. TARGET is the desired target of the
5085 call. EXP is the call. FN is the
5086 identifier of the actual function. IGNORE is nonzero if the
5087 value is to be ignored. */
5088
5089 static rtx
5090 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5091 {
5092 tree id, decl;
5093 tree call;
5094
5095 /* If we are not profiling, just call the function. */
5096 if (!profile_arc_flag)
5097 return NULL_RTX;
5098
5099 /* Otherwise call the wrapper. This should be equivalent for the rest of
5100 the compiler, so the code does not diverge, and the wrapper may run the
5101 code necessary for keeping the profiling sane. */
5102
5103 switch (DECL_FUNCTION_CODE (fn))
5104 {
5105 case BUILT_IN_FORK:
5106 id = get_identifier ("__gcov_fork");
5107 break;
5108
5109 case BUILT_IN_EXECL:
5110 id = get_identifier ("__gcov_execl");
5111 break;
5112
5113 case BUILT_IN_EXECV:
5114 id = get_identifier ("__gcov_execv");
5115 break;
5116
5117 case BUILT_IN_EXECLP:
5118 id = get_identifier ("__gcov_execlp");
5119 break;
5120
5121 case BUILT_IN_EXECLE:
5122 id = get_identifier ("__gcov_execle");
5123 break;
5124
5125 case BUILT_IN_EXECVP:
5126 id = get_identifier ("__gcov_execvp");
5127 break;
5128
5129 case BUILT_IN_EXECVE:
5130 id = get_identifier ("__gcov_execve");
5131 break;
5132
5133 default:
5134 gcc_unreachable ();
5135 }
5136
5137 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5138 FUNCTION_DECL, id, TREE_TYPE (fn));
5139 DECL_EXTERNAL (decl) = 1;
5140 TREE_PUBLIC (decl) = 1;
5141 DECL_ARTIFICIAL (decl) = 1;
5142 TREE_NOTHROW (decl) = 1;
5143 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5144 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5145 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5146 return expand_call (call, target, ignore);
5147 }
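
/* Illustration (not compiler code): with -fprofile-arcs in effect a
   source-level call such as

     pid_t child = fork ();

   is redirected to __gcov_fork so libgcov can keep the coverage counters
   consistent across the fork; without profiling the NULL_RTX return above
   makes the call expand as a regular library call.  */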
5148
5149
5150 \f
5151 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5152 the pointer in these functions is void*, the tree optimizers may remove
5153 casts. The mode computed in expand_builtin isn't reliable either, due
5154 to __sync_bool_compare_and_swap.
5155
5156 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5157 group of builtins. This gives us log2 of the mode size. */
5158
5159 static inline machine_mode
5160 get_builtin_sync_mode (int fcode_diff)
5161 {
5162 /* The size is not negotiable, so ask not to get BLKmode in return
5163 if the target indicates that a smaller size would be better. */
5164 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5165 }
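
/* For example (illustration only), for the __sync_fetch_and_add_{1,2,4,8,16}
   group a FCODE_DIFF of 0, 1, 2, 3 or 4 yields the integer mode of 1, 2, 4,
   8 or 16 bytes respectively (QImode, HImode, SImode, DImode, TImode on
   typical targets).  */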
5166
5167 /* Expand the memory expression LOC and return the appropriate memory operand
5168 for the builtin_sync operations. */
5169
5170 static rtx
5171 get_builtin_sync_mem (tree loc, machine_mode mode)
5172 {
5173 rtx addr, mem;
5174
5175 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5176 addr = convert_memory_address (Pmode, addr);
5177
5178 /* Note that we explicitly do not want any alias information for this
5179 memory, so that we kill all other live memories. Otherwise we don't
5180 satisfy the full barrier semantics of the intrinsic. */
5181 mem = validize_mem (gen_rtx_MEM (mode, addr));
5182
5183 /* The alignment needs to be at least that of the mode. */
5184 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5185 get_pointer_alignment (loc)));
5186 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5187 MEM_VOLATILE_P (mem) = 1;
5188
5189 return mem;
5190 }
5191
5192 /* Make sure an argument is in the right mode.
5193 EXP is the tree argument.
5194 MODE is the mode it should be in. */
5195
5196 static rtx
5197 expand_expr_force_mode (tree exp, machine_mode mode)
5198 {
5199 rtx val;
5200 machine_mode old_mode;
5201
5202 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5203 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5204 of CONST_INTs, where we know the old_mode only from the call argument. */
5205
5206 old_mode = GET_MODE (val);
5207 if (old_mode == VOIDmode)
5208 old_mode = TYPE_MODE (TREE_TYPE (exp));
5209 val = convert_modes (mode, old_mode, val, 1);
5210 return val;
5211 }
5212
5213
5214 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5215 EXP is the CALL_EXPR. CODE is the rtx code
5216 that corresponds to the arithmetic or logical operation from the name;
5217 an exception here is that NOT actually means NAND. TARGET is an optional
5218 place for us to store the results; AFTER is true for the
5219 xxx_and_fetch form (which returns the updated value), false otherwise. */
5220
5221 static rtx
5222 expand_builtin_sync_operation (machine_mode mode, tree exp,
5223 enum rtx_code code, bool after,
5224 rtx target)
5225 {
5226 rtx val, mem;
5227 location_t loc = EXPR_LOCATION (exp);
5228
5229 if (code == NOT && warn_sync_nand)
5230 {
5231 tree fndecl = get_callee_fndecl (exp);
5232 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5233
5234 static bool warned_f_a_n, warned_n_a_f;
5235
5236 switch (fcode)
5237 {
5238 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5239 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5240 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5241 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5242 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5243 if (warned_f_a_n)
5244 break;
5245
5246 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5247 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5248 warned_f_a_n = true;
5249 break;
5250
5251 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5252 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5253 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5254 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5255 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5256 if (warned_n_a_f)
5257 break;
5258
5259 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5260 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5261 warned_n_a_f = true;
5262 break;
5263
5264 default:
5265 gcc_unreachable ();
5266 }
5267 }
5268
5269 /* Expand the operands. */
5270 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5271 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5272
5273 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
5274 after);
5275 }
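
/* Source-level illustration (sketch, not compiler code) of how AFTER
   selects between the two __sync forms expanded here:

     old = __sync_fetch_and_add (&counter, 1);      -> AFTER == false
     upd = __sync_add_and_fetch (&counter, 1);      -> AFTER == true

   Both are routed through expand_atomic_fetch_op with
   MEMMODEL_SYNC_SEQ_CST.  */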
5276
5277 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5278 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5279 true if this is the boolean form. TARGET is a place for us to store the
5280 results; this is NOT optional if IS_BOOL is true. */
5281
5282 static rtx
5283 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
5284 bool is_bool, rtx target)
5285 {
5286 rtx old_val, new_val, mem;
5287 rtx *pbool, *poval;
5288
5289 /* Expand the operands. */
5290 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5291 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5292 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5293
5294 pbool = poval = NULL;
5295 if (target != const0_rtx)
5296 {
5297 if (is_bool)
5298 pbool = &target;
5299 else
5300 poval = &target;
5301 }
5302 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5303 false, MEMMODEL_SYNC_SEQ_CST,
5304 MEMMODEL_SYNC_SEQ_CST))
5305 return NULL_RTX;
5306
5307 return target;
5308 }
5309
5310 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5311 general form is actually an atomic exchange, and some targets only
5312 support a reduced form with the second argument being a constant 1.
5313 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5314 the results. */
5315
5316 static rtx
5317 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
5318 rtx target)
5319 {
5320 rtx val, mem;
5321
5322 /* Expand the operands. */
5323 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5324 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5325
5326 return expand_sync_lock_test_and_set (target, mem, val);
5327 }
5328
5329 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5330
5331 static void
5332 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
5333 {
5334 rtx mem;
5335
5336 /* Expand the operands. */
5337 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5338
5339 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
5340 }
5341
5342 /* Given an integer representing an ``enum memmodel'', verify its
5343 correctness and return the memory model enum. */
5344
5345 static enum memmodel
5346 get_memmodel (tree exp)
5347 {
5348 rtx op;
5349 unsigned HOST_WIDE_INT val;
5350
5351 /* If the parameter is not a constant, it's a run time value so we'll just
5352 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5353 if (TREE_CODE (exp) != INTEGER_CST)
5354 return MEMMODEL_SEQ_CST;
5355
5356 op = expand_normal (exp);
5357
5358 val = INTVAL (op);
5359 if (targetm.memmodel_check)
5360 val = targetm.memmodel_check (val);
5361 else if (val & ~MEMMODEL_MASK)
5362 {
5363 warning (OPT_Winvalid_memory_model,
5364 "Unknown architecture specifier in memory model to builtin.");
5365 return MEMMODEL_SEQ_CST;
5366 }
5367
5368 /* Should never see a user-explicit SYNC memory model, so >= LAST works. */
5369 if (memmodel_base (val) >= MEMMODEL_LAST)
5370 {
5371 warning (OPT_Winvalid_memory_model,
5372 "invalid memory model argument to builtin");
5373 return MEMMODEL_SEQ_CST;
5374 }
5375
5376 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
5377 be conservative and promote consume to acquire. */
5378 if (val == MEMMODEL_CONSUME)
5379 val = MEMMODEL_ACQUIRE;
5380
5381 return (enum memmodel) val;
5382 }
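
/* Illustration only: the memory model argument decoded here is the trailing
   constant in calls such as

     __atomic_store_n (&flag, 1, __ATOMIC_RELEASE);
     __atomic_load_n (&flag, __ATOMIC_ACQUIRE);

   A non-constant argument is conservatively treated as MEMMODEL_SEQ_CST,
   and consume is promoted to acquire as noted above.  */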
5383
5384 /* Expand the __atomic_exchange intrinsic:
5385 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5386 EXP is the CALL_EXPR.
5387 TARGET is an optional place for us to store the results. */
5388
5389 static rtx
5390 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
5391 {
5392 rtx val, mem;
5393 enum memmodel model;
5394
5395 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5396
5397 if (!flag_inline_atomics)
5398 return NULL_RTX;
5399
5400 /* Expand the operands. */
5401 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5402 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5403
5404 return expand_atomic_exchange (target, mem, val, model);
5405 }
5406
5407 /* Expand the __atomic_compare_exchange intrinsic:
5408 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5409 TYPE desired, BOOL weak,
5410 enum memmodel success,
5411 enum memmodel failure)
5412 EXP is the CALL_EXPR.
5413 TARGET is an optional place for us to store the results. */
5414
5415 static rtx
5416 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
5417 rtx target)
5418 {
5419 rtx expect, desired, mem, oldval;
5420 rtx_code_label *label;
5421 enum memmodel success, failure;
5422 tree weak;
5423 bool is_weak;
5424
5425 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5426 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5427
5428 if (failure > success)
5429 {
5430 warning (OPT_Winvalid_memory_model,
5431 "failure memory model cannot be stronger than success memory "
5432 "model for %<__atomic_compare_exchange%>");
5433 success = MEMMODEL_SEQ_CST;
5434 }
5435
5436 if (is_mm_release (failure) || is_mm_acq_rel (failure))
5437 {
5438 warning (OPT_Winvalid_memory_model,
5439 "invalid failure memory model for "
5440 "%<__atomic_compare_exchange%>");
5441 failure = MEMMODEL_SEQ_CST;
5442 success = MEMMODEL_SEQ_CST;
5443 }
5444
5445
5446 if (!flag_inline_atomics)
5447 return NULL_RTX;
5448
5449 /* Expand the operands. */
5450 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5451
5452 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5453 expect = convert_memory_address (Pmode, expect);
5454 expect = gen_rtx_MEM (mode, expect);
5455 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5456
5457 weak = CALL_EXPR_ARG (exp, 3);
5458 is_weak = false;
5459 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
5460 is_weak = true;
5461
5462 if (target == const0_rtx)
5463 target = NULL;
5464
5465 /* Lest the rtl backend create a race condition with an improper store
5466 to memory, always create a new pseudo for OLDVAL. */
5467 oldval = NULL;
5468
5469 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
5470 is_weak, success, failure))
5471 return NULL_RTX;
5472
5473 /* Conditionally store back to EXPECT, lest we create a race condition
5474 with an improper store to memory. */
5475 /* ??? With a rearrangement of atomics at the gimple level, we can handle
5476 the normal case where EXPECT is totally private, i.e. a register. At
5477 which point the store can be unconditional. */
5478 label = gen_label_rtx ();
5479 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL, VOIDmode, 1, label);
5480 emit_move_insn (expect, oldval);
5481 emit_label (label);
5482
5483 return target;
5484 }
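
/* Source-level sketch of the call being expanded (illustration only), with
   WEAK == false and both memory models seq_cst:

     bool ok = __atomic_compare_exchange_n (&obj, &expected, desired,
                                            0, __ATOMIC_SEQ_CST,
                                            __ATOMIC_SEQ_CST);

   On failure the current value of *obj is written back into expected, which
   is what the conditional store to EXPECT above implements.  */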
5485
5486 /* Expand the __atomic_load intrinsic:
5487 TYPE __atomic_load (TYPE *object, enum memmodel)
5488 EXP is the CALL_EXPR.
5489 TARGET is an optional place for us to store the results. */
5490
5491 static rtx
5492 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
5493 {
5494 rtx mem;
5495 enum memmodel model;
5496
5497 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5498 if (is_mm_release (model) || is_mm_acq_rel (model))
5499 {
5500 warning (OPT_Winvalid_memory_model,
5501 "invalid memory model for %<__atomic_load%>");
5502 model = MEMMODEL_SEQ_CST;
5503 }
5504
5505 if (!flag_inline_atomics)
5506 return NULL_RTX;
5507
5508 /* Expand the operand. */
5509 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5510
5511 return expand_atomic_load (target, mem, model);
5512 }
5513
5514
5515 /* Expand the __atomic_store intrinsic:
5516 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5517 EXP is the CALL_EXPR.
5518 TARGET is an optional place for us to store the results. */
5519
5520 static rtx
5521 expand_builtin_atomic_store (machine_mode mode, tree exp)
5522 {
5523 rtx mem, val;
5524 enum memmodel model;
5525
5526 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5527 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
5528 || is_mm_release (model)))
5529 {
5530 warning (OPT_Winvalid_memory_model,
5531 "invalid memory model for %<__atomic_store%>");
5532 model = MEMMODEL_SEQ_CST;
5533 }
5534
5535 if (!flag_inline_atomics)
5536 return NULL_RTX;
5537
5538 /* Expand the operands. */
5539 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5540 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5541
5542 return expand_atomic_store (mem, val, model, false);
5543 }
5544
5545 /* Expand the __atomic_fetch_XXX intrinsic:
5546 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5547 EXP is the CALL_EXPR.
5548 TARGET is an optional place for us to store the results.
5549 CODE is the operation: PLUS, MINUS, AND, XOR, IOR, or NOT (meaning NAND).
5550 FETCH_AFTER is true if returning the result of the operation.
5551 FETCH_AFTER is false if returning the value before the operation.
5552 IGNORE is true if the result is not used.
5553 EXT_CALL is the correct builtin for an external call if this cannot be
5554 resolved to an instruction sequence. */
5555
5556 static rtx
5557 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
5558 enum rtx_code code, bool fetch_after,
5559 bool ignore, enum built_in_function ext_call)
5560 {
5561 rtx val, mem, ret;
5562 enum memmodel model;
5563 tree fndecl;
5564 tree addr;
5565
5566 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5567
5568 /* Expand the operands. */
5569 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5570 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5571
5572 /* Only try generating instructions if inlining is turned on. */
5573 if (flag_inline_atomics)
5574 {
5575 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
5576 if (ret)
5577 return ret;
5578 }
5579
5580 /* Return if a different routine isn't needed for the library call. */
5581 if (ext_call == BUILT_IN_NONE)
5582 return NULL_RTX;
5583
5584 /* Change the call to the specified function. */
5585 fndecl = get_callee_fndecl (exp);
5586 addr = CALL_EXPR_FN (exp);
5587 STRIP_NOPS (addr);
5588
5589 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
5590 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
5591
5592 /* Expand the call here so we can emit trailing code. */
5593 ret = expand_call (exp, target, ignore);
5594
5595 /* Replace the original function just in case it matters. */
5596 TREE_OPERAND (addr, 0) = fndecl;
5597
5598 /* Then issue the arithmetic correction to return the right result. */
5599 if (!ignore)
5600 {
5601 if (code == NOT)
5602 {
5603 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
5604 OPTAB_LIB_WIDEN);
5605 ret = expand_simple_unop (mode, NOT, ret, target, true);
5606 }
5607 else
5608 ret = expand_simple_binop (mode, code, ret, val, target, true,
5609 OPTAB_LIB_WIDEN);
5610 }
5611 return ret;
5612 }
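
/* Illustration of the trailing correction (sketch only): if e.g.
   __atomic_add_fetch_4 cannot be inlined, the call is re-issued as the
   external __atomic_fetch_add_4 and the returned pre-operation value is
   then fixed up as

     ret = ret + val;             for PLUS
     ret = ~(ret & val);          for NOT, i.e. the NAND case

   so the caller still observes the post-operation value.  */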
5613
5614
5615 #ifndef HAVE_atomic_clear
5616 # define HAVE_atomic_clear 0
5617 # define gen_atomic_clear(x,y) (gcc_unreachable (), NULL_RTX)
5618 #endif
5619
5620 /* Expand an atomic clear operation.
5621 void _atomic_clear (BOOL *obj, enum memmodel)
5622 EXP is the call expression. */
5623
5624 static rtx
5625 expand_builtin_atomic_clear (tree exp)
5626 {
5627 machine_mode mode;
5628 rtx mem, ret;
5629 enum memmodel model;
5630
5631 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5632 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5633 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5634
5635 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
5636 {
5637 warning (OPT_Winvalid_memory_model,
5638 "invalid memory model for %<__atomic_store%>");
5639 model = MEMMODEL_SEQ_CST;
5640 }
5641
5642 if (HAVE_atomic_clear)
5643 {
5644 emit_insn (gen_atomic_clear (mem, model));
5645 return const0_rtx;
5646 }
5647
5648 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
5649 Failing that, a store is issued by __atomic_store. The only way this can
5650 fail is if the bool type is larger than a word size. Unlikely, but
5651 handle it anyway for completeness. Assume a single threaded model since
5652 there is no atomic support in this case, and no barriers are required. */
5653 ret = expand_atomic_store (mem, const0_rtx, model, true);
5654 if (!ret)
5655 emit_move_insn (mem, const0_rtx);
5656 return const0_rtx;
5657 }
5658
5659 /* Expand an atomic test_and_set operation.
5660 bool _atomic_test_and_set (BOOL *obj, enum memmodel)
5661 EXP is the call expression. */
5662
5663 static rtx
5664 expand_builtin_atomic_test_and_set (tree exp, rtx target)
5665 {
5666 rtx mem;
5667 enum memmodel model;
5668 machine_mode mode;
5669
5670 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5671 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5672 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5673
5674 return expand_atomic_test_and_set (target, mem, model);
5675 }
5676
5677
5678 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
5679 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
5680
5681 static tree
5682 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
5683 {
5684 int size;
5685 machine_mode mode;
5686 unsigned int mode_align, type_align;
5687
5688 if (TREE_CODE (arg0) != INTEGER_CST)
5689 return NULL_TREE;
5690
5691 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
5692 mode = mode_for_size (size, MODE_INT, 0);
5693 mode_align = GET_MODE_ALIGNMENT (mode);
5694
5695 if (TREE_CODE (arg1) == INTEGER_CST && INTVAL (expand_normal (arg1)) == 0)
5696 type_align = mode_align;
5697 else
5698 {
5699 tree ttype = TREE_TYPE (arg1);
5700
5701 /* This function is usually invoked and folded immediately by the front
5702 end before anything else has a chance to look at it. The pointer
5703 parameter at this point is usually cast to a void *, so check for that
5704 and look past the cast. */
5705 if (CONVERT_EXPR_P (arg1) && POINTER_TYPE_P (ttype)
5706 && VOID_TYPE_P (TREE_TYPE (ttype)))
5707 arg1 = TREE_OPERAND (arg1, 0);
5708
5709 ttype = TREE_TYPE (arg1);
5710 gcc_assert (POINTER_TYPE_P (ttype));
5711
5712 /* Get the underlying type of the object. */
5713 ttype = TREE_TYPE (ttype);
5714 type_align = TYPE_ALIGN (ttype);
5715 }
5716
5717 /* If the object has smaller alignment, the lock free routines cannot
5718 be used. */
5719 if (type_align < mode_align)
5720 return boolean_false_node;
5721
5722 /* Check if a compare_and_swap pattern exists for the mode which represents
5723 the required size. The pattern is not allowed to fail, so the existence
5724 of the pattern indicates support is present. */
5725 if (can_compare_and_swap_p (mode, true))
5726 return boolean_true_node;
5727 else
5728 return boolean_false_node;
5729 }
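
/* For example (illustration only), on a target providing a native 4-byte
   compare-and-swap pattern

     __atomic_always_lock_free (sizeof (int), (void *) 0)

   folds to true here, whereas passing a pointer to an underaligned 4-byte
   member of a packed struct folds to false because its type alignment is
   below the mode alignment required above.  */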
5730
5731 /* Return true if the parameters to call EXP represent an object which will
5732 always generate lock free instructions. The first argument represents the
5733 size of the object, and the second parameter is a pointer to the object
5734 itself. If NULL is passed for the object, then the result is based on
5735 typical alignment for an object of the specified size. Otherwise return
5736 false. */
5737
5738 static rtx
5739 expand_builtin_atomic_always_lock_free (tree exp)
5740 {
5741 tree size;
5742 tree arg0 = CALL_EXPR_ARG (exp, 0);
5743 tree arg1 = CALL_EXPR_ARG (exp, 1);
5744
5745 if (TREE_CODE (arg0) != INTEGER_CST)
5746 {
5747 error ("non-constant argument 1 to __atomic_always_lock_free");
5748 return const0_rtx;
5749 }
5750
5751 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
5752 if (size == boolean_true_node)
5753 return const1_rtx;
5754 return const0_rtx;
5755 }
5756
5757 /* Return one or zero if it can be determined that object ARG1 of size
5758 ARG0 is lock free on this architecture. */
5759
5760 static tree
5761 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
5762 {
5763 if (!flag_inline_atomics)
5764 return NULL_TREE;
5765
5766 /* If it isn't always lock free, don't generate a result. */
5767 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
5768 return boolean_true_node;
5769
5770 return NULL_TREE;
5771 }
5772
5773 /* Return true if the parameters to call EXP represent an object which will
5774 always generate lock free instructions. The first argument represents the
5775 size of the object, and the second parameter is a pointer to the object
5776 itself. If NULL is passed for the object, then the result is based on
5777 typical alignment for an object of the specified size. Otherwise return
5778 NULL. */
5779
5780 static rtx
5781 expand_builtin_atomic_is_lock_free (tree exp)
5782 {
5783 tree size;
5784 tree arg0 = CALL_EXPR_ARG (exp, 0);
5785 tree arg1 = CALL_EXPR_ARG (exp, 1);
5786
5787 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5788 {
5789 error ("non-integer argument 1 to __atomic_is_lock_free");
5790 return NULL_RTX;
5791 }
5792
5793 if (!flag_inline_atomics)
5794 return NULL_RTX;
5795
5796 /* If the value is known at compile time, return the RTX for it. */
5797 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
5798 if (size == boolean_true_node)
5799 return const1_rtx;
5800
5801 return NULL_RTX;
5802 }
5803
5804 /* Expand the __atomic_thread_fence intrinsic:
5805 void __atomic_thread_fence (enum memmodel)
5806 EXP is the CALL_EXPR. */
5807
5808 static void
5809 expand_builtin_atomic_thread_fence (tree exp)
5810 {
5811 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5812 expand_mem_thread_fence (model);
5813 }
5814
5815 /* Expand the __atomic_signal_fence intrinsic:
5816 void __atomic_signal_fence (enum memmodel)
5817 EXP is the CALL_EXPR. */
5818
5819 static void
5820 expand_builtin_atomic_signal_fence (tree exp)
5821 {
5822 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5823 expand_mem_signal_fence (model);
5824 }
5825
5826 /* Expand the __sync_synchronize intrinsic. */
5827
5828 static void
5829 expand_builtin_sync_synchronize (void)
5830 {
5831 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
5832 }
5833
5834 static rtx
5835 expand_builtin_thread_pointer (tree exp, rtx target)
5836 {
5837 enum insn_code icode;
5838 if (!validate_arglist (exp, VOID_TYPE))
5839 return const0_rtx;
5840 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
5841 if (icode != CODE_FOR_nothing)
5842 {
5843 struct expand_operand op;
5844 /* If the target is not suitable then create a new target. */
5845 if (target == NULL_RTX
5846 || !REG_P (target)
5847 || GET_MODE (target) != Pmode)
5848 target = gen_reg_rtx (Pmode);
5849 create_output_operand (&op, target, Pmode);
5850 expand_insn (icode, 1, &op);
5851 return target;
5852 }
5853 error ("__builtin_thread_pointer is not supported on this target");
5854 return const0_rtx;
5855 }
5856
5857 static void
5858 expand_builtin_set_thread_pointer (tree exp)
5859 {
5860 enum insn_code icode;
5861 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5862 return;
5863 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
5864 if (icode != CODE_FOR_nothing)
5865 {
5866 struct expand_operand op;
5867 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
5868 Pmode, EXPAND_NORMAL);
5869 create_input_operand (&op, val, Pmode);
5870 expand_insn (icode, 1, &op);
5871 return;
5872 }
5873 error ("__builtin_set_thread_pointer is not supported on this target");
5874 }
5875
5876 \f
5877 /* Emit code to restore the current value of the stack. */
5878
5879 static void
5880 expand_stack_restore (tree var)
5881 {
5882 rtx_insn *prev;
5883 rtx sa = expand_normal (var);
5884
5885 sa = convert_memory_address (Pmode, sa);
5886
5887 prev = get_last_insn ();
5888 emit_stack_restore (SAVE_BLOCK, sa);
5889
5890 record_new_stack_level ();
5891
5892 fixup_args_size_notes (prev, get_last_insn (), 0);
5893 }
5894
5895 /* Emit code to save the current value of the stack. */
5896
5897 static rtx
5898 expand_stack_save (void)
5899 {
5900 rtx ret = NULL_RTX;
5901
5902 emit_stack_save (SAVE_BLOCK, &ret);
5903 return ret;
5904 }
5905
5906
5907 /* Expand OpenACC acc_on_device.
5908
5909 This has to happen late (that is, not in early folding; expand_builtin_*,
5910 rather than fold_builtin_*), as we have to act differently for host and
5911 acceleration device (ACCEL_COMPILER conditional). */
5912
5913 static rtx
5914 expand_builtin_acc_on_device (tree exp, rtx target)
5915 {
5916 #ifdef ACCEL_COMPILER
5917 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5918 return NULL_RTX;
5919
5920 tree arg = CALL_EXPR_ARG (exp, 0);
5921
5922 /* Return (arg == v1 || arg == v2) ? 1 : 0. */
5923 machine_mode v_mode = TYPE_MODE (TREE_TYPE (arg));
5924 rtx v = expand_normal (arg), v1, v2;
5925 v1 = GEN_INT (GOMP_DEVICE_NOT_HOST);
5926 v2 = GEN_INT (ACCEL_COMPILER_acc_device);
5927 machine_mode target_mode = TYPE_MODE (integer_type_node);
5928 if (!target || !register_operand (target, target_mode))
5929 target = gen_reg_rtx (target_mode);
5930 emit_move_insn (target, const1_rtx);
5931 rtx_code_label *done_label = gen_label_rtx ();
5932 do_compare_rtx_and_jump (v, v1, EQ, false, v_mode, NULL_RTX,
5933 NULL, done_label, PROB_EVEN);
5934 do_compare_rtx_and_jump (v, v2, EQ, false, v_mode, NULL_RTX,
5935 NULL, done_label, PROB_EVEN);
5936 emit_move_insn (target, const0_rtx);
5937 emit_label (done_label);
5938
5939 return target;
5940 #else
5941 return NULL;
5942 #endif
5943 }
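
/* Illustrative source-level equivalent of the ACCEL_COMPILER branch above
   (sketch only):

     return dev == GOMP_DEVICE_NOT_HOST || dev == ACCEL_COMPILER_acc_device;

   In the host compiler the builtin is not expanded inline here, so a
   regular call results instead.  */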
5944
5945
5946 /* Expand an expression EXP that calls a built-in function,
5947 with result going to TARGET if that's convenient
5948 (and in mode MODE if that's convenient).
5949 SUBTARGET may be used as the target for computing one of EXP's operands.
5950 IGNORE is nonzero if the value is to be ignored. */
5951
5952 rtx
5953 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
5954 int ignore)
5955 {
5956 tree fndecl = get_callee_fndecl (exp);
5957 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5958 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5959 int flags;
5960
5961 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5962 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5963
5964 /* When ASan is enabled, we don't want to expand some memory/string
5965 builtins and rely on libsanitizer's hooks. This allows us to avoid
5966 redundant checks and be sure, that possible overflow will be detected
5967 by ASan. */
5968
5969 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
5970 return expand_call (exp, target, ignore);
5971
5972 /* When not optimizing, generate calls to library functions for a certain
5973 set of builtins. */
5974 if (!optimize
5975 && !called_as_built_in (fndecl)
5976 && fcode != BUILT_IN_FORK
5977 && fcode != BUILT_IN_EXECL
5978 && fcode != BUILT_IN_EXECV
5979 && fcode != BUILT_IN_EXECLP
5980 && fcode != BUILT_IN_EXECLE
5981 && fcode != BUILT_IN_EXECVP
5982 && fcode != BUILT_IN_EXECVE
5983 && fcode != BUILT_IN_ALLOCA
5984 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
5985 && fcode != BUILT_IN_FREE
5986 && fcode != BUILT_IN_CHKP_SET_PTR_BOUNDS
5987 && fcode != BUILT_IN_CHKP_INIT_PTR_BOUNDS
5988 && fcode != BUILT_IN_CHKP_NULL_PTR_BOUNDS
5989 && fcode != BUILT_IN_CHKP_COPY_PTR_BOUNDS
5990 && fcode != BUILT_IN_CHKP_NARROW_PTR_BOUNDS
5991 && fcode != BUILT_IN_CHKP_STORE_PTR_BOUNDS
5992 && fcode != BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
5993 && fcode != BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
5994 && fcode != BUILT_IN_CHKP_CHECK_PTR_BOUNDS
5995 && fcode != BUILT_IN_CHKP_GET_PTR_LBOUND
5996 && fcode != BUILT_IN_CHKP_GET_PTR_UBOUND
5997 && fcode != BUILT_IN_CHKP_BNDRET)
5998 return expand_call (exp, target, ignore);
5999
6000 /* The built-in function expanders test for target == const0_rtx
6001 to determine whether the function's result will be ignored. */
6002 if (ignore)
6003 target = const0_rtx;
6004
6005 /* If the result of a pure or const built-in function is ignored, and
6006 none of its arguments are volatile, we can avoid expanding the
6007 built-in call and just evaluate the arguments for side-effects. */
6008 if (target == const0_rtx
6009 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
6010 && !(flags & ECF_LOOPING_CONST_OR_PURE))
6011 {
6012 bool volatilep = false;
6013 tree arg;
6014 call_expr_arg_iterator iter;
6015
6016 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6017 if (TREE_THIS_VOLATILE (arg))
6018 {
6019 volatilep = true;
6020 break;
6021 }
6022
6023 if (! volatilep)
6024 {
6025 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6026 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
6027 return const0_rtx;
6028 }
6029 }
6030
6031 /* expand_builtin_with_bounds is supposed to be used for
6032 instrumented builtin calls. */
6033 gcc_assert (!CALL_WITH_BOUNDS_P (exp));
6034
6035 switch (fcode)
6036 {
6037 CASE_FLT_FN (BUILT_IN_FABS):
6038 case BUILT_IN_FABSD32:
6039 case BUILT_IN_FABSD64:
6040 case BUILT_IN_FABSD128:
6041 target = expand_builtin_fabs (exp, target, subtarget);
6042 if (target)
6043 return target;
6044 break;
6045
6046 CASE_FLT_FN (BUILT_IN_COPYSIGN):
6047 target = expand_builtin_copysign (exp, target, subtarget);
6048 if (target)
6049 return target;
6050 break;
6051
6052 /* Just do a normal library call if we were unable to fold
6053 the values. */
6054 CASE_FLT_FN (BUILT_IN_CABS):
6055 break;
6056
6057 CASE_FLT_FN (BUILT_IN_EXP):
6058 CASE_FLT_FN (BUILT_IN_EXP10):
6059 CASE_FLT_FN (BUILT_IN_POW10):
6060 CASE_FLT_FN (BUILT_IN_EXP2):
6061 CASE_FLT_FN (BUILT_IN_EXPM1):
6062 CASE_FLT_FN (BUILT_IN_LOGB):
6063 CASE_FLT_FN (BUILT_IN_LOG):
6064 CASE_FLT_FN (BUILT_IN_LOG10):
6065 CASE_FLT_FN (BUILT_IN_LOG2):
6066 CASE_FLT_FN (BUILT_IN_LOG1P):
6067 CASE_FLT_FN (BUILT_IN_TAN):
6068 CASE_FLT_FN (BUILT_IN_ASIN):
6069 CASE_FLT_FN (BUILT_IN_ACOS):
6070 CASE_FLT_FN (BUILT_IN_ATAN):
6071 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
6072 /* Treat these like sqrt only if unsafe math optimizations are allowed,
6073 because of possible accuracy problems. */
6074 if (! flag_unsafe_math_optimizations)
6075 break;
6076 CASE_FLT_FN (BUILT_IN_SQRT):
6077 CASE_FLT_FN (BUILT_IN_FLOOR):
6078 CASE_FLT_FN (BUILT_IN_CEIL):
6079 CASE_FLT_FN (BUILT_IN_TRUNC):
6080 CASE_FLT_FN (BUILT_IN_ROUND):
6081 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6082 CASE_FLT_FN (BUILT_IN_RINT):
6083 target = expand_builtin_mathfn (exp, target, subtarget);
6084 if (target)
6085 return target;
6086 break;
6087
6088 CASE_FLT_FN (BUILT_IN_FMA):
6089 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
6090 if (target)
6091 return target;
6092 break;
6093
6094 CASE_FLT_FN (BUILT_IN_ILOGB):
6095 if (! flag_unsafe_math_optimizations)
6096 break;
6097 CASE_FLT_FN (BUILT_IN_ISINF):
6098 CASE_FLT_FN (BUILT_IN_FINITE):
6099 case BUILT_IN_ISFINITE:
6100 case BUILT_IN_ISNORMAL:
6101 target = expand_builtin_interclass_mathfn (exp, target);
6102 if (target)
6103 return target;
6104 break;
6105
6106 CASE_FLT_FN (BUILT_IN_ICEIL):
6107 CASE_FLT_FN (BUILT_IN_LCEIL):
6108 CASE_FLT_FN (BUILT_IN_LLCEIL):
6109 CASE_FLT_FN (BUILT_IN_LFLOOR):
6110 CASE_FLT_FN (BUILT_IN_IFLOOR):
6111 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6112 target = expand_builtin_int_roundingfn (exp, target);
6113 if (target)
6114 return target;
6115 break;
6116
6117 CASE_FLT_FN (BUILT_IN_IRINT):
6118 CASE_FLT_FN (BUILT_IN_LRINT):
6119 CASE_FLT_FN (BUILT_IN_LLRINT):
6120 CASE_FLT_FN (BUILT_IN_IROUND):
6121 CASE_FLT_FN (BUILT_IN_LROUND):
6122 CASE_FLT_FN (BUILT_IN_LLROUND):
6123 target = expand_builtin_int_roundingfn_2 (exp, target);
6124 if (target)
6125 return target;
6126 break;
6127
6128 CASE_FLT_FN (BUILT_IN_POWI):
6129 target = expand_builtin_powi (exp, target);
6130 if (target)
6131 return target;
6132 break;
6133
6134 CASE_FLT_FN (BUILT_IN_ATAN2):
6135 CASE_FLT_FN (BUILT_IN_LDEXP):
6136 CASE_FLT_FN (BUILT_IN_SCALB):
6137 CASE_FLT_FN (BUILT_IN_SCALBN):
6138 CASE_FLT_FN (BUILT_IN_SCALBLN):
6139 if (! flag_unsafe_math_optimizations)
6140 break;
6141
6142 CASE_FLT_FN (BUILT_IN_FMOD):
6143 CASE_FLT_FN (BUILT_IN_REMAINDER):
6144 CASE_FLT_FN (BUILT_IN_DREM):
6145 CASE_FLT_FN (BUILT_IN_POW):
6146 target = expand_builtin_mathfn_2 (exp, target, subtarget);
6147 if (target)
6148 return target;
6149 break;
6150
6151 CASE_FLT_FN (BUILT_IN_CEXPI):
6152 target = expand_builtin_cexpi (exp, target);
6153 gcc_assert (target);
6154 return target;
6155
6156 CASE_FLT_FN (BUILT_IN_SIN):
6157 CASE_FLT_FN (BUILT_IN_COS):
6158 if (! flag_unsafe_math_optimizations)
6159 break;
6160 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6161 if (target)
6162 return target;
6163 break;
6164
6165 CASE_FLT_FN (BUILT_IN_SINCOS):
6166 if (! flag_unsafe_math_optimizations)
6167 break;
6168 target = expand_builtin_sincos (exp);
6169 if (target)
6170 return target;
6171 break;
6172
6173 case BUILT_IN_APPLY_ARGS:
6174 return expand_builtin_apply_args ();
6175
6176 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6177 FUNCTION with a copy of the parameters described by
6178 ARGUMENTS, and ARGSIZE. It returns a block of memory
6179 allocated on the stack into which is stored all the registers
6180 that might possibly be used for returning the result of a
6181 function. ARGUMENTS is the value returned by
6182 __builtin_apply_args. ARGSIZE is the number of bytes of
6183 arguments that must be copied. ??? How should this value be
6184 computed? We'll also need a safe worst case value for varargs
6185 functions. */
6186 case BUILT_IN_APPLY:
6187 if (!validate_arglist (exp, POINTER_TYPE,
6188 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6189 && !validate_arglist (exp, REFERENCE_TYPE,
6190 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6191 return const0_rtx;
6192 else
6193 {
6194 rtx ops[3];
6195
6196 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6197 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6198 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6199
6200 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6201 }
6202
6203 /* __builtin_return (RESULT) causes the function to return the
6204 value described by RESULT. RESULT is address of the block of
6205 memory returned by __builtin_apply. */
6206 case BUILT_IN_RETURN:
6207 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6208 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6209 return const0_rtx;
6210
6211 case BUILT_IN_SAVEREGS:
6212 return expand_builtin_saveregs ();
6213
6214 case BUILT_IN_VA_ARG_PACK:
6215 /* All valid uses of __builtin_va_arg_pack () are removed during
6216 inlining. */
6217 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6218 return const0_rtx;
6219
6220 case BUILT_IN_VA_ARG_PACK_LEN:
6221 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6222 inlining. */
6223 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6224 return const0_rtx;
6225
6226 /* Return the address of the first anonymous stack arg. */
6227 case BUILT_IN_NEXT_ARG:
6228 if (fold_builtin_next_arg (exp, false))
6229 return const0_rtx;
6230 return expand_builtin_next_arg ();
6231
6232 case BUILT_IN_CLEAR_CACHE:
6233 target = expand_builtin___clear_cache (exp);
6234 if (target)
6235 return target;
6236 break;
6237
6238 case BUILT_IN_CLASSIFY_TYPE:
6239 return expand_builtin_classify_type (exp);
6240
6241 case BUILT_IN_CONSTANT_P:
6242 return const0_rtx;
6243
6244 case BUILT_IN_FRAME_ADDRESS:
6245 case BUILT_IN_RETURN_ADDRESS:
6246 return expand_builtin_frame_address (fndecl, exp);
6247
6248 /* Returns the address of the area where the structure is returned.
6249 0 otherwise. */
6250 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6251 if (call_expr_nargs (exp) != 0
6252 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6253 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6254 return const0_rtx;
6255 else
6256 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6257
6258 case BUILT_IN_ALLOCA:
6259 case BUILT_IN_ALLOCA_WITH_ALIGN:
6260 /* If the allocation stems from the declaration of a variable-sized
6261 object, it cannot accumulate. */
6262 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
6263 if (target)
6264 return target;
6265 break;
6266
6267 case BUILT_IN_STACK_SAVE:
6268 return expand_stack_save ();
6269
6270 case BUILT_IN_STACK_RESTORE:
6271 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6272 return const0_rtx;
6273
6274 case BUILT_IN_BSWAP16:
6275 case BUILT_IN_BSWAP32:
6276 case BUILT_IN_BSWAP64:
6277 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
6278 if (target)
6279 return target;
6280 break;
6281
6282 CASE_INT_FN (BUILT_IN_FFS):
6283 target = expand_builtin_unop (target_mode, exp, target,
6284 subtarget, ffs_optab);
6285 if (target)
6286 return target;
6287 break;
6288
6289 CASE_INT_FN (BUILT_IN_CLZ):
6290 target = expand_builtin_unop (target_mode, exp, target,
6291 subtarget, clz_optab);
6292 if (target)
6293 return target;
6294 break;
6295
6296 CASE_INT_FN (BUILT_IN_CTZ):
6297 target = expand_builtin_unop (target_mode, exp, target,
6298 subtarget, ctz_optab);
6299 if (target)
6300 return target;
6301 break;
6302
6303 CASE_INT_FN (BUILT_IN_CLRSB):
6304 target = expand_builtin_unop (target_mode, exp, target,
6305 subtarget, clrsb_optab);
6306 if (target)
6307 return target;
6308 break;
6309
6310 CASE_INT_FN (BUILT_IN_POPCOUNT):
6311 target = expand_builtin_unop (target_mode, exp, target,
6312 subtarget, popcount_optab);
6313 if (target)
6314 return target;
6315 break;
6316
6317 CASE_INT_FN (BUILT_IN_PARITY):
6318 target = expand_builtin_unop (target_mode, exp, target,
6319 subtarget, parity_optab);
6320 if (target)
6321 return target;
6322 break;
6323
6324 case BUILT_IN_STRLEN:
6325 target = expand_builtin_strlen (exp, target, target_mode);
6326 if (target)
6327 return target;
6328 break;
6329
6330 case BUILT_IN_STRCPY:
6331 target = expand_builtin_strcpy (exp, target);
6332 if (target)
6333 return target;
6334 break;
6335
6336 case BUILT_IN_STRNCPY:
6337 target = expand_builtin_strncpy (exp, target);
6338 if (target)
6339 return target;
6340 break;
6341
6342 case BUILT_IN_STPCPY:
6343 target = expand_builtin_stpcpy (exp, target, mode);
6344 if (target)
6345 return target;
6346 break;
6347
6348 case BUILT_IN_MEMCPY:
6349 target = expand_builtin_memcpy (exp, target);
6350 if (target)
6351 return target;
6352 break;
6353
6354 case BUILT_IN_MEMPCPY:
6355 target = expand_builtin_mempcpy (exp, target, mode);
6356 if (target)
6357 return target;
6358 break;
6359
6360 case BUILT_IN_MEMSET:
6361 target = expand_builtin_memset (exp, target, mode);
6362 if (target)
6363 return target;
6364 break;
6365
6366 case BUILT_IN_BZERO:
6367 target = expand_builtin_bzero (exp);
6368 if (target)
6369 return target;
6370 break;
6371
6372 case BUILT_IN_STRCMP:
6373 target = expand_builtin_strcmp (exp, target);
6374 if (target)
6375 return target;
6376 break;
6377
6378 case BUILT_IN_STRNCMP:
6379 target = expand_builtin_strncmp (exp, target, mode);
6380 if (target)
6381 return target;
6382 break;
6383
6384 case BUILT_IN_BCMP:
6385 case BUILT_IN_MEMCMP:
6386 target = expand_builtin_memcmp (exp, target, mode);
6387 if (target)
6388 return target;
6389 break;
6390
6391 case BUILT_IN_SETJMP:
6392 /* This should have been lowered to the builtins below. */
6393 gcc_unreachable ();
6394
6395 case BUILT_IN_SETJMP_SETUP:
6396 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6397 and the receiver label. */
6398 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6399 {
6400 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6401 VOIDmode, EXPAND_NORMAL);
6402 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6403 rtx label_r = label_rtx (label);
6404
6405 /* This is copied from the handling of non-local gotos. */
6406 expand_builtin_setjmp_setup (buf_addr, label_r);
6407 nonlocal_goto_handler_labels
6408 = gen_rtx_INSN_LIST (VOIDmode, label_r,
6409 nonlocal_goto_handler_labels);
6410 /* ??? Do not let expand_label treat us as such since we would
6411 not want to be both on the list of non-local labels and on
6412 the list of forced labels. */
6413 FORCED_LABEL (label) = 0;
6414 return const0_rtx;
6415 }
6416 break;
6417
6418 case BUILT_IN_SETJMP_RECEIVER:
6419 /* __builtin_setjmp_receiver is passed the receiver label. */
6420 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6421 {
6422 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6423 rtx label_r = label_rtx (label);
6424
6425 expand_builtin_setjmp_receiver (label_r);
6426 return const0_rtx;
6427 }
6428 break;
6429
6430 /* __builtin_longjmp is passed a pointer to an array of five words.
6431 It's similar to the C library longjmp function but works with
6432 __builtin_setjmp above. */
6433 case BUILT_IN_LONGJMP:
6434 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6435 {
6436 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6437 VOIDmode, EXPAND_NORMAL);
6438 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6439
6440 if (value != const1_rtx)
6441 {
6442 error ("%<__builtin_longjmp%> second argument must be 1");
6443 return const0_rtx;
6444 }
6445
6446 expand_builtin_longjmp (buf_addr, value);
6447 return const0_rtx;
6448 }
6449 break;
6450
6451 case BUILT_IN_NONLOCAL_GOTO:
6452 target = expand_builtin_nonlocal_goto (exp);
6453 if (target)
6454 return target;
6455 break;
6456
6457 /* This updates the setjmp buffer that is its argument with the value
6458 of the current stack pointer. */
6459 case BUILT_IN_UPDATE_SETJMP_BUF:
6460 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6461 {
6462 rtx buf_addr
6463 = expand_normal (CALL_EXPR_ARG (exp, 0));
6464
6465 expand_builtin_update_setjmp_buf (buf_addr);
6466 return const0_rtx;
6467 }
6468 break;
6469
6470 case BUILT_IN_TRAP:
6471 expand_builtin_trap ();
6472 return const0_rtx;
6473
6474 case BUILT_IN_UNREACHABLE:
6475 expand_builtin_unreachable ();
6476 return const0_rtx;
6477
6478 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6479 case BUILT_IN_SIGNBITD32:
6480 case BUILT_IN_SIGNBITD64:
6481 case BUILT_IN_SIGNBITD128:
6482 target = expand_builtin_signbit (exp, target);
6483 if (target)
6484 return target;
6485 break;
6486
6487 /* Various hooks for the DWARF 2 __throw routine. */
6488 case BUILT_IN_UNWIND_INIT:
6489 expand_builtin_unwind_init ();
6490 return const0_rtx;
6491 case BUILT_IN_DWARF_CFA:
6492 return virtual_cfa_rtx;
6493 #ifdef DWARF2_UNWIND_INFO
6494 case BUILT_IN_DWARF_SP_COLUMN:
6495 return expand_builtin_dwarf_sp_column ();
6496 case BUILT_IN_INIT_DWARF_REG_SIZES:
6497 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6498 return const0_rtx;
6499 #endif
6500 case BUILT_IN_FROB_RETURN_ADDR:
6501 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6502 case BUILT_IN_EXTRACT_RETURN_ADDR:
6503 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6504 case BUILT_IN_EH_RETURN:
6505 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6506 CALL_EXPR_ARG (exp, 1));
6507 return const0_rtx;
6508 case BUILT_IN_EH_RETURN_DATA_REGNO:
6509 return expand_builtin_eh_return_data_regno (exp);
6510 case BUILT_IN_EXTEND_POINTER:
6511 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6512 case BUILT_IN_EH_POINTER:
6513 return expand_builtin_eh_pointer (exp);
6514 case BUILT_IN_EH_FILTER:
6515 return expand_builtin_eh_filter (exp);
6516 case BUILT_IN_EH_COPY_VALUES:
6517 return expand_builtin_eh_copy_values (exp);
6518
6519 case BUILT_IN_VA_START:
6520 return expand_builtin_va_start (exp);
6521 case BUILT_IN_VA_END:
6522 return expand_builtin_va_end (exp);
6523 case BUILT_IN_VA_COPY:
6524 return expand_builtin_va_copy (exp);
6525 case BUILT_IN_EXPECT:
6526 return expand_builtin_expect (exp, target);
6527 case BUILT_IN_ASSUME_ALIGNED:
6528 return expand_builtin_assume_aligned (exp, target);
6529 case BUILT_IN_PREFETCH:
6530 expand_builtin_prefetch (exp);
6531 return const0_rtx;
6532
6533 case BUILT_IN_INIT_TRAMPOLINE:
6534 return expand_builtin_init_trampoline (exp, true);
6535 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
6536 return expand_builtin_init_trampoline (exp, false);
6537 case BUILT_IN_ADJUST_TRAMPOLINE:
6538 return expand_builtin_adjust_trampoline (exp);
6539
6540 case BUILT_IN_FORK:
6541 case BUILT_IN_EXECL:
6542 case BUILT_IN_EXECV:
6543 case BUILT_IN_EXECLP:
6544 case BUILT_IN_EXECLE:
6545 case BUILT_IN_EXECVP:
6546 case BUILT_IN_EXECVE:
6547 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6548 if (target)
6549 return target;
6550 break;
6551
6552 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
6553 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
6554 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
6555 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
6556 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
6557 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
6558 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
6559 if (target)
6560 return target;
6561 break;
6562
6563 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
6564 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
6565 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
6566 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
6567 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
6568 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
6569 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
6570 if (target)
6571 return target;
6572 break;
6573
6574 case BUILT_IN_SYNC_FETCH_AND_OR_1:
6575 case BUILT_IN_SYNC_FETCH_AND_OR_2:
6576 case BUILT_IN_SYNC_FETCH_AND_OR_4:
6577 case BUILT_IN_SYNC_FETCH_AND_OR_8:
6578 case BUILT_IN_SYNC_FETCH_AND_OR_16:
6579 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
6580 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
6581 if (target)
6582 return target;
6583 break;
6584
6585 case BUILT_IN_SYNC_FETCH_AND_AND_1:
6586 case BUILT_IN_SYNC_FETCH_AND_AND_2:
6587 case BUILT_IN_SYNC_FETCH_AND_AND_4:
6588 case BUILT_IN_SYNC_FETCH_AND_AND_8:
6589 case BUILT_IN_SYNC_FETCH_AND_AND_16:
6590 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
6591 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
6592 if (target)
6593 return target;
6594 break;
6595
6596 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
6597 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
6598 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
6599 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
6600 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
6601 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
6602 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
6603 if (target)
6604 return target;
6605 break;
6606
6607 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6608 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6609 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6610 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6611 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6612 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
6613 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
6614 if (target)
6615 return target;
6616 break;
6617
6618 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
6619 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
6620 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
6621 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
6622 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
6623 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
6624 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
6625 if (target)
6626 return target;
6627 break;
6628
6629 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
6630 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
6631 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
6632 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
6633 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
6634 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
6635 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
6636 if (target)
6637 return target;
6638 break;
6639
6640 case BUILT_IN_SYNC_OR_AND_FETCH_1:
6641 case BUILT_IN_SYNC_OR_AND_FETCH_2:
6642 case BUILT_IN_SYNC_OR_AND_FETCH_4:
6643 case BUILT_IN_SYNC_OR_AND_FETCH_8:
6644 case BUILT_IN_SYNC_OR_AND_FETCH_16:
6645 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
6646 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
6647 if (target)
6648 return target;
6649 break;
6650
6651 case BUILT_IN_SYNC_AND_AND_FETCH_1:
6652 case BUILT_IN_SYNC_AND_AND_FETCH_2:
6653 case BUILT_IN_SYNC_AND_AND_FETCH_4:
6654 case BUILT_IN_SYNC_AND_AND_FETCH_8:
6655 case BUILT_IN_SYNC_AND_AND_FETCH_16:
6656 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
6657 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
6658 if (target)
6659 return target;
6660 break;
6661
6662 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
6663 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
6664 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
6665 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
6666 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
6667 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
6668 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
6669 if (target)
6670 return target;
6671 break;
6672
6673 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6674 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6675 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6676 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6677 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6678 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
6679 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
6680 if (target)
6681 return target;
6682 break;
6683
6684 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
6685 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
6686 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
6687 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
6688 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
6689 if (mode == VOIDmode)
6690 mode = TYPE_MODE (boolean_type_node);
6691 if (!target || !register_operand (target, mode))
6692 target = gen_reg_rtx (mode);
6693
6694 mode = get_builtin_sync_mode
6695 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
6696 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6697 if (target)
6698 return target;
6699 break;
6700
6701 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
6702 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
6703 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
6704 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
6705 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
6706 mode = get_builtin_sync_mode
6707 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
6708 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6709 if (target)
6710 return target;
6711 break;
6712
6713 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
6714 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
6715 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
6716 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
6717 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
6718 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
6719 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
6720 if (target)
6721 return target;
6722 break;
6723
6724 case BUILT_IN_SYNC_LOCK_RELEASE_1:
6725 case BUILT_IN_SYNC_LOCK_RELEASE_2:
6726 case BUILT_IN_SYNC_LOCK_RELEASE_4:
6727 case BUILT_IN_SYNC_LOCK_RELEASE_8:
6728 case BUILT_IN_SYNC_LOCK_RELEASE_16:
6729 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
6730 expand_builtin_sync_lock_release (mode, exp);
6731 return const0_rtx;
6732
6733 case BUILT_IN_SYNC_SYNCHRONIZE:
6734 expand_builtin_sync_synchronize ();
6735 return const0_rtx;
6736
6737 case BUILT_IN_ATOMIC_EXCHANGE_1:
6738 case BUILT_IN_ATOMIC_EXCHANGE_2:
6739 case BUILT_IN_ATOMIC_EXCHANGE_4:
6740 case BUILT_IN_ATOMIC_EXCHANGE_8:
6741 case BUILT_IN_ATOMIC_EXCHANGE_16:
6742 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
6743 target = expand_builtin_atomic_exchange (mode, exp, target);
6744 if (target)
6745 return target;
6746 break;
6747
6748 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
6749 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
6750 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
6751 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
6752 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
6753 {
6754 unsigned int nargs, z;
6755 vec<tree, va_gc> *vec;
6756
6757 mode =
6758 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
6759 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
6760 if (target)
6761 return target;
6762
6763 /* If this is turned into an external library call, the weak parameter
6764 must be dropped to match the expected parameter list. */
6765 nargs = call_expr_nargs (exp);
6766 vec_alloc (vec, nargs - 1);
6767 for (z = 0; z < 3; z++)
6768 vec->quick_push (CALL_EXPR_ARG (exp, z));
6769 /* Skip the boolean weak parameter. */
6770 for (z = 4; z < 6; z++)
6771 vec->quick_push (CALL_EXPR_ARG (exp, z));
6772 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
6773 break;
6774 }
6775
6776 case BUILT_IN_ATOMIC_LOAD_1:
6777 case BUILT_IN_ATOMIC_LOAD_2:
6778 case BUILT_IN_ATOMIC_LOAD_4:
6779 case BUILT_IN_ATOMIC_LOAD_8:
6780 case BUILT_IN_ATOMIC_LOAD_16:
6781 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
6782 target = expand_builtin_atomic_load (mode, exp, target);
6783 if (target)
6784 return target;
6785 break;
6786
6787 case BUILT_IN_ATOMIC_STORE_1:
6788 case BUILT_IN_ATOMIC_STORE_2:
6789 case BUILT_IN_ATOMIC_STORE_4:
6790 case BUILT_IN_ATOMIC_STORE_8:
6791 case BUILT_IN_ATOMIC_STORE_16:
6792 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
6793 target = expand_builtin_atomic_store (mode, exp);
6794 if (target)
6795 return const0_rtx;
6796 break;
6797
6798 case BUILT_IN_ATOMIC_ADD_FETCH_1:
6799 case BUILT_IN_ATOMIC_ADD_FETCH_2:
6800 case BUILT_IN_ATOMIC_ADD_FETCH_4:
6801 case BUILT_IN_ATOMIC_ADD_FETCH_8:
6802 case BUILT_IN_ATOMIC_ADD_FETCH_16:
6803 {
6804 enum built_in_function lib;
6805 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
6806 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
6807 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
6808 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
6809 ignore, lib);
6810 if (target)
6811 return target;
6812 break;
6813 }
6814 case BUILT_IN_ATOMIC_SUB_FETCH_1:
6815 case BUILT_IN_ATOMIC_SUB_FETCH_2:
6816 case BUILT_IN_ATOMIC_SUB_FETCH_4:
6817 case BUILT_IN_ATOMIC_SUB_FETCH_8:
6818 case BUILT_IN_ATOMIC_SUB_FETCH_16:
6819 {
6820 enum built_in_function lib;
6821 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
6822 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
6823 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
6824 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
6825 ignore, lib);
6826 if (target)
6827 return target;
6828 break;
6829 }
6830 case BUILT_IN_ATOMIC_AND_FETCH_1:
6831 case BUILT_IN_ATOMIC_AND_FETCH_2:
6832 case BUILT_IN_ATOMIC_AND_FETCH_4:
6833 case BUILT_IN_ATOMIC_AND_FETCH_8:
6834 case BUILT_IN_ATOMIC_AND_FETCH_16:
6835 {
6836 enum built_in_function lib;
6837 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
6838 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
6839 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
6840 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
6841 ignore, lib);
6842 if (target)
6843 return target;
6844 break;
6845 }
6846 case BUILT_IN_ATOMIC_NAND_FETCH_1:
6847 case BUILT_IN_ATOMIC_NAND_FETCH_2:
6848 case BUILT_IN_ATOMIC_NAND_FETCH_4:
6849 case BUILT_IN_ATOMIC_NAND_FETCH_8:
6850 case BUILT_IN_ATOMIC_NAND_FETCH_16:
6851 {
6852 enum built_in_function lib;
6853 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
6854 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
6855 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
6856 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
6857 ignore, lib);
6858 if (target)
6859 return target;
6860 break;
6861 }
6862 case BUILT_IN_ATOMIC_XOR_FETCH_1:
6863 case BUILT_IN_ATOMIC_XOR_FETCH_2:
6864 case BUILT_IN_ATOMIC_XOR_FETCH_4:
6865 case BUILT_IN_ATOMIC_XOR_FETCH_8:
6866 case BUILT_IN_ATOMIC_XOR_FETCH_16:
6867 {
6868 enum built_in_function lib;
6869 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
6870 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
6871 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
6872 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
6873 ignore, lib);
6874 if (target)
6875 return target;
6876 break;
6877 }
6878 case BUILT_IN_ATOMIC_OR_FETCH_1:
6879 case BUILT_IN_ATOMIC_OR_FETCH_2:
6880 case BUILT_IN_ATOMIC_OR_FETCH_4:
6881 case BUILT_IN_ATOMIC_OR_FETCH_8:
6882 case BUILT_IN_ATOMIC_OR_FETCH_16:
6883 {
6884 enum built_in_function lib;
6885 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
6886 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
6887 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
6888 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
6889 ignore, lib);
6890 if (target)
6891 return target;
6892 break;
6893 }
6894 case BUILT_IN_ATOMIC_FETCH_ADD_1:
6895 case BUILT_IN_ATOMIC_FETCH_ADD_2:
6896 case BUILT_IN_ATOMIC_FETCH_ADD_4:
6897 case BUILT_IN_ATOMIC_FETCH_ADD_8:
6898 case BUILT_IN_ATOMIC_FETCH_ADD_16:
6899 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
6900 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
6901 ignore, BUILT_IN_NONE);
6902 if (target)
6903 return target;
6904 break;
6905
6906 case BUILT_IN_ATOMIC_FETCH_SUB_1:
6907 case BUILT_IN_ATOMIC_FETCH_SUB_2:
6908 case BUILT_IN_ATOMIC_FETCH_SUB_4:
6909 case BUILT_IN_ATOMIC_FETCH_SUB_8:
6910 case BUILT_IN_ATOMIC_FETCH_SUB_16:
6911 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
6912 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
6913 ignore, BUILT_IN_NONE);
6914 if (target)
6915 return target;
6916 break;
6917
6918 case BUILT_IN_ATOMIC_FETCH_AND_1:
6919 case BUILT_IN_ATOMIC_FETCH_AND_2:
6920 case BUILT_IN_ATOMIC_FETCH_AND_4:
6921 case BUILT_IN_ATOMIC_FETCH_AND_8:
6922 case BUILT_IN_ATOMIC_FETCH_AND_16:
6923 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
6924 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
6925 ignore, BUILT_IN_NONE);
6926 if (target)
6927 return target;
6928 break;
6929
6930 case BUILT_IN_ATOMIC_FETCH_NAND_1:
6931 case BUILT_IN_ATOMIC_FETCH_NAND_2:
6932 case BUILT_IN_ATOMIC_FETCH_NAND_4:
6933 case BUILT_IN_ATOMIC_FETCH_NAND_8:
6934 case BUILT_IN_ATOMIC_FETCH_NAND_16:
6935 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
6936 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
6937 ignore, BUILT_IN_NONE);
6938 if (target)
6939 return target;
6940 break;
6941
6942 case BUILT_IN_ATOMIC_FETCH_XOR_1:
6943 case BUILT_IN_ATOMIC_FETCH_XOR_2:
6944 case BUILT_IN_ATOMIC_FETCH_XOR_4:
6945 case BUILT_IN_ATOMIC_FETCH_XOR_8:
6946 case BUILT_IN_ATOMIC_FETCH_XOR_16:
6947 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
6948 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
6949 ignore, BUILT_IN_NONE);
6950 if (target)
6951 return target;
6952 break;
6953
6954 case BUILT_IN_ATOMIC_FETCH_OR_1:
6955 case BUILT_IN_ATOMIC_FETCH_OR_2:
6956 case BUILT_IN_ATOMIC_FETCH_OR_4:
6957 case BUILT_IN_ATOMIC_FETCH_OR_8:
6958 case BUILT_IN_ATOMIC_FETCH_OR_16:
6959 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
6960 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
6961 ignore, BUILT_IN_NONE);
6962 if (target)
6963 return target;
6964 break;
6965
6966 case BUILT_IN_ATOMIC_TEST_AND_SET:
6967 return expand_builtin_atomic_test_and_set (exp, target);
6968
6969 case BUILT_IN_ATOMIC_CLEAR:
6970 return expand_builtin_atomic_clear (exp);
6971
6972 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
6973 return expand_builtin_atomic_always_lock_free (exp);
6974
6975 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
6976 target = expand_builtin_atomic_is_lock_free (exp);
6977 if (target)
6978 return target;
6979 break;
6980
6981 case BUILT_IN_ATOMIC_THREAD_FENCE:
6982 expand_builtin_atomic_thread_fence (exp);
6983 return const0_rtx;
6984
6985 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
6986 expand_builtin_atomic_signal_fence (exp);
6987 return const0_rtx;
6988
6989 case BUILT_IN_OBJECT_SIZE:
6990 return expand_builtin_object_size (exp);
6991
6992 case BUILT_IN_MEMCPY_CHK:
6993 case BUILT_IN_MEMPCPY_CHK:
6994 case BUILT_IN_MEMMOVE_CHK:
6995 case BUILT_IN_MEMSET_CHK:
6996 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6997 if (target)
6998 return target;
6999 break;
7000
7001 case BUILT_IN_STRCPY_CHK:
7002 case BUILT_IN_STPCPY_CHK:
7003 case BUILT_IN_STRNCPY_CHK:
7004 case BUILT_IN_STPNCPY_CHK:
7005 case BUILT_IN_STRCAT_CHK:
7006 case BUILT_IN_STRNCAT_CHK:
7007 case BUILT_IN_SNPRINTF_CHK:
7008 case BUILT_IN_VSNPRINTF_CHK:
7009 maybe_emit_chk_warning (exp, fcode);
7010 break;
7011
7012 case BUILT_IN_SPRINTF_CHK:
7013 case BUILT_IN_VSPRINTF_CHK:
7014 maybe_emit_sprintf_chk_warning (exp, fcode);
7015 break;
7016
7017 case BUILT_IN_FREE:
7018 if (warn_free_nonheap_object)
7019 maybe_emit_free_warning (exp);
7020 break;
7021
7022 case BUILT_IN_THREAD_POINTER:
7023 return expand_builtin_thread_pointer (exp, target);
7024
7025 case BUILT_IN_SET_THREAD_POINTER:
7026 expand_builtin_set_thread_pointer (exp);
7027 return const0_rtx;
7028
7029 case BUILT_IN_CILK_DETACH:
7030 expand_builtin_cilk_detach (exp);
7031 return const0_rtx;
7032
7033 case BUILT_IN_CILK_POP_FRAME:
7034 expand_builtin_cilk_pop_frame (exp);
7035 return const0_rtx;
7036
7037 case BUILT_IN_CHKP_INIT_PTR_BOUNDS:
7038 case BUILT_IN_CHKP_NULL_PTR_BOUNDS:
7039 case BUILT_IN_CHKP_COPY_PTR_BOUNDS:
7040 case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS:
7041 case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS:
7042 case BUILT_IN_CHKP_CHECK_PTR_BOUNDS:
7043 case BUILT_IN_CHKP_SET_PTR_BOUNDS:
7044 case BUILT_IN_CHKP_NARROW_PTR_BOUNDS:
7045 case BUILT_IN_CHKP_STORE_PTR_BOUNDS:
7046 case BUILT_IN_CHKP_GET_PTR_LBOUND:
7047 case BUILT_IN_CHKP_GET_PTR_UBOUND:
7048       /* We allow user CHKP builtins if the Pointer Bounds
7049 	 Checker is off.  */
7050 if (!chkp_function_instrumented_p (current_function_decl))
7051 {
7052 if (fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS
7053 || fcode == BUILT_IN_CHKP_NARROW_PTR_BOUNDS
7054 || fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
7055 || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
7056 || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
7057 return expand_normal (CALL_EXPR_ARG (exp, 0));
7058 else if (fcode == BUILT_IN_CHKP_GET_PTR_LBOUND)
7059 return expand_normal (size_zero_node);
7060 else if (fcode == BUILT_IN_CHKP_GET_PTR_UBOUND)
7061 return expand_normal (size_int (-1));
7062 else
7063 return const0_rtx;
7064 }
7065 /* FALLTHROUGH */
7066
7067 case BUILT_IN_CHKP_BNDMK:
7068 case BUILT_IN_CHKP_BNDSTX:
7069 case BUILT_IN_CHKP_BNDCL:
7070 case BUILT_IN_CHKP_BNDCU:
7071 case BUILT_IN_CHKP_BNDLDX:
7072 case BUILT_IN_CHKP_BNDRET:
7073 case BUILT_IN_CHKP_INTERSECT:
7074 case BUILT_IN_CHKP_NARROW:
7075 case BUILT_IN_CHKP_EXTRACT_LOWER:
7076 case BUILT_IN_CHKP_EXTRACT_UPPER:
7077       /* A software implementation of the Pointer Bounds Checker is not yet
7078 	 implemented; target support is required.  */
7079 error ("Your target platform does not support -fcheck-pointer-bounds");
7080 break;
7081
7082 case BUILT_IN_ACC_ON_DEVICE:
7083 target = expand_builtin_acc_on_device (exp, target);
7084 if (target)
7085 return target;
7086 break;
7087
7088 default: /* just do library call, if unknown builtin */
7089 break;
7090 }
7091
7092 /* The switch statement above can drop through to cause the function
7093 to be called normally. */
7094 return expand_call (exp, target, ignore);
7095 }
7096
7097 /* Similar to expand_builtin but is used for instrumented calls. */
7098
7099 rtx
7100 expand_builtin_with_bounds (tree exp, rtx target,
7101 rtx subtarget ATTRIBUTE_UNUSED,
7102 machine_mode mode, int ignore)
7103 {
7104 tree fndecl = get_callee_fndecl (exp);
7105 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7106
7107 gcc_assert (CALL_WITH_BOUNDS_P (exp));
7108
7109 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7110 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7111
7112 gcc_assert (fcode > BEGIN_CHKP_BUILTINS
7113 && fcode < END_CHKP_BUILTINS);
7114
7115 switch (fcode)
7116 {
7117 case BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP:
7118 target = expand_builtin_memcpy_with_bounds (exp, target);
7119 if (target)
7120 return target;
7121 break;
7122
7123 case BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP:
7124 target = expand_builtin_mempcpy_with_bounds (exp, target, mode);
7125 if (target)
7126 return target;
7127 break;
7128
7129 case BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP:
7130 target = expand_builtin_memset_with_bounds (exp, target, mode);
7131 if (target)
7132 return target;
7133 break;
7134
7135 default:
7136 break;
7137 }
7138
7139 /* The switch statement above can drop through to cause the function
7140 to be called normally. */
7141 return expand_call (exp, target, ignore);
7142 }
7143
7144 /* Determine whether a tree node represents a call to a built-in
7145 function. If the tree T is a call to a built-in function with
7146 the right number of arguments of the appropriate types, return
7147 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7148 Otherwise the return value is END_BUILTINS. */
7149
7150 enum built_in_function
7151 builtin_mathfn_code (const_tree t)
7152 {
7153 const_tree fndecl, arg, parmlist;
7154 const_tree argtype, parmtype;
7155 const_call_expr_arg_iterator iter;
7156
7157 if (TREE_CODE (t) != CALL_EXPR
7158 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
7159 return END_BUILTINS;
7160
7161 fndecl = get_callee_fndecl (t);
7162 if (fndecl == NULL_TREE
7163 || TREE_CODE (fndecl) != FUNCTION_DECL
7164 || ! DECL_BUILT_IN (fndecl)
7165 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7166 return END_BUILTINS;
7167
7168 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
7169 init_const_call_expr_arg_iterator (t, &iter);
7170 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
7171 {
7172 /* If a function doesn't take a variable number of arguments,
7173 the last element in the list will have type `void'. */
7174 parmtype = TREE_VALUE (parmlist);
7175 if (VOID_TYPE_P (parmtype))
7176 {
7177 if (more_const_call_expr_args_p (&iter))
7178 return END_BUILTINS;
7179 return DECL_FUNCTION_CODE (fndecl);
7180 }
7181
7182 if (! more_const_call_expr_args_p (&iter))
7183 return END_BUILTINS;
7184
7185 arg = next_const_call_expr_arg (&iter);
7186 argtype = TREE_TYPE (arg);
7187
7188 if (SCALAR_FLOAT_TYPE_P (parmtype))
7189 {
7190 if (! SCALAR_FLOAT_TYPE_P (argtype))
7191 return END_BUILTINS;
7192 }
7193 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7194 {
7195 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7196 return END_BUILTINS;
7197 }
7198 else if (POINTER_TYPE_P (parmtype))
7199 {
7200 if (! POINTER_TYPE_P (argtype))
7201 return END_BUILTINS;
7202 }
7203 else if (INTEGRAL_TYPE_P (parmtype))
7204 {
7205 if (! INTEGRAL_TYPE_P (argtype))
7206 return END_BUILTINS;
7207 }
7208 else
7209 return END_BUILTINS;
7210 }
7211
7212 /* Variable-length argument list. */
7213 return DECL_FUNCTION_CODE (fndecl);
7214 }
7215
7216 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7217 evaluate to a constant. */
7218
7219 static tree
7220 fold_builtin_constant_p (tree arg)
7221 {
7222 /* We return 1 for a numeric type that's known to be a constant
7223 value at compile-time or for an aggregate type that's a
7224 literal constant. */
7225 STRIP_NOPS (arg);
7226
7227   /* If we know this is a constant, return the constant one.  */
7228 if (CONSTANT_CLASS_P (arg)
7229 || (TREE_CODE (arg) == CONSTRUCTOR
7230 && TREE_CONSTANT (arg)))
7231 return integer_one_node;
7232 if (TREE_CODE (arg) == ADDR_EXPR)
7233 {
7234 tree op = TREE_OPERAND (arg, 0);
7235 if (TREE_CODE (op) == STRING_CST
7236 || (TREE_CODE (op) == ARRAY_REF
7237 && integer_zerop (TREE_OPERAND (op, 1))
7238 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7239 return integer_one_node;
7240 }
7241
7242 /* If this expression has side effects, show we don't know it to be a
7243 constant. Likewise if it's a pointer or aggregate type since in
7244 those case we only want literals, since those are only optimized
7245 when generating RTL, not later.
7246 And finally, if we are compiling an initializer, not code, we
7247 need to return a definite result now; there's not going to be any
7248 more optimization done. */
7249 if (TREE_SIDE_EFFECTS (arg)
7250 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7251 || POINTER_TYPE_P (TREE_TYPE (arg))
7252 || cfun == 0
7253 || folding_initializer
7254 || force_folding_builtin_constant_p)
7255 return integer_zero_node;
7256
7257 return NULL_TREE;
7258 }
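/* For illustration only: how the fold above behaves for a few typical
   arguments, assuming a GCC-compatible compiler (a sketch, not part of the
   folder itself; the function name is invented).

     static void
     example (int n)
     {
       int a = __builtin_constant_p (4 * 16);  // constant expression -> 1
       int b = __builtin_constant_p ("abc");   // string literal address -> 1
       int c = __builtin_constant_p (n++);     // side effects -> folds to 0
       int d = __builtin_constant_p (n);       // not known yet: folding is
                                               // deferred (NULL_TREE) so a
                                               // later pass can decide
       (void) a; (void) b; (void) c; (void) d;
     }
*/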
7259
7260 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7261 return it as a truthvalue. */
7262
7263 static tree
7264 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
7265 tree predictor)
7266 {
7267 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7268
7269 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
7270 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7271 ret_type = TREE_TYPE (TREE_TYPE (fn));
7272 pred_type = TREE_VALUE (arg_types);
7273 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7274
7275 pred = fold_convert_loc (loc, pred_type, pred);
7276 expected = fold_convert_loc (loc, expected_type, expected);
7277 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
7278 predictor);
7279
7280 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7281 build_int_cst (ret_type, 0));
7282 }
7283
7284 /* Fold a call to builtin_expect with arguments ARG0, ARG1 and ARG2.
7285    Return NULL_TREE if no simplification is possible.  */
7286
7287 tree
7288 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
7289 {
7290 tree inner, fndecl, inner_arg0;
7291 enum tree_code code;
7292
7293 /* Distribute the expected value over short-circuiting operators.
7294 See through the cast from truthvalue_type_node to long. */
7295 inner_arg0 = arg0;
7296 while (CONVERT_EXPR_P (inner_arg0)
7297 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
7298 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
7299 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
7300
7301   /* If this is a builtin_expect within a builtin_expect, keep the
7302      inner one.  See through a comparison against a constant.  It
7303      might have been added to create a truthvalue.  */
7304 inner = inner_arg0;
7305
7306 if (COMPARISON_CLASS_P (inner)
7307 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7308 inner = TREE_OPERAND (inner, 0);
7309
7310 if (TREE_CODE (inner) == CALL_EXPR
7311 && (fndecl = get_callee_fndecl (inner))
7312 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7313 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7314 return arg0;
7315
7316 inner = inner_arg0;
7317 code = TREE_CODE (inner);
7318 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7319 {
7320 tree op0 = TREE_OPERAND (inner, 0);
7321 tree op1 = TREE_OPERAND (inner, 1);
7322
7323 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
7324 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
7325 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7326
7327 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
7328 }
7329
7330 /* If the argument isn't invariant then there's nothing else we can do. */
7331 if (!TREE_CONSTANT (inner_arg0))
7332 return NULL_TREE;
7333
7334 /* If we expect that a comparison against the argument will fold to
7335 a constant return the constant. In practice, this means a true
7336 constant or the address of a non-weak symbol. */
7337 inner = inner_arg0;
7338 STRIP_NOPS (inner);
7339 if (TREE_CODE (inner) == ADDR_EXPR)
7340 {
7341 do
7342 {
7343 inner = TREE_OPERAND (inner, 0);
7344 }
7345 while (TREE_CODE (inner) == COMPONENT_REF
7346 || TREE_CODE (inner) == ARRAY_REF);
7347 if ((TREE_CODE (inner) == VAR_DECL
7348 || TREE_CODE (inner) == FUNCTION_DECL)
7349 && DECL_WEAK (inner))
7350 return NULL_TREE;
7351 }
7352
7353 /* Otherwise, ARG0 already has the proper type for the return value. */
7354 return arg0;
7355 }
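/* For illustration only: a rough source-level sketch of the distribution
   performed above, assuming a GCC-compatible compiler (the exact folded
   tree also carries the optional predictor argument when one is present;
   the function name is invented).

     static int
     example (int a, int b)
     {
       // Written by the user as:
       //   if (__builtin_expect (a > 0 && b > 0, 1)) ...
       // the fold roughly produces the equivalent of:
       if (__builtin_expect (a > 0, 1) && __builtin_expect (b > 0, 1))
         return 1;
       return 0;
     }
*/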
7356
7357 /* Fold a call to __builtin_classify_type with argument ARG. */
7358
7359 static tree
7360 fold_builtin_classify_type (tree arg)
7361 {
7362 if (arg == 0)
7363 return build_int_cst (integer_type_node, no_type_class);
7364
7365 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
7366 }
7367
7368 /* Fold a call to __builtin_strlen with argument ARG. */
7369
7370 static tree
7371 fold_builtin_strlen (location_t loc, tree type, tree arg)
7372 {
7373 if (!validate_arg (arg, POINTER_TYPE))
7374 return NULL_TREE;
7375 else
7376 {
7377 tree len = c_strlen (arg, 0);
7378
7379 if (len)
7380 return fold_convert_loc (loc, type, len);
7381
7382 return NULL_TREE;
7383 }
7384 }
7385
7386 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7387
7388 static tree
7389 fold_builtin_inf (location_t loc, tree type, int warn)
7390 {
7391 REAL_VALUE_TYPE real;
7392
7393 /* __builtin_inff is intended to be usable to define INFINITY on all
7394 targets. If an infinity is not available, INFINITY expands "to a
7395 positive constant of type float that overflows at translation
7396 time", footnote "In this case, using INFINITY will violate the
7397 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7398 Thus we pedwarn to ensure this constraint violation is
7399 diagnosed. */
7400 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7401 pedwarn (loc, 0, "target format does not support infinity");
7402
7403 real_inf (&real);
7404 return build_real (type, real);
7405 }
7406
7407 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7408
7409 static tree
7410 fold_builtin_nan (tree arg, tree type, int quiet)
7411 {
7412 REAL_VALUE_TYPE real;
7413 const char *str;
7414
7415 if (!validate_arg (arg, POINTER_TYPE))
7416 return NULL_TREE;
7417 str = c_getstr (arg);
7418 if (!str)
7419 return NULL_TREE;
7420
7421 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7422 return NULL_TREE;
7423
7424 return build_real (type, real);
7425 }
7426
7427 /* Return true if the floating point expression T has an integer value.
7428 We also allow +Inf, -Inf and NaN to be considered integer values. */
7429
7430 static bool
7431 integer_valued_real_p (tree t)
7432 {
7433 switch (TREE_CODE (t))
7434 {
7435 case FLOAT_EXPR:
7436 return true;
7437
7438 case ABS_EXPR:
7439 case SAVE_EXPR:
7440 return integer_valued_real_p (TREE_OPERAND (t, 0));
7441
7442 case COMPOUND_EXPR:
7443 case MODIFY_EXPR:
7444 case BIND_EXPR:
7445 return integer_valued_real_p (TREE_OPERAND (t, 1));
7446
7447 case PLUS_EXPR:
7448 case MINUS_EXPR:
7449 case MULT_EXPR:
7450 case MIN_EXPR:
7451 case MAX_EXPR:
7452 return integer_valued_real_p (TREE_OPERAND (t, 0))
7453 && integer_valued_real_p (TREE_OPERAND (t, 1));
7454
7455 case COND_EXPR:
7456 return integer_valued_real_p (TREE_OPERAND (t, 1))
7457 && integer_valued_real_p (TREE_OPERAND (t, 2));
7458
7459 case REAL_CST:
7460 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
7461
7462 CASE_CONVERT:
7463 {
7464 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7465 if (TREE_CODE (type) == INTEGER_TYPE)
7466 return true;
7467 if (TREE_CODE (type) == REAL_TYPE)
7468 return integer_valued_real_p (TREE_OPERAND (t, 0));
7469 break;
7470 }
7471
7472 case CALL_EXPR:
7473 switch (builtin_mathfn_code (t))
7474 {
7475 CASE_FLT_FN (BUILT_IN_CEIL):
7476 CASE_FLT_FN (BUILT_IN_FLOOR):
7477 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7478 CASE_FLT_FN (BUILT_IN_RINT):
7479 CASE_FLT_FN (BUILT_IN_ROUND):
7480 CASE_FLT_FN (BUILT_IN_TRUNC):
7481 return true;
7482
7483 CASE_FLT_FN (BUILT_IN_FMIN):
7484 CASE_FLT_FN (BUILT_IN_FMAX):
7485 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7486 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
7487
7488 default:
7489 break;
7490 }
7491 break;
7492
7493 default:
7494 break;
7495 }
7496 return false;
7497 }
7498
7499 /* FNDECL is assumed to be a builtin where truncation can be propagated
7500    across (for instance floor((double)f) == (double)floorf (f)).
7501 Do the transformation for a call with argument ARG. */
7502
7503 static tree
7504 fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
7505 {
7506 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7507
7508 if (!validate_arg (arg, REAL_TYPE))
7509 return NULL_TREE;
7510
7511 /* Integer rounding functions are idempotent. */
7512 if (fcode == builtin_mathfn_code (arg))
7513 return arg;
7514
7515   /* If the argument is already integer valued, and we don't need to worry
7516 about setting errno, there's no need to perform rounding. */
7517 if (! flag_errno_math && integer_valued_real_p (arg))
7518 return arg;
7519
7520 if (optimize)
7521 {
7522 tree arg0 = strip_float_extensions (arg);
7523 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7524 tree newtype = TREE_TYPE (arg0);
7525 tree decl;
7526
7527 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7528 && (decl = mathfn_built_in (newtype, fcode)))
7529 return fold_convert_loc (loc, ftype,
7530 build_call_expr_loc (loc, decl, 1,
7531 fold_convert_loc (loc,
7532 newtype,
7533 arg0)));
7534 }
7535 return NULL_TREE;
7536 }
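/* For illustration only: the kind of narrowing this helper performs,
   assuming a GCC-compatible compiler at -O (a sketch, not part of the
   folder; the function name is invented).

     #include <math.h>

     static double
     example (float f)
     {
       // floor ((double) f) is folded to (double) floorf (f): the result
       // fits in float, so the narrower call suffices.
       return floor ((double) f);
     }
*/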
7537
7538 /* FNDECL is assumed to be a builtin which can narrow the FP type of
7539 the argument, for instance lround((double)f) -> lroundf (f).
7540 Do the transformation for a call with argument ARG. */
7541
7542 static tree
7543 fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
7544 {
7545 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7546
7547 if (!validate_arg (arg, REAL_TYPE))
7548 return NULL_TREE;
7549
7550   /* If the argument is already integer valued, and we don't need to worry
7551 about setting errno, there's no need to perform rounding. */
7552 if (! flag_errno_math && integer_valued_real_p (arg))
7553 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7554 TREE_TYPE (TREE_TYPE (fndecl)), arg);
7555
7556 if (optimize)
7557 {
7558 tree ftype = TREE_TYPE (arg);
7559 tree arg0 = strip_float_extensions (arg);
7560 tree newtype = TREE_TYPE (arg0);
7561 tree decl;
7562
7563 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7564 && (decl = mathfn_built_in (newtype, fcode)))
7565 return build_call_expr_loc (loc, decl, 1,
7566 fold_convert_loc (loc, newtype, arg0));
7567 }
7568
7569 /* Canonicalize iround (x) to lround (x) on ILP32 targets where
7570 sizeof (int) == sizeof (long). */
7571 if (TYPE_PRECISION (integer_type_node)
7572 == TYPE_PRECISION (long_integer_type_node))
7573 {
7574 tree newfn = NULL_TREE;
7575 switch (fcode)
7576 {
7577 CASE_FLT_FN (BUILT_IN_ICEIL):
7578 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7579 break;
7580
7581 CASE_FLT_FN (BUILT_IN_IFLOOR):
7582 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7583 break;
7584
7585 CASE_FLT_FN (BUILT_IN_IROUND):
7586 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7587 break;
7588
7589 CASE_FLT_FN (BUILT_IN_IRINT):
7590 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7591 break;
7592
7593 default:
7594 break;
7595 }
7596
7597 if (newfn)
7598 {
7599 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7600 return fold_convert_loc (loc,
7601 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7602 }
7603 }
7604
7605 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7606 sizeof (long long) == sizeof (long). */
7607 if (TYPE_PRECISION (long_long_integer_type_node)
7608 == TYPE_PRECISION (long_integer_type_node))
7609 {
7610 tree newfn = NULL_TREE;
7611 switch (fcode)
7612 {
7613 CASE_FLT_FN (BUILT_IN_LLCEIL):
7614 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7615 break;
7616
7617 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7618 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7619 break;
7620
7621 CASE_FLT_FN (BUILT_IN_LLROUND):
7622 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7623 break;
7624
7625 CASE_FLT_FN (BUILT_IN_LLRINT):
7626 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7627 break;
7628
7629 default:
7630 break;
7631 }
7632
7633 if (newfn)
7634 {
7635 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7636 return fold_convert_loc (loc,
7637 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7638 }
7639 }
7640
7641 return NULL_TREE;
7642 }
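/* For illustration only, assuming a GCC-compatible compiler at -O: two of
   the rewrites performed above, sketched at the source level (the function
   name is invented).

     #include <math.h>

     static long
     example (float f, double d)
     {
       long      a = lround ((double) f);  // narrowed to lroundf (f)
       long long b = llround (d);          // canonicalized to lround (d) when
                                           // long long and long have the same
                                           // precision (e.g. LP64)
       return a + (long) b;
     }
*/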
7643
7644 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7645 return type. Return NULL_TREE if no simplification can be made. */
7646
7647 static tree
7648 fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
7649 {
7650 tree res;
7651
7652 if (!validate_arg (arg, COMPLEX_TYPE)
7653 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7654 return NULL_TREE;
7655
7656 /* Calculate the result when the argument is a constant. */
7657 if (TREE_CODE (arg) == COMPLEX_CST
7658 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7659 type, mpfr_hypot)))
7660 return res;
7661
7662 if (TREE_CODE (arg) == COMPLEX_EXPR)
7663 {
7664 tree real = TREE_OPERAND (arg, 0);
7665 tree imag = TREE_OPERAND (arg, 1);
7666
7667 /* If either part is zero, cabs is fabs of the other. */
7668 if (real_zerop (real))
7669 return fold_build1_loc (loc, ABS_EXPR, type, imag);
7670 if (real_zerop (imag))
7671 return fold_build1_loc (loc, ABS_EXPR, type, real);
7672
7673 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7674 if (flag_unsafe_math_optimizations
7675 && operand_equal_p (real, imag, OEP_PURE_SAME))
7676 {
7677 const REAL_VALUE_TYPE sqrt2_trunc
7678 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7679 STRIP_NOPS (real);
7680 return fold_build2_loc (loc, MULT_EXPR, type,
7681 fold_build1_loc (loc, ABS_EXPR, type, real),
7682 build_real (type, sqrt2_trunc));
7683 }
7684 }
7685
7686 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7687 if (TREE_CODE (arg) == NEGATE_EXPR
7688 || TREE_CODE (arg) == CONJ_EXPR)
7689 return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));
7690
7691 /* Don't do this when optimizing for size. */
7692 if (flag_unsafe_math_optimizations
7693 && optimize && optimize_function_for_speed_p (cfun))
7694 {
7695 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7696
7697 if (sqrtfn != NULL_TREE)
7698 {
7699 tree rpart, ipart, result;
7700
7701 arg = builtin_save_expr (arg);
7702
7703 rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
7704 ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);
7705
7706 rpart = builtin_save_expr (rpart);
7707 ipart = builtin_save_expr (ipart);
7708
7709 result = fold_build2_loc (loc, PLUS_EXPR, type,
7710 fold_build2_loc (loc, MULT_EXPR, type,
7711 rpart, rpart),
7712 fold_build2_loc (loc, MULT_EXPR, type,
7713 ipart, ipart));
7714
7715 return build_call_expr_loc (loc, sqrtfn, 1, result);
7716 }
7717 }
7718
7719 return NULL_TREE;
7720 }
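/* For illustration only (a sketch, assuming a GCC-compatible compiler):
   the identities used above, written at the source level (the function
   name is invented).

     #include <complex.h>
     #include <math.h>

     static double
     example (double x, double complex z)
     {
       double a = cabs (x + 0.0 * I);  // one part is zero: folds to fabs (x)
       double b = cabs (-z);           // folds to cabs (z)
       // With -funsafe-math-optimizations, cabs (x + x * I) becomes
       // fabs (x) * sqrt (2), and a remaining cabs (z) may be expanded to
       // sqrt (creal (z) * creal (z) + cimag (z) * cimag (z)) when
       // optimizing for speed.
       return a + b;
     }
*/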
7721
7722 /* Build a complex (inf +- 0i) for the result of cproj. TYPE is the
7723 complex tree type of the result. If NEG is true, the imaginary
7724 zero is negative. */
7725
7726 static tree
7727 build_complex_cproj (tree type, bool neg)
7728 {
7729 REAL_VALUE_TYPE rinf, rzero = dconst0;
7730
7731 real_inf (&rinf);
7732 rzero.sign = neg;
7733 return build_complex (type, build_real (TREE_TYPE (type), rinf),
7734 build_real (TREE_TYPE (type), rzero));
7735 }
7736
7737 /* Fold call to builtin cproj, cprojf or cprojl with argument ARG. TYPE is the
7738 return type. Return NULL_TREE if no simplification can be made. */
7739
7740 static tree
7741 fold_builtin_cproj (location_t loc, tree arg, tree type)
7742 {
7743 if (!validate_arg (arg, COMPLEX_TYPE)
7744 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7745 return NULL_TREE;
7746
7747 /* If there are no infinities, return arg. */
7748 if (! HONOR_INFINITIES (type))
7749 return non_lvalue_loc (loc, arg);
7750
7751 /* Calculate the result when the argument is a constant. */
7752 if (TREE_CODE (arg) == COMPLEX_CST)
7753 {
7754 const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
7755 const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
7756
7757 if (real_isinf (real) || real_isinf (imag))
7758 return build_complex_cproj (type, imag->sign);
7759 else
7760 return arg;
7761 }
7762 else if (TREE_CODE (arg) == COMPLEX_EXPR)
7763 {
7764 tree real = TREE_OPERAND (arg, 0);
7765 tree imag = TREE_OPERAND (arg, 1);
7766
7767 STRIP_NOPS (real);
7768 STRIP_NOPS (imag);
7769
7770 /* If the real part is inf and the imag part is known to be
7771 nonnegative, return (inf + 0i). Remember side-effects are
7772 possible in the imag part. */
7773 if (TREE_CODE (real) == REAL_CST
7774 && real_isinf (TREE_REAL_CST_PTR (real))
7775 && tree_expr_nonnegative_p (imag))
7776 return omit_one_operand_loc (loc, type,
7777 build_complex_cproj (type, false),
7778 arg);
7779
7780 /* If the imag part is inf, return (inf+I*copysign(0,imag)).
7781 Remember side-effects are possible in the real part. */
7782 if (TREE_CODE (imag) == REAL_CST
7783 && real_isinf (TREE_REAL_CST_PTR (imag)))
7784 return
7785 omit_one_operand_loc (loc, type,
7786 build_complex_cproj (type, TREE_REAL_CST_PTR
7787 (imag)->sign), arg);
7788 }
7789
7790 return NULL_TREE;
7791 }
7792
7793 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7794 Return NULL_TREE if no simplification can be made. */
7795
7796 static tree
7797 fold_builtin_sqrt (location_t loc, tree arg, tree type)
7798 {
7799
7800 enum built_in_function fcode;
7801 tree res;
7802
7803 if (!validate_arg (arg, REAL_TYPE))
7804 return NULL_TREE;
7805
7806 /* Calculate the result when the argument is a constant. */
7807 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7808 return res;
7809
7810 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7811 fcode = builtin_mathfn_code (arg);
7812 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7813 {
7814 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7815 arg = fold_build2_loc (loc, MULT_EXPR, type,
7816 CALL_EXPR_ARG (arg, 0),
7817 build_real (type, dconsthalf));
7818 return build_call_expr_loc (loc, expfn, 1, arg);
7819 }
7820
7821 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7822 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7823 {
7824 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7825
7826 if (powfn)
7827 {
7828 tree arg0 = CALL_EXPR_ARG (arg, 0);
7829 tree tree_root;
7830 /* The inner root was either sqrt or cbrt. */
7831 /* This was a conditional expression but it triggered a bug
7832 in Sun C 5.5. */
7833 REAL_VALUE_TYPE dconstroot;
7834 if (BUILTIN_SQRT_P (fcode))
7835 dconstroot = dconsthalf;
7836 else
7837 dconstroot = dconst_third ();
7838
7839 /* Adjust for the outer root. */
7840 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7841 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7842 tree_root = build_real (type, dconstroot);
7843 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7844 }
7845 }
7846
7847 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7848 if (flag_unsafe_math_optimizations
7849 && (fcode == BUILT_IN_POW
7850 || fcode == BUILT_IN_POWF
7851 || fcode == BUILT_IN_POWL))
7852 {
7853 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7854 tree arg0 = CALL_EXPR_ARG (arg, 0);
7855 tree arg1 = CALL_EXPR_ARG (arg, 1);
7856 tree narg1;
7857 if (!tree_expr_nonnegative_p (arg0))
7858 arg0 = build1 (ABS_EXPR, type, arg0);
7859 narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7860 build_real (type, dconsthalf));
7861 return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
7862 }
7863
7864 return NULL_TREE;
7865 }
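/* For illustration only, assuming a GCC-compatible compiler with
   -funsafe-math-optimizations: a sketch of the rewrites above (the
   function name is invented).

     #include <math.h>

     static double
     example (double x, double y)
     {
       double a = sqrt (exp (x));     // becomes exp (x * 0.5)
       double b = sqrt (cbrt (x));    // becomes pow (x, 1.0 / 6.0)
       double c = sqrt (pow (x, y));  // becomes pow (fabs (x), y * 0.5)
       return a + b + c;
     }
*/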
7866
7867 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7868 Return NULL_TREE if no simplification can be made. */
7869
7870 static tree
7871 fold_builtin_cbrt (location_t loc, tree arg, tree type)
7872 {
7873 const enum built_in_function fcode = builtin_mathfn_code (arg);
7874 tree res;
7875
7876 if (!validate_arg (arg, REAL_TYPE))
7877 return NULL_TREE;
7878
7879 /* Calculate the result when the argument is a constant. */
7880 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7881 return res;
7882
7883 if (flag_unsafe_math_optimizations)
7884 {
7885 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7886 if (BUILTIN_EXPONENT_P (fcode))
7887 {
7888 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7889 const REAL_VALUE_TYPE third_trunc =
7890 real_value_truncate (TYPE_MODE (type), dconst_third ());
7891 arg = fold_build2_loc (loc, MULT_EXPR, type,
7892 CALL_EXPR_ARG (arg, 0),
7893 build_real (type, third_trunc));
7894 return build_call_expr_loc (loc, expfn, 1, arg);
7895 }
7896
7897 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7898 if (BUILTIN_SQRT_P (fcode))
7899 {
7900 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7901
7902 if (powfn)
7903 {
7904 tree arg0 = CALL_EXPR_ARG (arg, 0);
7905 tree tree_root;
7906 REAL_VALUE_TYPE dconstroot = dconst_third ();
7907
7908 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7909 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7910 tree_root = build_real (type, dconstroot);
7911 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7912 }
7913 }
7914
7915 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7916 if (BUILTIN_CBRT_P (fcode))
7917 {
7918 tree arg0 = CALL_EXPR_ARG (arg, 0);
7919 if (tree_expr_nonnegative_p (arg0))
7920 {
7921 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7922
7923 if (powfn)
7924 {
7925 tree tree_root;
7926 REAL_VALUE_TYPE dconstroot;
7927
7928 real_arithmetic (&dconstroot, MULT_EXPR,
7929 dconst_third_ptr (), dconst_third_ptr ());
7930 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7931 tree_root = build_real (type, dconstroot);
7932 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7933 }
7934 }
7935 }
7936
7937 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7938 if (fcode == BUILT_IN_POW
7939 || fcode == BUILT_IN_POWF
7940 || fcode == BUILT_IN_POWL)
7941 {
7942 tree arg00 = CALL_EXPR_ARG (arg, 0);
7943 tree arg01 = CALL_EXPR_ARG (arg, 1);
7944 if (tree_expr_nonnegative_p (arg00))
7945 {
7946 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7947 const REAL_VALUE_TYPE dconstroot
7948 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7949 tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
7950 build_real (type, dconstroot));
7951 return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
7952 }
7953 }
7954 }
7955 return NULL_TREE;
7956 }
7957
7958 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7959 TYPE is the type of the return value. Return NULL_TREE if no
7960 simplification can be made. */
7961
7962 static tree
7963 fold_builtin_cos (location_t loc,
7964 tree arg, tree type, tree fndecl)
7965 {
7966 tree res, narg;
7967
7968 if (!validate_arg (arg, REAL_TYPE))
7969 return NULL_TREE;
7970
7971 /* Calculate the result when the argument is a constant. */
7972 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7973 return res;
7974
7975 /* Optimize cos(-x) into cos (x). */
7976 if ((narg = fold_strip_sign_ops (arg)))
7977 return build_call_expr_loc (loc, fndecl, 1, narg);
7978
7979 return NULL_TREE;
7980 }
7981
7982 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7983 Return NULL_TREE if no simplification can be made. */
7984
7985 static tree
7986 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
7987 {
7988 if (validate_arg (arg, REAL_TYPE))
7989 {
7990 tree res, narg;
7991
7992 /* Calculate the result when the argument is a constant. */
7993 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7994 return res;
7995
7996 /* Optimize cosh(-x) into cosh (x). */
7997 if ((narg = fold_strip_sign_ops (arg)))
7998 return build_call_expr_loc (loc, fndecl, 1, narg);
7999 }
8000
8001 return NULL_TREE;
8002 }
8003
8004 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
8005 argument ARG. TYPE is the type of the return value. Return
8006 NULL_TREE if no simplification can be made. */
8007
8008 static tree
8009 fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
8010 bool hyper)
8011 {
8012 if (validate_arg (arg, COMPLEX_TYPE)
8013 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
8014 {
8015 tree tmp;
8016
8017 /* Calculate the result when the argument is a constant. */
8018 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
8019 return tmp;
8020
8021 /* Optimize fn(-x) into fn(x). */
8022 if ((tmp = fold_strip_sign_ops (arg)))
8023 return build_call_expr_loc (loc, fndecl, 1, tmp);
8024 }
8025
8026 return NULL_TREE;
8027 }
8028
8029 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
8030 Return NULL_TREE if no simplification can be made. */
8031
8032 static tree
8033 fold_builtin_tan (tree arg, tree type)
8034 {
8035 enum built_in_function fcode;
8036 tree res;
8037
8038 if (!validate_arg (arg, REAL_TYPE))
8039 return NULL_TREE;
8040
8041 /* Calculate the result when the argument is a constant. */
8042 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
8043 return res;
8044
8045 /* Optimize tan(atan(x)) = x. */
8046 fcode = builtin_mathfn_code (arg);
8047 if (flag_unsafe_math_optimizations
8048 && (fcode == BUILT_IN_ATAN
8049 || fcode == BUILT_IN_ATANF
8050 || fcode == BUILT_IN_ATANL))
8051 return CALL_EXPR_ARG (arg, 0);
8052
8053 return NULL_TREE;
8054 }
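/* For illustration only, assuming -funsafe-math-optimizations (a sketch;
   the function name is invented).

     #include <math.h>

     static double
     example (double x)
     {
       return tan (atan (x));  // folded to x
     }
*/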
8055
8056 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
8057 NULL_TREE if no simplification can be made. */
8058
8059 static tree
8060 fold_builtin_sincos (location_t loc,
8061 tree arg0, tree arg1, tree arg2)
8062 {
8063 tree type;
8064 tree res, fn, call;
8065
8066 if (!validate_arg (arg0, REAL_TYPE)
8067 || !validate_arg (arg1, POINTER_TYPE)
8068 || !validate_arg (arg2, POINTER_TYPE))
8069 return NULL_TREE;
8070
8071 type = TREE_TYPE (arg0);
8072
8073 /* Calculate the result when the argument is a constant. */
8074 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
8075 return res;
8076
8077 /* Canonicalize sincos to cexpi. */
8078 if (!targetm.libc_has_function (function_c99_math_complex))
8079 return NULL_TREE;
8080 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
8081 if (!fn)
8082 return NULL_TREE;
8083
8084 call = build_call_expr_loc (loc, fn, 1, arg0);
8085 call = builtin_save_expr (call);
8086
8087 return build2 (COMPOUND_EXPR, void_type_node,
8088 build2 (MODIFY_EXPR, void_type_node,
8089 build_fold_indirect_ref_loc (loc, arg1),
8090 build1 (IMAGPART_EXPR, type, call)),
8091 build2 (MODIFY_EXPR, void_type_node,
8092 build_fold_indirect_ref_loc (loc, arg2),
8093 build1 (REALPART_EXPR, type, call)));
8094 }
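/* For illustration only: the canonicalization above at the source level,
   assuming a C99 libm (a sketch; __builtin_cexpi is an internal GCC entry
   point, written here via cexp for readability, and the function name is
   invented).

     #include <complex.h>
     #include <math.h>

     static void
     example (double x, double *sinp, double *cosp)
     {
       // sincos (x, sinp, cosp) is rewritten so that both results come from
       // a single complex exponential: cexpi (x) == cexp (I * x).
       double complex e = cexp (I * x);
       *sinp = cimag (e);
       *cosp = creal (e);
     }
*/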
8095
8096 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
8097 NULL_TREE if no simplification can be made. */
8098
8099 static tree
8100 fold_builtin_cexp (location_t loc, tree arg0, tree type)
8101 {
8102 tree rtype;
8103 tree realp, imagp, ifn;
8104 tree res;
8105
8106 if (!validate_arg (arg0, COMPLEX_TYPE)
8107 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
8108 return NULL_TREE;
8109
8110 /* Calculate the result when the argument is a constant. */
8111 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
8112 return res;
8113
8114 rtype = TREE_TYPE (TREE_TYPE (arg0));
8115
8116   /* If we can figure out the real part of arg0 and it is constant zero,
8117      fold to cexpi.  */
8118 if (!targetm.libc_has_function (function_c99_math_complex))
8119 return NULL_TREE;
8120 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
8121 if (!ifn)
8122 return NULL_TREE;
8123
8124 if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
8125 && real_zerop (realp))
8126 {
8127 tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
8128 return build_call_expr_loc (loc, ifn, 1, narg);
8129 }
8130
8131   /* If we can easily decompose the real and imaginary parts, split cexp
8132      into exp (r) * cexpi (i).  */
8133 if (flag_unsafe_math_optimizations
8134 && realp)
8135 {
8136 tree rfn, rcall, icall;
8137
8138 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
8139 if (!rfn)
8140 return NULL_TREE;
8141
8142 imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
8143 if (!imagp)
8144 return NULL_TREE;
8145
8146 icall = build_call_expr_loc (loc, ifn, 1, imagp);
8147 icall = builtin_save_expr (icall);
8148 rcall = build_call_expr_loc (loc, rfn, 1, realp);
8149 rcall = builtin_save_expr (rcall);
8150 return fold_build2_loc (loc, COMPLEX_EXPR, type,
8151 fold_build2_loc (loc, MULT_EXPR, rtype,
8152 rcall,
8153 fold_build1_loc (loc, REALPART_EXPR,
8154 rtype, icall)),
8155 fold_build2_loc (loc, MULT_EXPR, rtype,
8156 rcall,
8157 fold_build1_loc (loc, IMAGPART_EXPR,
8158 rtype, icall)));
8159 }
8160
8161 return NULL_TREE;
8162 }
8163
8164 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
8165 Return NULL_TREE if no simplification can be made. */
8166
8167 static tree
8168 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
8169 {
8170 if (!validate_arg (arg, REAL_TYPE))
8171 return NULL_TREE;
8172
8173 /* Optimize trunc of constant value. */
8174 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8175 {
8176 REAL_VALUE_TYPE r, x;
8177 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8178
8179 x = TREE_REAL_CST (arg);
8180 real_trunc (&r, TYPE_MODE (type), &x);
8181 return build_real (type, r);
8182 }
8183
8184 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8185 }
8186
8187 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
8188 Return NULL_TREE if no simplification can be made. */
8189
8190 static tree
8191 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
8192 {
8193 if (!validate_arg (arg, REAL_TYPE))
8194 return NULL_TREE;
8195
8196 /* Optimize floor of constant value. */
8197 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8198 {
8199 REAL_VALUE_TYPE x;
8200
8201 x = TREE_REAL_CST (arg);
8202 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8203 {
8204 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8205 REAL_VALUE_TYPE r;
8206
8207 real_floor (&r, TYPE_MODE (type), &x);
8208 return build_real (type, r);
8209 }
8210 }
8211
8212 /* Fold floor (x) where x is nonnegative to trunc (x). */
8213 if (tree_expr_nonnegative_p (arg))
8214 {
8215 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
8216 if (truncfn)
8217 return build_call_expr_loc (loc, truncfn, 1, arg);
8218 }
8219
8220 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8221 }
8222
8223 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
8224 Return NULL_TREE if no simplification can be made. */
8225
8226 static tree
8227 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
8228 {
8229 if (!validate_arg (arg, REAL_TYPE))
8230 return NULL_TREE;
8231
8232 /* Optimize ceil of constant value. */
8233 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8234 {
8235 REAL_VALUE_TYPE x;
8236
8237 x = TREE_REAL_CST (arg);
8238 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8239 {
8240 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8241 REAL_VALUE_TYPE r;
8242
8243 real_ceil (&r, TYPE_MODE (type), &x);
8244 return build_real (type, r);
8245 }
8246 }
8247
8248 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8249 }
8250
8251 /* Fold function call to builtin round, roundf or roundl with argument ARG.
8252 Return NULL_TREE if no simplification can be made. */
8253
8254 static tree
8255 fold_builtin_round (location_t loc, tree fndecl, tree arg)
8256 {
8257 if (!validate_arg (arg, REAL_TYPE))
8258 return NULL_TREE;
8259
8260 /* Optimize round of constant value. */
8261 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8262 {
8263 REAL_VALUE_TYPE x;
8264
8265 x = TREE_REAL_CST (arg);
8266 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8267 {
8268 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8269 REAL_VALUE_TYPE r;
8270
8271 real_round (&r, TYPE_MODE (type), &x);
8272 return build_real (type, r);
8273 }
8274 }
8275
8276 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8277 }
8278
8279 /* Fold function call to builtin lround, lroundf or lroundl (or the
8280 corresponding long long versions) and other rounding functions. ARG
8281 is the argument to the call. Return NULL_TREE if no simplification
8282 can be made. */
8283
8284 static tree
8285 fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
8286 {
8287 if (!validate_arg (arg, REAL_TYPE))
8288 return NULL_TREE;
8289
8290 /* Optimize lround of constant value. */
8291 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8292 {
8293 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
8294
8295 if (real_isfinite (&x))
8296 {
8297 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
8298 tree ftype = TREE_TYPE (arg);
8299 REAL_VALUE_TYPE r;
8300 bool fail = false;
8301
8302 switch (DECL_FUNCTION_CODE (fndecl))
8303 {
8304 CASE_FLT_FN (BUILT_IN_IFLOOR):
8305 CASE_FLT_FN (BUILT_IN_LFLOOR):
8306 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8307 real_floor (&r, TYPE_MODE (ftype), &x);
8308 break;
8309
8310 CASE_FLT_FN (BUILT_IN_ICEIL):
8311 CASE_FLT_FN (BUILT_IN_LCEIL):
8312 CASE_FLT_FN (BUILT_IN_LLCEIL):
8313 real_ceil (&r, TYPE_MODE (ftype), &x);
8314 break;
8315
8316 CASE_FLT_FN (BUILT_IN_IROUND):
8317 CASE_FLT_FN (BUILT_IN_LROUND):
8318 CASE_FLT_FN (BUILT_IN_LLROUND):
8319 real_round (&r, TYPE_MODE (ftype), &x);
8320 break;
8321
8322 default:
8323 gcc_unreachable ();
8324 }
8325
8326 wide_int val = real_to_integer (&r, &fail, TYPE_PRECISION (itype));
8327 if (!fail)
8328 return wide_int_to_tree (itype, val);
8329 }
8330 }
8331
8332 switch (DECL_FUNCTION_CODE (fndecl))
8333 {
8334 CASE_FLT_FN (BUILT_IN_LFLOOR):
8335 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8336 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
8337 if (tree_expr_nonnegative_p (arg))
8338 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
8339 TREE_TYPE (TREE_TYPE (fndecl)), arg);
8340 break;
8341 default:;
8342 }
8343
8344 return fold_fixed_mathfn (loc, fndecl, arg);
8345 }
8346
8347 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
8348 and their long and long long variants (i.e. ffsl and ffsll). ARG is
8349 the argument to the call. Return NULL_TREE if no simplification can
8350 be made. */
8351
8352 static tree
8353 fold_builtin_bitop (tree fndecl, tree arg)
8354 {
8355 if (!validate_arg (arg, INTEGER_TYPE))
8356 return NULL_TREE;
8357
8358 /* Optimize for constant argument. */
8359 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8360 {
8361 tree type = TREE_TYPE (arg);
8362 int result;
8363
8364 switch (DECL_FUNCTION_CODE (fndecl))
8365 {
8366 CASE_INT_FN (BUILT_IN_FFS):
8367 result = wi::ffs (arg);
8368 break;
8369
8370 CASE_INT_FN (BUILT_IN_CLZ):
8371 if (wi::ne_p (arg, 0))
8372 result = wi::clz (arg);
8373 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8374 result = TYPE_PRECISION (type);
8375 break;
8376
8377 CASE_INT_FN (BUILT_IN_CTZ):
8378 if (wi::ne_p (arg, 0))
8379 result = wi::ctz (arg);
8380 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8381 result = TYPE_PRECISION (type);
8382 break;
8383
8384 CASE_INT_FN (BUILT_IN_CLRSB):
8385 result = wi::clrsb (arg);
8386 break;
8387
8388 CASE_INT_FN (BUILT_IN_POPCOUNT):
8389 result = wi::popcount (arg);
8390 break;
8391
8392 CASE_INT_FN (BUILT_IN_PARITY):
8393 result = wi::parity (arg);
8394 break;
8395
8396 default:
8397 gcc_unreachable ();
8398 }
8399
8400 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
8401 }
8402
8403 return NULL_TREE;
8404 }
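/* For illustration only: constant arguments folded by the code above,
   assuming a GCC-compatible compiler with 32-bit int (a sketch; the
   function name is invented).

     static int
     example (void)
     {
       int a = __builtin_popcount (0xF0F0);  // 8
       int b = __builtin_parity (7);         // 1 (odd number of set bits)
       int c = __builtin_ffs (0);            // 0 by definition
       int d = __builtin_clz (1);            // 31 with 32-bit int; clz/ctz of
                                             // zero fold only if the target
                                             // defines a value at zero
       return a + b + c + d;
     }
*/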
8405
8406 /* Fold function call to builtin_bswap and the short, long and long long
8407 variants. Return NULL_TREE if no simplification can be made. */
8408 static tree
8409 fold_builtin_bswap (tree fndecl, tree arg)
8410 {
8411 if (! validate_arg (arg, INTEGER_TYPE))
8412 return NULL_TREE;
8413
8414 /* Optimize constant value. */
8415 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8416 {
8417 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8418
8419 switch (DECL_FUNCTION_CODE (fndecl))
8420 {
8421 case BUILT_IN_BSWAP16:
8422 case BUILT_IN_BSWAP32:
8423 case BUILT_IN_BSWAP64:
8424 {
8425 signop sgn = TYPE_SIGN (type);
8426 tree result =
8427 wide_int_to_tree (type,
8428 wide_int::from (arg, TYPE_PRECISION (type),
8429 sgn).bswap ());
8430 return result;
8431 }
8432 default:
8433 gcc_unreachable ();
8434 }
8435 }
8436
8437 return NULL_TREE;
8438 }
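/* For illustration only (a sketch; the function name is invented):
   a constant byte swap folded by the code above.

     static unsigned int
     example (void)
     {
       return __builtin_bswap32 (0x12345678u);  // folds to 0x78563412
     }
*/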
8439
8440 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8441 NULL_TREE if no simplification can be made. */
8442
8443 static tree
8444 fold_builtin_hypot (location_t loc, tree fndecl,
8445 tree arg0, tree arg1, tree type)
8446 {
8447 tree res, narg0, narg1;
8448
8449 if (!validate_arg (arg0, REAL_TYPE)
8450 || !validate_arg (arg1, REAL_TYPE))
8451 return NULL_TREE;
8452
8453 /* Calculate the result when the argument is a constant. */
8454 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8455 return res;
8456
8457 /* If either argument to hypot has a negate or abs, strip that off.
8458 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8459 narg0 = fold_strip_sign_ops (arg0);
8460 narg1 = fold_strip_sign_ops (arg1);
8461 if (narg0 || narg1)
8462 {
8463 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
8464 narg1 ? narg1 : arg1);
8465 }
8466
8467 /* If either argument is zero, hypot is fabs of the other. */
8468 if (real_zerop (arg0))
8469 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
8470 else if (real_zerop (arg1))
8471 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
8472
8473 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8474 if (flag_unsafe_math_optimizations
8475 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8476 {
8477 const REAL_VALUE_TYPE sqrt2_trunc
8478 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
8479 return fold_build2_loc (loc, MULT_EXPR, type,
8480 fold_build1_loc (loc, ABS_EXPR, type, arg0),
8481 build_real (type, sqrt2_trunc));
8482 }
8483
8484 return NULL_TREE;
8485 }
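/* For illustration only (a sketch, assuming a GCC-compatible compiler;
   the function name is invented): the hypot rewrites above at the source
   level.

     #include <math.h>

     static double
     example (double x, double y)
     {
       double a = hypot (-x, fabs (y));  // sign ops stripped: hypot (x, y)
       double b = hypot (x, 0.0);        // folds to fabs (x)
       // With -funsafe-math-optimizations, hypot (x, x) becomes
       // fabs (x) * sqrt (2).
       return a + b;
     }
*/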
8486
8487
8488 /* Fold a builtin function call to pow, powf, or powl. Return
8489 NULL_TREE if no simplification can be made. */
8490 static tree
8491 fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
8492 {
8493 tree res;
8494
8495 if (!validate_arg (arg0, REAL_TYPE)
8496 || !validate_arg (arg1, REAL_TYPE))
8497 return NULL_TREE;
8498
8499 /* Calculate the result when the argument is a constant. */
8500 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8501 return res;
8502
8503 /* Optimize pow(1.0,y) = 1.0. */
8504 if (real_onep (arg0))
8505 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8506
8507 if (TREE_CODE (arg1) == REAL_CST
8508 && !TREE_OVERFLOW (arg1))
8509 {
8510 REAL_VALUE_TYPE cint;
8511 REAL_VALUE_TYPE c;
8512 HOST_WIDE_INT n;
8513
8514 c = TREE_REAL_CST (arg1);
8515
8516 /* Optimize pow(x,0.0) = 1.0. */
8517 if (REAL_VALUES_EQUAL (c, dconst0))
8518 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8519 arg0);
8520
8521 /* Optimize pow(x,1.0) = x. */
8522 if (REAL_VALUES_EQUAL (c, dconst1))
8523 return arg0;
8524
8525 /* Optimize pow(x,-1.0) = 1.0/x. */
8526 if (REAL_VALUES_EQUAL (c, dconstm1))
8527 return fold_build2_loc (loc, RDIV_EXPR, type,
8528 build_real (type, dconst1), arg0);
8529
8530 /* Optimize pow(x,0.5) = sqrt(x). */
8531 if (flag_unsafe_math_optimizations
8532 && REAL_VALUES_EQUAL (c, dconsthalf))
8533 {
8534 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8535
8536 if (sqrtfn != NULL_TREE)
8537 return build_call_expr_loc (loc, sqrtfn, 1, arg0);
8538 }
8539
8540 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8541 if (flag_unsafe_math_optimizations)
8542 {
8543 const REAL_VALUE_TYPE dconstroot
8544 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8545
8546 if (REAL_VALUES_EQUAL (c, dconstroot))
8547 {
8548 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8549 if (cbrtfn != NULL_TREE)
8550 return build_call_expr_loc (loc, cbrtfn, 1, arg0);
8551 }
8552 }
8553
8554 /* Check for an integer exponent. */
8555 n = real_to_integer (&c);
8556 real_from_integer (&cint, VOIDmode, n, SIGNED);
8557 if (real_identical (&c, &cint))
8558 {
8559 /* Attempt to evaluate pow at compile-time, unless this should
8560 raise an exception. */
8561 if (TREE_CODE (arg0) == REAL_CST
8562 && !TREE_OVERFLOW (arg0)
8563 && (n > 0
8564 || (!flag_trapping_math && !flag_errno_math)
8565 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
8566 {
8567 REAL_VALUE_TYPE x;
8568 bool inexact;
8569
8570 x = TREE_REAL_CST (arg0);
8571 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8572 if (flag_unsafe_math_optimizations || !inexact)
8573 return build_real (type, x);
8574 }
8575
8576 /* Strip sign ops from even integer powers. */
8577 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8578 {
8579 tree narg0 = fold_strip_sign_ops (arg0);
8580 if (narg0)
8581 return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
8582 }
8583 }
8584 }
8585
8586 if (flag_unsafe_math_optimizations)
8587 {
8588 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8589
8590 /* Optimize pow(expN(x),y) = expN(x*y). */
8591 if (BUILTIN_EXPONENT_P (fcode))
8592 {
8593 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8594 tree arg = CALL_EXPR_ARG (arg0, 0);
8595 arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
8596 return build_call_expr_loc (loc, expfn, 1, arg);
8597 }
8598
8599 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8600 if (BUILTIN_SQRT_P (fcode))
8601 {
8602 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8603 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8604 build_real (type, dconsthalf));
8605 return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
8606 }
8607
8608 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8609 if (BUILTIN_CBRT_P (fcode))
8610 {
8611 tree arg = CALL_EXPR_ARG (arg0, 0);
8612 if (tree_expr_nonnegative_p (arg))
8613 {
8614 const REAL_VALUE_TYPE dconstroot
8615 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8616 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8617 build_real (type, dconstroot));
8618 return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
8619 }
8620 }
8621
8622 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
8623 if (fcode == BUILT_IN_POW
8624 || fcode == BUILT_IN_POWF
8625 || fcode == BUILT_IN_POWL)
8626 {
8627 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8628 if (tree_expr_nonnegative_p (arg00))
8629 {
8630 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8631 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
8632 return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
8633 }
8634 }
8635 }
8636
8637 return NULL_TREE;
8638 }
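/* For illustration only (a sketch, assuming a GCC-compatible compiler;
   the function name is invented): a few of the pow rewrites above at the
   source level.

     #include <math.h>

     static double
     example (double x, double y)
     {
       double a = pow (x, 0.0);     // folds to 1.0
       double b = pow (x, 1.0);     // folds to x
       double c = pow (x, -1.0);    // folds to 1.0 / x
       double d = pow (2.0, 10.0);  // integer exponent, evaluated at compile
                                    // time: 1024.0
       // With -funsafe-math-optimizations: pow (x, 0.5) -> sqrt (x),
       // pow (exp (x), y) -> exp (x * y), pow (sqrt (x), y) -> pow (x, y * 0.5).
       return a + b + c + d + y;
     }
*/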
8639
8640 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8641 Return NULL_TREE if no simplification can be made. */
8642 static tree
8643 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
8644 tree arg0, tree arg1, tree type)
8645 {
8646 if (!validate_arg (arg0, REAL_TYPE)
8647 || !validate_arg (arg1, INTEGER_TYPE))
8648 return NULL_TREE;
8649
8650 /* Optimize pow(1.0,y) = 1.0. */
8651 if (real_onep (arg0))
8652 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8653
8654 if (tree_fits_shwi_p (arg1))
8655 {
8656 HOST_WIDE_INT c = tree_to_shwi (arg1);
8657
8658 /* Evaluate powi at compile-time. */
8659 if (TREE_CODE (arg0) == REAL_CST
8660 && !TREE_OVERFLOW (arg0))
8661 {
8662 REAL_VALUE_TYPE x;
8663 x = TREE_REAL_CST (arg0);
8664 real_powi (&x, TYPE_MODE (type), &x, c);
8665 return build_real (type, x);
8666 }
8667
8668 /* Optimize pow(x,0) = 1.0. */
8669 if (c == 0)
8670 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8671 arg0);
8672
8673 /* Optimize pow(x,1) = x. */
8674 if (c == 1)
8675 return arg0;
8676
8677 /* Optimize pow(x,-1) = 1.0/x. */
8678 if (c == -1)
8679 return fold_build2_loc (loc, RDIV_EXPR, type,
8680 build_real (type, dconst1), arg0);
8681 }
8682
8683 return NULL_TREE;
8684 }
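
/* Editor's illustrative sketch, not part of GCC: the powi folds above.  */
#if 0
static double
powi_fold_examples (double x)
{
  double a = __builtin_powi (x, 0);    /* folds to 1.0 (x still evaluated) */
  double b = __builtin_powi (x, 1);    /* folds to x */
  double c = __builtin_powi (x, -1);   /* folds to 1.0 / x */
  double d = __builtin_powi (2.0, 10); /* compile time: 1024.0 */
  return a + b + c + d;
}
#endif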
8685
8686 /* A subroutine of fold_builtin to fold the various exponent
8687 functions. Return NULL_TREE if no simplification can be made.
8688 FUNC is the corresponding MPFR exponent function. */
8689
8690 static tree
8691 fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
8692 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8693 {
8694 if (validate_arg (arg, REAL_TYPE))
8695 {
8696 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8697 tree res;
8698
8699 /* Calculate the result when the argument is a constant. */
8700 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8701 return res;
8702
8703 /* Optimize expN(logN(x)) = x. */
8704 if (flag_unsafe_math_optimizations)
8705 {
8706 const enum built_in_function fcode = builtin_mathfn_code (arg);
8707
8708 if ((func == mpfr_exp
8709 && (fcode == BUILT_IN_LOG
8710 || fcode == BUILT_IN_LOGF
8711 || fcode == BUILT_IN_LOGL))
8712 || (func == mpfr_exp2
8713 && (fcode == BUILT_IN_LOG2
8714 || fcode == BUILT_IN_LOG2F
8715 || fcode == BUILT_IN_LOG2L))
8716 || (func == mpfr_exp10
8717 && (fcode == BUILT_IN_LOG10
8718 || fcode == BUILT_IN_LOG10F
8719 || fcode == BUILT_IN_LOG10L)))
8720 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8721 }
8722 }
8723
8724 return NULL_TREE;
8725 }
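
/* Editor's illustrative sketch, not part of GCC: the expN folds above; the
   expN(logN(x)) collapse needs -funsafe-math-optimizations.  */
#if 0
static double
exp_fold_examples (double x)
{
  double a = __builtin_exp (0.0);                 /* compile time: 1.0 */
  double b = __builtin_exp (__builtin_log (x));   /* unsafe: x */
  double c = __builtin_exp2 (__builtin_log2 (x)); /* unsafe: x */
  return a + b + c;
}
#endif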
8726
8727 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8728 arguments to the call, and TYPE is its return type.
8729 Return NULL_TREE if no simplification can be made. */
8730
8731 static tree
8732 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
8733 {
8734 if (!validate_arg (arg1, POINTER_TYPE)
8735 || !validate_arg (arg2, INTEGER_TYPE)
8736 || !validate_arg (len, INTEGER_TYPE))
8737 return NULL_TREE;
8738 else
8739 {
8740 const char *p1;
8741
8742 if (TREE_CODE (arg2) != INTEGER_CST
8743 || !tree_fits_uhwi_p (len))
8744 return NULL_TREE;
8745
8746 p1 = c_getstr (arg1);
8747 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
8748 {
8749 char c;
8750 const char *r;
8751 tree tem;
8752
8753 if (target_char_cast (arg2, &c))
8754 return NULL_TREE;
8755
8756 r = (const char *) memchr (p1, c, tree_to_uhwi (len));
8757
8758 if (r == NULL)
8759 return build_int_cst (TREE_TYPE (arg1), 0);
8760
8761 tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
8762 return fold_convert_loc (loc, type, tem);
8763 }
8764 return NULL_TREE;
8765 }
8766 }
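
/* Editor's illustrative sketch, not part of GCC: the memchr fold above
   applies when the haystack is a string literal, the character is a
   constant and LEN does not exceed the literal's size.  */
#if 0
static void *
memchr_fold_example (void)
{
  return __builtin_memchr ("hello", 'l', 5);  /* folds to "hello" + 2 */
}
#endif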
8767
8768 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8769 Return NULL_TREE if no simplification can be made. */
8770
8771 static tree
8772 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8773 {
8774 const char *p1, *p2;
8775
8776 if (!validate_arg (arg1, POINTER_TYPE)
8777 || !validate_arg (arg2, POINTER_TYPE)
8778 || !validate_arg (len, INTEGER_TYPE))
8779 return NULL_TREE;
8780
8781 /* If the LEN parameter is zero, return zero. */
8782 if (integer_zerop (len))
8783 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8784 arg1, arg2);
8785
8786 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8787 if (operand_equal_p (arg1, arg2, 0))
8788 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8789
8790 p1 = c_getstr (arg1);
8791 p2 = c_getstr (arg2);
8792
8793 /* If all arguments are constant, and the value of len is not greater
8794 than the lengths of arg1 and arg2, evaluate at compile-time. */
8795 if (tree_fits_uhwi_p (len) && p1 && p2
8796 && compare_tree_int (len, strlen (p1) + 1) <= 0
8797 && compare_tree_int (len, strlen (p2) + 1) <= 0)
8798 {
8799 const int r = memcmp (p1, p2, tree_to_uhwi (len));
8800
8801 if (r > 0)
8802 return integer_one_node;
8803 else if (r < 0)
8804 return integer_minus_one_node;
8805 else
8806 return integer_zero_node;
8807 }
8808
8809 /* If the LEN parameter is one, return an expression corresponding to
8810 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8811 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8812 {
8813 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8814 tree cst_uchar_ptr_node
8815 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8816
8817 tree ind1
8818 = fold_convert_loc (loc, integer_type_node,
8819 build1 (INDIRECT_REF, cst_uchar_node,
8820 fold_convert_loc (loc,
8821 cst_uchar_ptr_node,
8822 arg1)));
8823 tree ind2
8824 = fold_convert_loc (loc, integer_type_node,
8825 build1 (INDIRECT_REF, cst_uchar_node,
8826 fold_convert_loc (loc,
8827 cst_uchar_ptr_node,
8828 arg2)));
8829 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8830 }
8831
8832 return NULL_TREE;
8833 }
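
/* Editor's illustrative sketch, not part of GCC: the memcmp folds above.  */
#if 0
static int
memcmp_fold_examples (const char *p, const char *q)
{
  int a = __builtin_memcmp (p, q, 0);       /* 0; operands still evaluated */
  int b = __builtin_memcmp (p, p, 16);      /* 0; identical operands */
  int c = __builtin_memcmp ("ab", "ac", 2); /* compile time: -1 */
  int d = __builtin_memcmp (p, q, 1);       /* *(const unsigned char *) p
                                               - *(const unsigned char *) q */
  return a + b + c + d;
}
#endif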
8834
8835 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
8836 Return NULL_TREE if no simplification can be made. */
8837
8838 static tree
8839 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
8840 {
8841 const char *p1, *p2;
8842
8843 if (!validate_arg (arg1, POINTER_TYPE)
8844 || !validate_arg (arg2, POINTER_TYPE))
8845 return NULL_TREE;
8846
8847 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8848 if (operand_equal_p (arg1, arg2, 0))
8849 return integer_zero_node;
8850
8851 p1 = c_getstr (arg1);
8852 p2 = c_getstr (arg2);
8853
8854 if (p1 && p2)
8855 {
8856 const int i = strcmp (p1, p2);
8857 if (i < 0)
8858 return integer_minus_one_node;
8859 else if (i > 0)
8860 return integer_one_node;
8861 else
8862 return integer_zero_node;
8863 }
8864
8865 /* If the second arg is "", return *(const unsigned char*)arg1. */
8866 if (p2 && *p2 == '\0')
8867 {
8868 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8869 tree cst_uchar_ptr_node
8870 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8871
8872 return fold_convert_loc (loc, integer_type_node,
8873 build1 (INDIRECT_REF, cst_uchar_node,
8874 fold_convert_loc (loc,
8875 cst_uchar_ptr_node,
8876 arg1)));
8877 }
8878
8879 /* If the first arg is "", return -*(const unsigned char*)arg2. */
8880 if (p1 && *p1 == '\0')
8881 {
8882 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8883 tree cst_uchar_ptr_node
8884 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8885
8886 tree temp
8887 = fold_convert_loc (loc, integer_type_node,
8888 build1 (INDIRECT_REF, cst_uchar_node,
8889 fold_convert_loc (loc,
8890 cst_uchar_ptr_node,
8891 arg2)));
8892 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8893 }
8894
8895 return NULL_TREE;
8896 }
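
/* Editor's illustrative sketch, not part of GCC: the strcmp folds above.  */
#if 0
static int
strcmp_fold_examples (const char *p)
{
  int a = __builtin_strcmp (p, p);       /* 0 */
  int b = __builtin_strcmp ("ab", "ac"); /* compile time: -1 */
  int c = __builtin_strcmp (p, "");      /* *(const unsigned char *) p */
  int d = __builtin_strcmp ("", p);      /* -*(const unsigned char *) p */
  return a + b + c + d;
}
#endif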
8897
8898 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
8899 Return NULL_TREE if no simplification can be made. */
8900
8901 static tree
8902 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
8903 {
8904 const char *p1, *p2;
8905
8906 if (!validate_arg (arg1, POINTER_TYPE)
8907 || !validate_arg (arg2, POINTER_TYPE)
8908 || !validate_arg (len, INTEGER_TYPE))
8909 return NULL_TREE;
8910
8911 /* If the LEN parameter is zero, return zero. */
8912 if (integer_zerop (len))
8913 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8914 arg1, arg2);
8915
8916 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8917 if (operand_equal_p (arg1, arg2, 0))
8918 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8919
8920 p1 = c_getstr (arg1);
8921 p2 = c_getstr (arg2);
8922
8923 if (tree_fits_uhwi_p (len) && p1 && p2)
8924 {
8925 const int i = strncmp (p1, p2, tree_to_uhwi (len));
8926 if (i > 0)
8927 return integer_one_node;
8928 else if (i < 0)
8929 return integer_minus_one_node;
8930 else
8931 return integer_zero_node;
8932 }
8933
8934 /* If the second arg is "", and the length is greater than zero,
8935 return *(const unsigned char*)arg1. */
8936 if (p2 && *p2 == '\0'
8937 && TREE_CODE (len) == INTEGER_CST
8938 && tree_int_cst_sgn (len) == 1)
8939 {
8940 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8941 tree cst_uchar_ptr_node
8942 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8943
8944 return fold_convert_loc (loc, integer_type_node,
8945 build1 (INDIRECT_REF, cst_uchar_node,
8946 fold_convert_loc (loc,
8947 cst_uchar_ptr_node,
8948 arg1)));
8949 }
8950
8951 /* If the first arg is "", and the length is greater than zero,
8952 return -*(const unsigned char*)arg2. */
8953 if (p1 && *p1 == '\0'
8954 && TREE_CODE (len) == INTEGER_CST
8955 && tree_int_cst_sgn (len) == 1)
8956 {
8957 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8958 tree cst_uchar_ptr_node
8959 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8960
8961 tree temp = fold_convert_loc (loc, integer_type_node,
8962 build1 (INDIRECT_REF, cst_uchar_node,
8963 fold_convert_loc (loc,
8964 cst_uchar_ptr_node,
8965 arg2)));
8966 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8967 }
8968
8969 /* If the LEN parameter is one, return an expression corresponding to
8970 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8971 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8972 {
8973 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8974 tree cst_uchar_ptr_node
8975 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8976
8977 tree ind1 = fold_convert_loc (loc, integer_type_node,
8978 build1 (INDIRECT_REF, cst_uchar_node,
8979 fold_convert_loc (loc,
8980 cst_uchar_ptr_node,
8981 arg1)));
8982 tree ind2 = fold_convert_loc (loc, integer_type_node,
8983 build1 (INDIRECT_REF, cst_uchar_node,
8984 fold_convert_loc (loc,
8985 cst_uchar_ptr_node,
8986 arg2)));
8987 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8988 }
8989
8990 return NULL_TREE;
8991 }
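
/* Editor's illustrative sketch, not part of GCC: the strncmp folds above.  */
#if 0
static int
strncmp_fold_examples (const char *p, const char *q)
{
  int a = __builtin_strncmp (p, q, 0);         /* 0; operands still evaluated */
  int b = __builtin_strncmp ("abc", "abd", 2); /* compile time: 0 */
  int c = __builtin_strncmp (p, "", 4);        /* *(const unsigned char *) p */
  return a + b + c;
}
#endif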
8992
8993 /* Fold function call to builtin signbit, signbitf or signbitl with argument
8994 ARG. Return NULL_TREE if no simplification can be made. */
8995
8996 static tree
8997 fold_builtin_signbit (location_t loc, tree arg, tree type)
8998 {
8999 if (!validate_arg (arg, REAL_TYPE))
9000 return NULL_TREE;
9001
9002 /* If ARG is a compile-time constant, determine the result. */
9003 if (TREE_CODE (arg) == REAL_CST
9004 && !TREE_OVERFLOW (arg))
9005 {
9006 REAL_VALUE_TYPE c;
9007
9008 c = TREE_REAL_CST (arg);
9009 return (REAL_VALUE_NEGATIVE (c)
9010 ? build_one_cst (type)
9011 : build_zero_cst (type));
9012 }
9013
9014 /* If ARG is non-negative, the result is always zero. */
9015 if (tree_expr_nonnegative_p (arg))
9016 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9017
9018 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9019 if (!HONOR_SIGNED_ZEROS (arg))
9020 return fold_convert (type,
9021 fold_build2_loc (loc, LT_EXPR, boolean_type_node, arg,
9022 build_real (TREE_TYPE (arg), dconst0)));
9023
9024 return NULL_TREE;
9025 }
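
/* Editor's illustrative sketch, not part of GCC: the signbit folds above.  */
#if 0
static int
signbit_fold_examples (double x)
{
  int a = __builtin_signbit (-2.5);               /* compile time: 1 */
  int b = __builtin_signbit (__builtin_fabs (x)); /* non-negative arg: 0 */
  return a + b;
}
#endif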
9026
9027 /* Fold function call to builtin copysign, copysignf or copysignl with
9028 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9029 be made. */
9030
9031 static tree
9032 fold_builtin_copysign (location_t loc, tree fndecl,
9033 tree arg1, tree arg2, tree type)
9034 {
9035 tree tem;
9036
9037 if (!validate_arg (arg1, REAL_TYPE)
9038 || !validate_arg (arg2, REAL_TYPE))
9039 return NULL_TREE;
9040
9041 /* copysign(X,X) is X. */
9042 if (operand_equal_p (arg1, arg2, 0))
9043 return fold_convert_loc (loc, type, arg1);
9044
9045 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9046 if (TREE_CODE (arg1) == REAL_CST
9047 && TREE_CODE (arg2) == REAL_CST
9048 && !TREE_OVERFLOW (arg1)
9049 && !TREE_OVERFLOW (arg2))
9050 {
9051 REAL_VALUE_TYPE c1, c2;
9052
9053 c1 = TREE_REAL_CST (arg1);
9054 c2 = TREE_REAL_CST (arg2);
9055 /* c1.sign := c2.sign. */
9056 real_copysign (&c1, &c2);
9057 return build_real (type, c1);
9058 }
9059
9060 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9061 Remember to evaluate Y for side-effects. */
9062 if (tree_expr_nonnegative_p (arg2))
9063 return omit_one_operand_loc (loc, type,
9064 fold_build1_loc (loc, ABS_EXPR, type, arg1),
9065 arg2);
9066
9067 /* Strip sign changing operations for the first argument. */
9068 tem = fold_strip_sign_ops (arg1);
9069 if (tem)
9070 return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
9071
9072 return NULL_TREE;
9073 }
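
/* Editor's illustrative sketch, not part of GCC: the copysign folds above.  */
#if 0
static double
copysign_fold_examples (double x, double y)
{
  double a = __builtin_copysign (x, x);                  /* x */
  double b = __builtin_copysign (2.0, -3.0);             /* compile time: -2.0 */
  double c = __builtin_copysign (x, __builtin_fabs (y)); /* fabs (x); y evaluated */
  return a + b + c;
}
#endif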
9074
9075 /* Fold a call to builtin isascii with argument ARG. */
9076
9077 static tree
9078 fold_builtin_isascii (location_t loc, tree arg)
9079 {
9080 if (!validate_arg (arg, INTEGER_TYPE))
9081 return NULL_TREE;
9082 else
9083 {
9084 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9085 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9086 build_int_cst (integer_type_node,
9087 ~ (unsigned HOST_WIDE_INT) 0x7f));
9088 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
9089 arg, integer_zero_node);
9090 }
9091 }
9092
9093 /* Fold a call to builtin toascii with argument ARG. */
9094
9095 static tree
9096 fold_builtin_toascii (location_t loc, tree arg)
9097 {
9098 if (!validate_arg (arg, INTEGER_TYPE))
9099 return NULL_TREE;
9100
9101 /* Transform toascii(c) -> (c & 0x7f). */
9102 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
9103 build_int_cst (integer_type_node, 0x7f));
9104 }
9105
9106 /* Fold a call to builtin isdigit with argument ARG. */
9107
9108 static tree
9109 fold_builtin_isdigit (location_t loc, tree arg)
9110 {
9111 if (!validate_arg (arg, INTEGER_TYPE))
9112 return NULL_TREE;
9113 else
9114 {
9115 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9116 /* According to the C standard, isdigit is unaffected by locale.
9117 However, it definitely is affected by the target character set. */
9118 unsigned HOST_WIDE_INT target_digit0
9119 = lang_hooks.to_target_charset ('0');
9120
9121 if (target_digit0 == 0)
9122 return NULL_TREE;
9123
9124 arg = fold_convert_loc (loc, unsigned_type_node, arg);
9125 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
9126 build_int_cst (unsigned_type_node, target_digit0));
9127 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
9128 build_int_cst (unsigned_type_node, 9));
9129 }
9130 }
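
/* Editor's illustrative sketch, not part of GCC: the character
   classification folds above; no libc call remains after folding.  */
#if 0
static int
ctype_fold_examples (int c)
{
  int a = __builtin_isascii (c); /* ((c & ~0x7f) == 0) */
  int t = __builtin_toascii (c); /* (c & 0x7f) */
  int d = __builtin_isdigit (c); /* ((unsigned) c - '0' <= 9), in the
                                    target character set */
  return a + t + d;
}
#endif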
9131
9132 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9133
9134 static tree
9135 fold_builtin_fabs (location_t loc, tree arg, tree type)
9136 {
9137 if (!validate_arg (arg, REAL_TYPE))
9138 return NULL_TREE;
9139
9140 arg = fold_convert_loc (loc, type, arg);
9141 if (TREE_CODE (arg) == REAL_CST)
9142 return fold_abs_const (arg, type);
9143 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9144 }
9145
9146 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9147
9148 static tree
9149 fold_builtin_abs (location_t loc, tree arg, tree type)
9150 {
9151 if (!validate_arg (arg, INTEGER_TYPE))
9152 return NULL_TREE;
9153
9154 arg = fold_convert_loc (loc, type, arg);
9155 if (TREE_CODE (arg) == INTEGER_CST)
9156 return fold_abs_const (arg, type);
9157 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9158 }
9159
9160 /* Fold a fma operation with arguments ARG[012]. */
9161
9162 tree
9163 fold_fma (location_t loc ATTRIBUTE_UNUSED,
9164 tree type, tree arg0, tree arg1, tree arg2)
9165 {
9166 if (TREE_CODE (arg0) == REAL_CST
9167 && TREE_CODE (arg1) == REAL_CST
9168 && TREE_CODE (arg2) == REAL_CST)
9169 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
9170
9171 return NULL_TREE;
9172 }
9173
9174 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
9175
9176 static tree
9177 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
9178 {
9179 if (validate_arg (arg0, REAL_TYPE)
9180 && validate_arg (arg1, REAL_TYPE)
9181 && validate_arg (arg2, REAL_TYPE))
9182 {
9183 tree tem = fold_fma (loc, type, arg0, arg1, arg2);
9184 if (tem)
9185 return tem;
9186
9187 /* ??? Only expand to FMA_EXPR if it's directly supported. */
9188 if (optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
9189 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
9190 }
9191 return NULL_TREE;
9192 }
9193
9194 /* Fold a call to builtin fmin or fmax. */
9195
9196 static tree
9197 fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
9198 tree type, bool max)
9199 {
9200 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9201 {
9202 /* Calculate the result when the argument is a constant. */
9203 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9204
9205 if (res)
9206 return res;
9207
9208 /* If either argument is NaN, return the other one. Avoid the
9209 transformation if we get (and honor) a signalling NaN. Using
9210 omit_one_operand() ensures we create a non-lvalue. */
9211 if (TREE_CODE (arg0) == REAL_CST
9212 && real_isnan (&TREE_REAL_CST (arg0))
9213 && (! HONOR_SNANS (arg0)
9214 || ! TREE_REAL_CST (arg0).signalling))
9215 return omit_one_operand_loc (loc, type, arg1, arg0);
9216 if (TREE_CODE (arg1) == REAL_CST
9217 && real_isnan (&TREE_REAL_CST (arg1))
9218 && (! HONOR_SNANS (arg1)
9219 || ! TREE_REAL_CST (arg1).signalling))
9220 return omit_one_operand_loc (loc, type, arg0, arg1);
9221
9222 /* Transform fmin/fmax(x,x) -> x. */
9223 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9224 return omit_one_operand_loc (loc, type, arg0, arg1);
9225
9226 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9227 functions to return the numeric arg if the other one is NaN.
9228 These tree codes don't honor that, so only transform if
9229 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9230 handled, so we don't have to worry about it either. */
9231 if (flag_finite_math_only)
9232 return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
9233 fold_convert_loc (loc, type, arg0),
9234 fold_convert_loc (loc, type, arg1));
9235 }
9236 return NULL_TREE;
9237 }
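
/* Editor's illustrative sketch, not part of GCC: the fmin/fmax folds above.  */
#if 0
static double
fmin_fmax_fold_examples (double x, double y)
{
  double a = __builtin_fmax (x, x);                  /* x */
  double b = __builtin_fmin (x, __builtin_nan ("")); /* NaN operand dropped: x */
  double c = __builtin_fmax (x, y); /* MAX_EXPR under -ffinite-math-only */
  return a + b + c;
}
#endif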
9238
9239 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9240
9241 static tree
9242 fold_builtin_carg (location_t loc, tree arg, tree type)
9243 {
9244 if (validate_arg (arg, COMPLEX_TYPE)
9245 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9246 {
9247 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9248
9249 if (atan2_fn)
9250 {
9251 tree new_arg = builtin_save_expr (arg);
9252 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9253 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9254 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9255 }
9256 }
9257
9258 return NULL_TREE;
9259 }
9260
9261 /* Fold a call to builtin logb/ilogb. */
9262
9263 static tree
9264 fold_builtin_logb (location_t loc, tree arg, tree rettype)
9265 {
9266 if (! validate_arg (arg, REAL_TYPE))
9267 return NULL_TREE;
9268
9269 STRIP_NOPS (arg);
9270
9271 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9272 {
9273 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9274
9275 switch (value->cl)
9276 {
9277 case rvc_nan:
9278 case rvc_inf:
9279 /* If arg is Inf or NaN and we're logb, return it. */
9280 if (TREE_CODE (rettype) == REAL_TYPE)
9281 {
9282 /* For logb(-Inf) we have to return +Inf. */
9283 if (real_isinf (value) && real_isneg (value))
9284 {
9285 REAL_VALUE_TYPE tem;
9286 real_inf (&tem);
9287 return build_real (rettype, tem);
9288 }
9289 return fold_convert_loc (loc, rettype, arg);
9290 }
9291 /* Fall through... */
9292 case rvc_zero:
9293 /* Zero may set errno and/or raise an exception for logb; also,
9294 for ilogb we don't know FP_ILOGB0. */
9295 return NULL_TREE;
9296 case rvc_normal:
9297 /* For normal numbers, proceed iff radix == 2. In GCC,
9298 normalized significands are in the range [0.5, 1.0). We
9299 want the exponent as if they were [1.0, 2.0) so get the
9300 exponent and subtract 1. */
9301 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9302 return fold_convert_loc (loc, rettype,
9303 build_int_cst (integer_type_node,
9304 REAL_EXP (value)-1));
9305 break;
9306 }
9307 }
9308
9309 return NULL_TREE;
9310 }
9311
9312 /* Fold a call to builtin significand, if radix == 2. */
9313
9314 static tree
9315 fold_builtin_significand (location_t loc, tree arg, tree rettype)
9316 {
9317 if (! validate_arg (arg, REAL_TYPE))
9318 return NULL_TREE;
9319
9320 STRIP_NOPS (arg);
9321
9322 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9323 {
9324 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9325
9326 switch (value->cl)
9327 {
9328 case rvc_zero:
9329 case rvc_nan:
9330 case rvc_inf:
9331 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9332 return fold_convert_loc (loc, rettype, arg);
9333 case rvc_normal:
9334 /* For normal numbers, proceed iff radix == 2. */
9335 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9336 {
9337 REAL_VALUE_TYPE result = *value;
9338 /* In GCC, normalized significands are in the range [0.5,
9339 1.0). We want them to be [1.0, 2.0) so set the
9340 exponent to 1. */
9341 SET_REAL_EXP (&result, 1);
9342 return build_real (rettype, result);
9343 }
9344 break;
9345 }
9346 }
9347
9348 return NULL_TREE;
9349 }
9350
9351 /* Fold a call to builtin frexp; we can assume the base is 2. */
9352
9353 static tree
9354 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
9355 {
9356 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9357 return NULL_TREE;
9358
9359 STRIP_NOPS (arg0);
9360
9361 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9362 return NULL_TREE;
9363
9364 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9365
9366 /* Proceed if a valid pointer type was passed in. */
9367 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9368 {
9369 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9370 tree frac, exp;
9371
9372 switch (value->cl)
9373 {
9374 case rvc_zero:
9375 /* For +-0, return (*exp = 0, +-0). */
9376 exp = integer_zero_node;
9377 frac = arg0;
9378 break;
9379 case rvc_nan:
9380 case rvc_inf:
9381 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9382 return omit_one_operand_loc (loc, rettype, arg0, arg1);
9383 case rvc_normal:
9384 {
9385 /* Since the frexp function always expects base 2, and in
9386 GCC normalized significands are already in the range
9387 [0.5, 1.0), we have exactly what frexp wants. */
9388 REAL_VALUE_TYPE frac_rvt = *value;
9389 SET_REAL_EXP (&frac_rvt, 0);
9390 frac = build_real (rettype, frac_rvt);
9391 exp = build_int_cst (integer_type_node, REAL_EXP (value));
9392 }
9393 break;
9394 default:
9395 gcc_unreachable ();
9396 }
9397
9398 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
9399 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
9400 TREE_SIDE_EFFECTS (arg1) = 1;
9401 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
9402 }
9403
9404 return NULL_TREE;
9405 }
9406
9407 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9408 then we can assume the base is two. If it's false, then we have to
9409 check the mode of the TYPE parameter in certain cases. */
9410
9411 static tree
9412 fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
9413 tree type, bool ldexp)
9414 {
9415 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9416 {
9417 STRIP_NOPS (arg0);
9418 STRIP_NOPS (arg1);
9419
9420 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9421 if (real_zerop (arg0) || integer_zerop (arg1)
9422 || (TREE_CODE (arg0) == REAL_CST
9423 && !real_isfinite (&TREE_REAL_CST (arg0))))
9424 return omit_one_operand_loc (loc, type, arg0, arg1);
9425
9426 /* If both arguments are constant, then try to evaluate it. */
9427 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9428 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9429 && tree_fits_shwi_p (arg1))
9430 {
9431 /* Bound the maximum adjustment to twice the range of the
9432 mode's valid exponents. Use abs to ensure the range is
9433 positive as a sanity check. */
9434 const long max_exp_adj = 2 *
9435 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9436 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9437
9438 /* Get the user-requested adjustment. */
9439 const HOST_WIDE_INT req_exp_adj = tree_to_shwi (arg1);
9440
9441 /* The requested adjustment must be inside this range. This
9442 is a preliminary cap to avoid things like overflow, we
9443 may still fail to compute the result for other reasons. */
9444 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9445 {
9446 REAL_VALUE_TYPE initial_result;
9447
9448 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9449
9450 /* Ensure we didn't overflow. */
9451 if (! real_isinf (&initial_result))
9452 {
9453 const REAL_VALUE_TYPE trunc_result
9454 = real_value_truncate (TYPE_MODE (type), initial_result);
9455
9456 /* Only proceed if the target mode can hold the
9457 resulting value. */
9458 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9459 return build_real (type, trunc_result);
9460 }
9461 }
9462 }
9463 }
9464
9465 return NULL_TREE;
9466 }
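
/* Editor's illustrative sketch, not part of GCC: the ldexp/scalbn folds
   above.  */
#if 0
static double
ldexp_fold_examples (double x, int n)
{
  double a = __builtin_ldexp (x, 0);   /* x */
  double b = __builtin_ldexp (0.0, n); /* 0.0; n still evaluated */
  double c = __builtin_ldexp (1.5, 4); /* compile time: 24.0 */
  return a + b + c;
}
#endif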
9467
9468 /* Fold a call to builtin modf. */
9469
9470 static tree
9471 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9472 {
9473 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9474 return NULL_TREE;
9475
9476 STRIP_NOPS (arg0);
9477
9478 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9479 return NULL_TREE;
9480
9481 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9482
9483 /* Proceed if a valid pointer type was passed in. */
9484 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9485 {
9486 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9487 REAL_VALUE_TYPE trunc, frac;
9488
9489 switch (value->cl)
9490 {
9491 case rvc_nan:
9492 case rvc_zero:
9493 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9494 trunc = frac = *value;
9495 break;
9496 case rvc_inf:
9497 /* For +-Inf, return (*arg1 = arg0, +-0). */
9498 frac = dconst0;
9499 frac.sign = value->sign;
9500 trunc = *value;
9501 break;
9502 case rvc_normal:
9503 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9504 real_trunc (&trunc, VOIDmode, value);
9505 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9506 /* If the original number was negative and already
9507 integral, then the fractional part is -0.0. */
9508 if (value->sign && frac.cl == rvc_zero)
9509 frac.sign = value->sign;
9510 break;
9511 }
9512
9513 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9514 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9515 build_real (rettype, trunc));
9516 TREE_SIDE_EFFECTS (arg1) = 1;
9517 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9518 build_real (rettype, frac));
9519 }
9520
9521 return NULL_TREE;
9522 }
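
/* Editor's illustrative sketch, not part of GCC: the frexp and modf folds
   above act only on constant first arguments; the store through the pointer
   argument is kept as the first half of a COMPOUND_EXPR.  */
#if 0
static double
frexp_modf_fold_examples (void)
{
  int e;
  double ip;
  double f = __builtin_frexp (8.0, &e);  /* (e = 4, 0.5) */
  double m = __builtin_modf (2.75, &ip); /* (ip = 2.0, 0.75) */
  return f + m;
}
#endif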
9523
9524 /* Given a location LOC, an interclass builtin function decl FNDECL
9525 and its single argument ARG, return a folded expression computing
9526 the same, or NULL_TREE if we either couldn't or didn't want to fold
9527 (the latter happens if there's an RTL instruction available). */
9528
9529 static tree
9530 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9531 {
9532 machine_mode mode;
9533
9534 if (!validate_arg (arg, REAL_TYPE))
9535 return NULL_TREE;
9536
9537 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9538 return NULL_TREE;
9539
9540 mode = TYPE_MODE (TREE_TYPE (arg));
9541
9542 /* If there is no optab, try generic code. */
9543 switch (DECL_FUNCTION_CODE (fndecl))
9544 {
9545 tree result;
9546
9547 CASE_FLT_FN (BUILT_IN_ISINF):
9548 {
9549 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9550 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9551 tree const type = TREE_TYPE (arg);
9552 REAL_VALUE_TYPE r;
9553 char buf[128];
9554
9555 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9556 real_from_string (&r, buf);
9557 result = build_call_expr (isgr_fn, 2,
9558 fold_build1_loc (loc, ABS_EXPR, type, arg),
9559 build_real (type, r));
9560 return result;
9561 }
9562 CASE_FLT_FN (BUILT_IN_FINITE):
9563 case BUILT_IN_ISFINITE:
9564 {
9565 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9566 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9567 tree const type = TREE_TYPE (arg);
9568 REAL_VALUE_TYPE r;
9569 char buf[128];
9570
9571 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9572 real_from_string (&r, buf);
9573 result = build_call_expr (isle_fn, 2,
9574 fold_build1_loc (loc, ABS_EXPR, type, arg),
9575 build_real (type, r));
9576 /*result = fold_build2_loc (loc, UNGT_EXPR,
9577 TREE_TYPE (TREE_TYPE (fndecl)),
9578 fold_build1_loc (loc, ABS_EXPR, type, arg),
9579 build_real (type, r));
9580 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9581 TREE_TYPE (TREE_TYPE (fndecl)),
9582 result);*/
9583 return result;
9584 }
9585 case BUILT_IN_ISNORMAL:
9586 {
9587 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9588 islessequal(fabs(x),DBL_MAX). */
9589 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9590 tree const isge_fn = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
9591 tree const type = TREE_TYPE (arg);
9592 REAL_VALUE_TYPE rmax, rmin;
9593 char buf[128];
9594
9595 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9596 real_from_string (&rmax, buf);
9597 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9598 real_from_string (&rmin, buf);
9599 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9600 result = build_call_expr (isle_fn, 2, arg,
9601 build_real (type, rmax));
9602 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
9603 build_call_expr (isge_fn, 2, arg,
9604 build_real (type, rmin)));
9605 return result;
9606 }
9607 default:
9608 break;
9609 }
9610
9611 return NULL_TREE;
9612 }
9613
9614 /* Fold a call to __builtin_isnan, __builtin_isinf or __builtin_finite.
9615 ARG is the argument for the call; BUILTIN_INDEX selects the classification. */
9616
9617 static tree
9618 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
9619 {
9620 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9621 REAL_VALUE_TYPE r;
9622
9623 if (!validate_arg (arg, REAL_TYPE))
9624 return NULL_TREE;
9625
9626 switch (builtin_index)
9627 {
9628 case BUILT_IN_ISINF:
9629 if (!HONOR_INFINITIES (arg))
9630 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9631
9632 if (TREE_CODE (arg) == REAL_CST)
9633 {
9634 r = TREE_REAL_CST (arg);
9635 if (real_isinf (&r))
9636 return real_compare (GT_EXPR, &r, &dconst0)
9637 ? integer_one_node : integer_minus_one_node;
9638 else
9639 return integer_zero_node;
9640 }
9641
9642 return NULL_TREE;
9643
9644 case BUILT_IN_ISINF_SIGN:
9645 {
9646 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9647 /* In a boolean context, GCC will fold the inner COND_EXPR to
9648 1. So e.g. "if (isinf_sign(x))" would be folded to just
9649 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9650 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
9651 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
9652 tree tmp = NULL_TREE;
9653
9654 arg = builtin_save_expr (arg);
9655
9656 if (signbit_fn && isinf_fn)
9657 {
9658 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9659 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
9660
9661 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9662 signbit_call, integer_zero_node);
9663 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9664 isinf_call, integer_zero_node);
9665
9666 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
9667 integer_minus_one_node, integer_one_node);
9668 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9669 isinf_call, tmp,
9670 integer_zero_node);
9671 }
9672
9673 return tmp;
9674 }
9675
9676 case BUILT_IN_ISFINITE:
9677 if (!HONOR_NANS (arg)
9678 && !HONOR_INFINITIES (arg))
9679 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9680
9681 if (TREE_CODE (arg) == REAL_CST)
9682 {
9683 r = TREE_REAL_CST (arg);
9684 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
9685 }
9686
9687 return NULL_TREE;
9688
9689 case BUILT_IN_ISNAN:
9690 if (!HONOR_NANS (arg))
9691 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9692
9693 if (TREE_CODE (arg) == REAL_CST)
9694 {
9695 r = TREE_REAL_CST (arg);
9696 return real_isnan (&r) ? integer_one_node : integer_zero_node;
9697 }
9698
9699 arg = builtin_save_expr (arg);
9700 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
9701
9702 default:
9703 gcc_unreachable ();
9704 }
9705 }
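
/* Editor's illustrative sketch, not part of GCC: the classification folds
   above.  */
#if 0
static int
classify_fold_examples (double x)
{
  int a = __builtin_isnan (__builtin_nan ("")); /* compile time: 1 */
  int b = __builtin_isinf (-__builtin_inf ());  /* compile time: -1 */
  int c = __builtin_isfinite (1.0);             /* compile time: 1 */
  int d = __builtin_isnan (x);                  /* x UNORDERED x */
  return a + b + c + d;
}
#endif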
9706
9707 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9708 This builtin will generate code to return the appropriate floating
9709 point classification depending on the value of the floating point
9710 number passed in. The possible return values must be supplied as
9711 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9712 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
9713 one floating point argument which is "type generic". */
9714
9715 static tree
9716 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
9717 {
9718 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9719 arg, type, res, tmp;
9720 machine_mode mode;
9721 REAL_VALUE_TYPE r;
9722 char buf[128];
9723
9724 /* Verify the required arguments in the original call. */
9725 if (nargs != 6
9726 || !validate_arg (args[0], INTEGER_TYPE)
9727 || !validate_arg (args[1], INTEGER_TYPE)
9728 || !validate_arg (args[2], INTEGER_TYPE)
9729 || !validate_arg (args[3], INTEGER_TYPE)
9730 || !validate_arg (args[4], INTEGER_TYPE)
9731 || !validate_arg (args[5], REAL_TYPE))
9732 return NULL_TREE;
9733
9734 fp_nan = args[0];
9735 fp_infinite = args[1];
9736 fp_normal = args[2];
9737 fp_subnormal = args[3];
9738 fp_zero = args[4];
9739 arg = args[5];
9740 type = TREE_TYPE (arg);
9741 mode = TYPE_MODE (type);
9742 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9743
9744 /* fpclassify(x) ->
9745 isnan(x) ? FP_NAN :
9746 (fabs(x) == Inf ? FP_INFINITE :
9747 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9748 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9749
9750 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9751 build_real (type, dconst0));
9752 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9753 tmp, fp_zero, fp_subnormal);
9754
9755 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9756 real_from_string (&r, buf);
9757 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9758 arg, build_real (type, r));
9759 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
9760
9761 if (HONOR_INFINITIES (mode))
9762 {
9763 real_inf (&r);
9764 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9765 build_real (type, r));
9766 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9767 fp_infinite, res);
9768 }
9769
9770 if (HONOR_NANS (mode))
9771 {
9772 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9773 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
9774 }
9775
9776 return res;
9777 }
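
/* Editor's illustrative sketch, not part of GCC: the fpclassify expansion
   above, using the <math.h> FP_* macros as the five integer arguments.  */
#if 0
#include <math.h>

static int
fpclassify_fold_example (double x)
{
  /* Becomes, roughly:
       isnan (x)           ? FP_NAN
     : fabs (x) == Inf     ? FP_INFINITE
     : fabs (x) >= DBL_MIN ? FP_NORMAL
     : x == 0.0            ? FP_ZERO
     :                       FP_SUBNORMAL  */
  return __builtin_fpclassify (FP_NAN, FP_INFINITE, FP_NORMAL,
                               FP_SUBNORMAL, FP_ZERO, x);
}
#endif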
9778
9779 /* Fold a call to an unordered comparison function such as
9780 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9781 being called and ARG0 and ARG1 are the arguments for the call.
9782 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9783 the opposite of the desired result. UNORDERED_CODE is used
9784 for modes that can hold NaNs and ORDERED_CODE is used for
9785 the rest. */
9786
9787 static tree
9788 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9789 enum tree_code unordered_code,
9790 enum tree_code ordered_code)
9791 {
9792 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9793 enum tree_code code;
9794 tree type0, type1;
9795 enum tree_code code0, code1;
9796 tree cmp_type = NULL_TREE;
9797
9798 type0 = TREE_TYPE (arg0);
9799 type1 = TREE_TYPE (arg1);
9800
9801 code0 = TREE_CODE (type0);
9802 code1 = TREE_CODE (type1);
9803
9804 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9805 /* Choose the wider of two real types. */
9806 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9807 ? type0 : type1;
9808 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9809 cmp_type = type0;
9810 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9811 cmp_type = type1;
9812
9813 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9814 arg1 = fold_convert_loc (loc, cmp_type, arg1);
9815
9816 if (unordered_code == UNORDERED_EXPR)
9817 {
9818 if (!HONOR_NANS (arg0))
9819 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9820 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
9821 }
9822
9823 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
9824 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9825 fold_build2_loc (loc, code, type, arg0, arg1));
9826 }
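
/* Editor's illustrative sketch, not part of GCC: the unordered comparison
   folds above negate the opposite ordered comparison so that a NaN operand
   gives the required answer.  */
#if 0
static int
unordered_cmp_fold_examples (double x, double y)
{
  int a = __builtin_isgreater (x, y);   /* !(x <= y), UNLE negated */
  int b = __builtin_isunordered (x, y); /* x UNORDERED y; 0 if NaNs cannot occur */
  return a + b;
}
#endif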
9827
9828 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
9829 arithmetic if it can never overflow, or into internal functions that
9830 return both the result of the arithmetic and an overflow boolean flag in
9831 a complex integer result, or into some other check for overflow. */
9832
9833 static tree
9834 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
9835 tree arg0, tree arg1, tree arg2)
9836 {
9837 enum internal_fn ifn = IFN_LAST;
9838 tree type = TREE_TYPE (TREE_TYPE (arg2));
9839 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
9840 switch (fcode)
9841 {
9842 case BUILT_IN_ADD_OVERFLOW:
9843 case BUILT_IN_SADD_OVERFLOW:
9844 case BUILT_IN_SADDL_OVERFLOW:
9845 case BUILT_IN_SADDLL_OVERFLOW:
9846 case BUILT_IN_UADD_OVERFLOW:
9847 case BUILT_IN_UADDL_OVERFLOW:
9848 case BUILT_IN_UADDLL_OVERFLOW:
9849 ifn = IFN_ADD_OVERFLOW;
9850 break;
9851 case BUILT_IN_SUB_OVERFLOW:
9852 case BUILT_IN_SSUB_OVERFLOW:
9853 case BUILT_IN_SSUBL_OVERFLOW:
9854 case BUILT_IN_SSUBLL_OVERFLOW:
9855 case BUILT_IN_USUB_OVERFLOW:
9856 case BUILT_IN_USUBL_OVERFLOW:
9857 case BUILT_IN_USUBLL_OVERFLOW:
9858 ifn = IFN_SUB_OVERFLOW;
9859 break;
9860 case BUILT_IN_MUL_OVERFLOW:
9861 case BUILT_IN_SMUL_OVERFLOW:
9862 case BUILT_IN_SMULL_OVERFLOW:
9863 case BUILT_IN_SMULLL_OVERFLOW:
9864 case BUILT_IN_UMUL_OVERFLOW:
9865 case BUILT_IN_UMULL_OVERFLOW:
9866 case BUILT_IN_UMULLL_OVERFLOW:
9867 ifn = IFN_MUL_OVERFLOW;
9868 break;
9869 default:
9870 gcc_unreachable ();
9871 }
9872 tree ctype = build_complex_type (type);
9873 tree call = build_call_expr_internal_loc (loc, ifn, ctype,
9874 2, arg0, arg1);
9875 tree tgt = save_expr (call);
9876 tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
9877 tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
9878 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
9879 tree store
9880 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
9881 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
9882 }
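
/* Editor's illustrative sketch, not part of GCC: the overflow builtins
   above lower to an internal *_OVERFLOW call whose complex result carries
   the value in the real part and the overflow flag in the imaginary part.  */
#if 0
static int
add_overflow_fold_example (int a, int b)
{
  int sum;
  /* Conceptually: tmp = .ADD_OVERFLOW (a, b);
                   sum = REALPART_EXPR <tmp>;
                   if ((_Bool) IMAGPART_EXPR <tmp>) ...  */
  if (__builtin_add_overflow (a, b, &sum))
    return -1;
  return sum;
}
#endif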
9883
9884 /* Fold a call to built-in function FNDECL with 0 arguments.
9885 This function returns NULL_TREE if no simplification was possible. */
9886
9887 static tree
9888 fold_builtin_0 (location_t loc, tree fndecl)
9889 {
9890 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9891 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9892 switch (fcode)
9893 {
9894 CASE_FLT_FN (BUILT_IN_INF):
9895 case BUILT_IN_INFD32:
9896 case BUILT_IN_INFD64:
9897 case BUILT_IN_INFD128:
9898 return fold_builtin_inf (loc, type, true);
9899
9900 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9901 return fold_builtin_inf (loc, type, false);
9902
9903 case BUILT_IN_CLASSIFY_TYPE:
9904 return fold_builtin_classify_type (NULL_TREE);
9905
9906 default:
9907 break;
9908 }
9909 return NULL_TREE;
9910 }
9911
9912 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9913 This function returns NULL_TREE if no simplification was possible. */
9914
9915 static tree
9916 fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
9917 {
9918 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9919 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9920 switch (fcode)
9921 {
9922 case BUILT_IN_CONSTANT_P:
9923 {
9924 tree val = fold_builtin_constant_p (arg0);
9925
9926 /* Gimplification will pull the CALL_EXPR for the builtin out of
9927 an if condition. When not optimizing, we'll not CSE it back.
9928 To avoid link-error regressions, return false now. */
9929 if (!val && !optimize)
9930 val = integer_zero_node;
9931
9932 return val;
9933 }
9934
9935 case BUILT_IN_CLASSIFY_TYPE:
9936 return fold_builtin_classify_type (arg0);
9937
9938 case BUILT_IN_STRLEN:
9939 return fold_builtin_strlen (loc, type, arg0);
9940
9941 CASE_FLT_FN (BUILT_IN_FABS):
9942 case BUILT_IN_FABSD32:
9943 case BUILT_IN_FABSD64:
9944 case BUILT_IN_FABSD128:
9945 return fold_builtin_fabs (loc, arg0, type);
9946
9947 case BUILT_IN_ABS:
9948 case BUILT_IN_LABS:
9949 case BUILT_IN_LLABS:
9950 case BUILT_IN_IMAXABS:
9951 return fold_builtin_abs (loc, arg0, type);
9952
9953 CASE_FLT_FN (BUILT_IN_CONJ):
9954 if (validate_arg (arg0, COMPLEX_TYPE)
9955 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9956 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9957 break;
9958
9959 CASE_FLT_FN (BUILT_IN_CREAL):
9960 if (validate_arg (arg0, COMPLEX_TYPE)
9961 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9962 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
9963 break;
9964
9965 CASE_FLT_FN (BUILT_IN_CIMAG):
9966 if (validate_arg (arg0, COMPLEX_TYPE)
9967 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9968 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9969 break;
9970
9971 CASE_FLT_FN (BUILT_IN_CCOS):
9972 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ false);
9973
9974 CASE_FLT_FN (BUILT_IN_CCOSH):
9975 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ true);
9976
9977 CASE_FLT_FN (BUILT_IN_CPROJ):
9978 return fold_builtin_cproj (loc, arg0, type);
9979
9980 CASE_FLT_FN (BUILT_IN_CSIN):
9981 if (validate_arg (arg0, COMPLEX_TYPE)
9982 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9983 return do_mpc_arg1 (arg0, type, mpc_sin);
9984 break;
9985
9986 CASE_FLT_FN (BUILT_IN_CSINH):
9987 if (validate_arg (arg0, COMPLEX_TYPE)
9988 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9989 return do_mpc_arg1 (arg0, type, mpc_sinh);
9990 break;
9991
9992 CASE_FLT_FN (BUILT_IN_CTAN):
9993 if (validate_arg (arg0, COMPLEX_TYPE)
9994 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9995 return do_mpc_arg1 (arg0, type, mpc_tan);
9996 break;
9997
9998 CASE_FLT_FN (BUILT_IN_CTANH):
9999 if (validate_arg (arg0, COMPLEX_TYPE)
10000 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10001 return do_mpc_arg1 (arg0, type, mpc_tanh);
10002 break;
10003
10004 CASE_FLT_FN (BUILT_IN_CLOG):
10005 if (validate_arg (arg0, COMPLEX_TYPE)
10006 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10007 return do_mpc_arg1 (arg0, type, mpc_log);
10008 break;
10009
10010 CASE_FLT_FN (BUILT_IN_CSQRT):
10011 if (validate_arg (arg0, COMPLEX_TYPE)
10012 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10013 return do_mpc_arg1 (arg0, type, mpc_sqrt);
10014 break;
10015
10016 CASE_FLT_FN (BUILT_IN_CASIN):
10017 if (validate_arg (arg0, COMPLEX_TYPE)
10018 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10019 return do_mpc_arg1 (arg0, type, mpc_asin);
10020 break;
10021
10022 CASE_FLT_FN (BUILT_IN_CACOS):
10023 if (validate_arg (arg0, COMPLEX_TYPE)
10024 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10025 return do_mpc_arg1 (arg0, type, mpc_acos);
10026 break;
10027
10028 CASE_FLT_FN (BUILT_IN_CATAN):
10029 if (validate_arg (arg0, COMPLEX_TYPE)
10030 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10031 return do_mpc_arg1 (arg0, type, mpc_atan);
10032 break;
10033
10034 CASE_FLT_FN (BUILT_IN_CASINH):
10035 if (validate_arg (arg0, COMPLEX_TYPE)
10036 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10037 return do_mpc_arg1 (arg0, type, mpc_asinh);
10038 break;
10039
10040 CASE_FLT_FN (BUILT_IN_CACOSH):
10041 if (validate_arg (arg0, COMPLEX_TYPE)
10042 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10043 return do_mpc_arg1 (arg0, type, mpc_acosh);
10044 break;
10045
10046 CASE_FLT_FN (BUILT_IN_CATANH):
10047 if (validate_arg (arg0, COMPLEX_TYPE)
10048 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10049 return do_mpc_arg1 (arg0, type, mpc_atanh);
10050 break;
10051
10052 CASE_FLT_FN (BUILT_IN_CABS):
10053 return fold_builtin_cabs (loc, arg0, type, fndecl);
10054
10055 CASE_FLT_FN (BUILT_IN_CARG):
10056 return fold_builtin_carg (loc, arg0, type);
10057
10058 CASE_FLT_FN (BUILT_IN_SQRT):
10059 return fold_builtin_sqrt (loc, arg0, type);
10060
10061 CASE_FLT_FN (BUILT_IN_CBRT):
10062 return fold_builtin_cbrt (loc, arg0, type);
10063
10064 CASE_FLT_FN (BUILT_IN_ASIN):
10065 if (validate_arg (arg0, REAL_TYPE))
10066 return do_mpfr_arg1 (arg0, type, mpfr_asin,
10067 &dconstm1, &dconst1, true);
10068 break;
10069
10070 CASE_FLT_FN (BUILT_IN_ACOS):
10071 if (validate_arg (arg0, REAL_TYPE))
10072 return do_mpfr_arg1 (arg0, type, mpfr_acos,
10073 &dconstm1, &dconst1, true);
10074 break;
10075
10076 CASE_FLT_FN (BUILT_IN_ATAN):
10077 if (validate_arg (arg0, REAL_TYPE))
10078 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
10079 break;
10080
10081 CASE_FLT_FN (BUILT_IN_ASINH):
10082 if (validate_arg (arg0, REAL_TYPE))
10083 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
10084 break;
10085
10086 CASE_FLT_FN (BUILT_IN_ACOSH):
10087 if (validate_arg (arg0, REAL_TYPE))
10088 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
10089 &dconst1, NULL, true);
10090 break;
10091
10092 CASE_FLT_FN (BUILT_IN_ATANH):
10093 if (validate_arg (arg0, REAL_TYPE))
10094 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
10095 &dconstm1, &dconst1, false);
10096 break;
10097
10098 CASE_FLT_FN (BUILT_IN_SIN):
10099 if (validate_arg (arg0, REAL_TYPE))
10100 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
10101 break;
10102
10103 CASE_FLT_FN (BUILT_IN_COS):
10104 return fold_builtin_cos (loc, arg0, type, fndecl);
10105
10106 CASE_FLT_FN (BUILT_IN_TAN):
10107 return fold_builtin_tan (arg0, type);
10108
10109 CASE_FLT_FN (BUILT_IN_CEXP):
10110 return fold_builtin_cexp (loc, arg0, type);
10111
10112 CASE_FLT_FN (BUILT_IN_CEXPI):
10113 if (validate_arg (arg0, REAL_TYPE))
10114 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
10115 break;
10116
10117 CASE_FLT_FN (BUILT_IN_SINH):
10118 if (validate_arg (arg0, REAL_TYPE))
10119 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
10120 break;
10121
10122 CASE_FLT_FN (BUILT_IN_COSH):
10123 return fold_builtin_cosh (loc, arg0, type, fndecl);
10124
10125 CASE_FLT_FN (BUILT_IN_TANH):
10126 if (validate_arg (arg0, REAL_TYPE))
10127 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
10128 break;
10129
10130 CASE_FLT_FN (BUILT_IN_ERF):
10131 if (validate_arg (arg0, REAL_TYPE))
10132 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
10133 break;
10134
10135 CASE_FLT_FN (BUILT_IN_ERFC):
10136 if (validate_arg (arg0, REAL_TYPE))
10137 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
10138 break;
10139
10140 CASE_FLT_FN (BUILT_IN_TGAMMA):
10141 if (validate_arg (arg0, REAL_TYPE))
10142 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
10143 break;
10144
10145 CASE_FLT_FN (BUILT_IN_EXP):
10146 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
10147
10148 CASE_FLT_FN (BUILT_IN_EXP2):
10149 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
10150
10151 CASE_FLT_FN (BUILT_IN_EXP10):
10152 CASE_FLT_FN (BUILT_IN_POW10):
10153 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
10154
10155 CASE_FLT_FN (BUILT_IN_EXPM1):
10156 if (validate_arg (arg0, REAL_TYPE))
10157 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
10158 break;
10159
10160 CASE_FLT_FN (BUILT_IN_LOG):
10161 if (validate_arg (arg0, REAL_TYPE))
10162 return do_mpfr_arg1 (arg0, type, mpfr_log, &dconst0, NULL, false);
10163 break;
10164
10165 CASE_FLT_FN (BUILT_IN_LOG2):
10166 if (validate_arg (arg0, REAL_TYPE))
10167 return do_mpfr_arg1 (arg0, type, mpfr_log2, &dconst0, NULL, false);
10168 break;
10169
10170 CASE_FLT_FN (BUILT_IN_LOG10):
10171 if (validate_arg (arg0, REAL_TYPE))
10172 return do_mpfr_arg1 (arg0, type, mpfr_log10, &dconst0, NULL, false);
10173 break;
10174
10175 CASE_FLT_FN (BUILT_IN_LOG1P):
10176 if (validate_arg (arg0, REAL_TYPE))
10177 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
10178 &dconstm1, NULL, false);
10179 break;
10180
10181 CASE_FLT_FN (BUILT_IN_J0):
10182 if (validate_arg (arg0, REAL_TYPE))
10183 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10184 NULL, NULL, 0);
10185 break;
10186
10187 CASE_FLT_FN (BUILT_IN_J1):
10188 if (validate_arg (arg0, REAL_TYPE))
10189 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10190 NULL, NULL, 0);
10191 break;
10192
10193 CASE_FLT_FN (BUILT_IN_Y0):
10194 if (validate_arg (arg0, REAL_TYPE))
10195 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10196 &dconst0, NULL, false);
10197 break;
10198
10199 CASE_FLT_FN (BUILT_IN_Y1):
10200 if (validate_arg (arg0, REAL_TYPE))
10201 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10202 &dconst0, NULL, false);
10203 break;
10204
10205 CASE_FLT_FN (BUILT_IN_NAN):
10206 case BUILT_IN_NAND32:
10207 case BUILT_IN_NAND64:
10208 case BUILT_IN_NAND128:
10209 return fold_builtin_nan (arg0, type, true);
10210
10211 CASE_FLT_FN (BUILT_IN_NANS):
10212 return fold_builtin_nan (arg0, type, false);
10213
10214 CASE_FLT_FN (BUILT_IN_FLOOR):
10215 return fold_builtin_floor (loc, fndecl, arg0);
10216
10217 CASE_FLT_FN (BUILT_IN_CEIL):
10218 return fold_builtin_ceil (loc, fndecl, arg0);
10219
10220 CASE_FLT_FN (BUILT_IN_TRUNC):
10221 return fold_builtin_trunc (loc, fndecl, arg0);
10222
10223 CASE_FLT_FN (BUILT_IN_ROUND):
10224 return fold_builtin_round (loc, fndecl, arg0);
10225
10226 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10227 CASE_FLT_FN (BUILT_IN_RINT):
10228 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
10229
10230 CASE_FLT_FN (BUILT_IN_ICEIL):
10231 CASE_FLT_FN (BUILT_IN_LCEIL):
10232 CASE_FLT_FN (BUILT_IN_LLCEIL):
10233 CASE_FLT_FN (BUILT_IN_LFLOOR):
10234 CASE_FLT_FN (BUILT_IN_IFLOOR):
10235 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10236 CASE_FLT_FN (BUILT_IN_IROUND):
10237 CASE_FLT_FN (BUILT_IN_LROUND):
10238 CASE_FLT_FN (BUILT_IN_LLROUND):
10239 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
10240
10241 CASE_FLT_FN (BUILT_IN_IRINT):
10242 CASE_FLT_FN (BUILT_IN_LRINT):
10243 CASE_FLT_FN (BUILT_IN_LLRINT):
10244 return fold_fixed_mathfn (loc, fndecl, arg0);
10245
10246 case BUILT_IN_BSWAP16:
10247 case BUILT_IN_BSWAP32:
10248 case BUILT_IN_BSWAP64:
10249 return fold_builtin_bswap (fndecl, arg0);
10250
10251 CASE_INT_FN (BUILT_IN_FFS):
10252 CASE_INT_FN (BUILT_IN_CLZ):
10253 CASE_INT_FN (BUILT_IN_CTZ):
10254 CASE_INT_FN (BUILT_IN_CLRSB):
10255 CASE_INT_FN (BUILT_IN_POPCOUNT):
10256 CASE_INT_FN (BUILT_IN_PARITY):
10257 return fold_builtin_bitop (fndecl, arg0);
10258
10259 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10260 return fold_builtin_signbit (loc, arg0, type);
10261
10262 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10263 return fold_builtin_significand (loc, arg0, type);
10264
10265 CASE_FLT_FN (BUILT_IN_ILOGB):
10266 CASE_FLT_FN (BUILT_IN_LOGB):
10267 return fold_builtin_logb (loc, arg0, type);
10268
10269 case BUILT_IN_ISASCII:
10270 return fold_builtin_isascii (loc, arg0);
10271
10272 case BUILT_IN_TOASCII:
10273 return fold_builtin_toascii (loc, arg0);
10274
10275 case BUILT_IN_ISDIGIT:
10276 return fold_builtin_isdigit (loc, arg0);
10277
10278 CASE_FLT_FN (BUILT_IN_FINITE):
10279 case BUILT_IN_FINITED32:
10280 case BUILT_IN_FINITED64:
10281 case BUILT_IN_FINITED128:
10282 case BUILT_IN_ISFINITE:
10283 {
10284 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10285 if (ret)
10286 return ret;
10287 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10288 }
10289
10290 CASE_FLT_FN (BUILT_IN_ISINF):
10291 case BUILT_IN_ISINFD32:
10292 case BUILT_IN_ISINFD64:
10293 case BUILT_IN_ISINFD128:
10294 {
10295 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10296 if (ret)
10297 return ret;
10298 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10299 }
10300
10301 case BUILT_IN_ISNORMAL:
10302 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10303
10304 case BUILT_IN_ISINF_SIGN:
10305 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10306
10307 CASE_FLT_FN (BUILT_IN_ISNAN):
10308 case BUILT_IN_ISNAND32:
10309 case BUILT_IN_ISNAND64:
10310 case BUILT_IN_ISNAND128:
10311 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10312
10313 case BUILT_IN_FREE:
10314 if (integer_zerop (arg0))
10315 return build_empty_stmt (loc);
10316 break;
10317
10318 default:
10319 break;
10320 }
10321
10322 return NULL_TREE;
10323
10324 }
10325
10326 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10327 This function returns NULL_TREE if no simplification was possible. */
10328
10329 static tree
10330 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
10331 {
10332 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10333 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10334
10335 switch (fcode)
10336 {
10337 CASE_FLT_FN (BUILT_IN_JN):
10338 if (validate_arg (arg0, INTEGER_TYPE)
10339 && validate_arg (arg1, REAL_TYPE))
10340 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10341 break;
10342
10343 CASE_FLT_FN (BUILT_IN_YN):
10344 if (validate_arg (arg0, INTEGER_TYPE)
10345 && validate_arg (arg1, REAL_TYPE))
10346 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10347 &dconst0, false);
10348 break;
10349
10350 CASE_FLT_FN (BUILT_IN_DREM):
10351 CASE_FLT_FN (BUILT_IN_REMAINDER):
10352 if (validate_arg (arg0, REAL_TYPE)
10353 && validate_arg (arg1, REAL_TYPE))
10354 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10355 break;
10356
10357 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10358 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10359 if (validate_arg (arg0, REAL_TYPE)
10360 && validate_arg (arg1, POINTER_TYPE))
10361 return do_mpfr_lgamma_r (arg0, arg1, type);
10362 break;
10363
10364 CASE_FLT_FN (BUILT_IN_ATAN2):
10365 if (validate_arg (arg0, REAL_TYPE)
10366 && validate_arg (arg1, REAL_TYPE))
10367 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10368 break;
10369
10370 CASE_FLT_FN (BUILT_IN_FDIM):
10371 if (validate_arg (arg0, REAL_TYPE)
10372 && validate_arg (arg1, REAL_TYPE))
10373 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10374 break;
10375
10376 CASE_FLT_FN (BUILT_IN_HYPOT):
10377 return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
10378
10379 CASE_FLT_FN (BUILT_IN_CPOW):
10380 if (validate_arg (arg0, COMPLEX_TYPE)
10381 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10382 && validate_arg (arg1, COMPLEX_TYPE)
10383 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
10384 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
10385 break;
10386
10387 CASE_FLT_FN (BUILT_IN_LDEXP):
10388 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
10389 CASE_FLT_FN (BUILT_IN_SCALBN):
10390 CASE_FLT_FN (BUILT_IN_SCALBLN):
10391 return fold_builtin_load_exponent (loc, arg0, arg1,
10392 type, /*ldexp=*/false);
10393
10394 CASE_FLT_FN (BUILT_IN_FREXP):
10395 return fold_builtin_frexp (loc, arg0, arg1, type);
10396
10397 CASE_FLT_FN (BUILT_IN_MODF):
10398 return fold_builtin_modf (loc, arg0, arg1, type);
10399
10400 case BUILT_IN_STRSTR:
10401 return fold_builtin_strstr (loc, arg0, arg1, type);
10402
10403 case BUILT_IN_STRSPN:
10404 return fold_builtin_strspn (loc, arg0, arg1);
10405
10406 case BUILT_IN_STRCSPN:
10407 return fold_builtin_strcspn (loc, arg0, arg1);
10408
10409 case BUILT_IN_STRCHR:
10410 case BUILT_IN_INDEX:
10411 return fold_builtin_strchr (loc, arg0, arg1, type);
10412
10413 case BUILT_IN_STRRCHR:
10414 case BUILT_IN_RINDEX:
10415 return fold_builtin_strrchr (loc, arg0, arg1, type);
10416
10417 case BUILT_IN_STRCMP:
10418 return fold_builtin_strcmp (loc, arg0, arg1);
10419
10420 case BUILT_IN_STRPBRK:
10421 return fold_builtin_strpbrk (loc, arg0, arg1, type);
10422
10423 case BUILT_IN_EXPECT:
10424 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);
10425
10426 CASE_FLT_FN (BUILT_IN_POW):
10427 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
10428
10429 CASE_FLT_FN (BUILT_IN_POWI):
10430 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
10431
10432 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10433 return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
10434
10435 CASE_FLT_FN (BUILT_IN_FMIN):
10436 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
10437
10438 CASE_FLT_FN (BUILT_IN_FMAX):
10439 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
10440
10441 case BUILT_IN_ISGREATER:
10442 return fold_builtin_unordered_cmp (loc, fndecl,
10443 arg0, arg1, UNLE_EXPR, LE_EXPR);
10444 case BUILT_IN_ISGREATEREQUAL:
10445 return fold_builtin_unordered_cmp (loc, fndecl,
10446 arg0, arg1, UNLT_EXPR, LT_EXPR);
10447 case BUILT_IN_ISLESS:
10448 return fold_builtin_unordered_cmp (loc, fndecl,
10449 arg0, arg1, UNGE_EXPR, GE_EXPR);
10450 case BUILT_IN_ISLESSEQUAL:
10451 return fold_builtin_unordered_cmp (loc, fndecl,
10452 arg0, arg1, UNGT_EXPR, GT_EXPR);
10453 case BUILT_IN_ISLESSGREATER:
10454 return fold_builtin_unordered_cmp (loc, fndecl,
10455 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10456 case BUILT_IN_ISUNORDERED:
10457 return fold_builtin_unordered_cmp (loc, fndecl,
10458 arg0, arg1, UNORDERED_EXPR,
10459 NOP_EXPR);
10460
10461 /* We do the folding for va_start in the expander. */
10462 case BUILT_IN_VA_START:
10463 break;
10464
10465 case BUILT_IN_OBJECT_SIZE:
10466 return fold_builtin_object_size (arg0, arg1);
10467
10468 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
10469 return fold_builtin_atomic_always_lock_free (arg0, arg1);
10470
10471 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
10472 return fold_builtin_atomic_is_lock_free (arg0, arg1);
10473
10474 default:
10475 break;
10476 }
10477 return NULL_TREE;
10478 }
10479
10480 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10481 and ARG2.
10482 This function returns NULL_TREE if no simplification was possible. */
10483
10484 static tree
10485 fold_builtin_3 (location_t loc, tree fndecl,
10486 tree arg0, tree arg1, tree arg2)
10487 {
10488 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10489 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10490 switch (fcode)
10491 {
10492
10493 CASE_FLT_FN (BUILT_IN_SINCOS):
10494 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10495
10496 CASE_FLT_FN (BUILT_IN_FMA):
10497 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
10498 break;
10499
10500 CASE_FLT_FN (BUILT_IN_REMQUO):
10501 if (validate_arg (arg0, REAL_TYPE)
10502 && validate_arg (arg1, REAL_TYPE)
10503 && validate_arg (arg2, POINTER_TYPE))
10504 return do_mpfr_remquo (arg0, arg1, arg2);
10505 break;
10506
10507 case BUILT_IN_STRNCMP:
10508 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10509
10510 case BUILT_IN_MEMCHR:
10511 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10512
10513 case BUILT_IN_BCMP:
10514 case BUILT_IN_MEMCMP:
10515 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
10516
10517 case BUILT_IN_EXPECT:
10518 return fold_builtin_expect (loc, arg0, arg1, arg2);
10519
10520 case BUILT_IN_ADD_OVERFLOW:
10521 case BUILT_IN_SUB_OVERFLOW:
10522 case BUILT_IN_MUL_OVERFLOW:
10523 case BUILT_IN_SADD_OVERFLOW:
10524 case BUILT_IN_SADDL_OVERFLOW:
10525 case BUILT_IN_SADDLL_OVERFLOW:
10526 case BUILT_IN_SSUB_OVERFLOW:
10527 case BUILT_IN_SSUBL_OVERFLOW:
10528 case BUILT_IN_SSUBLL_OVERFLOW:
10529 case BUILT_IN_SMUL_OVERFLOW:
10530 case BUILT_IN_SMULL_OVERFLOW:
10531 case BUILT_IN_SMULLL_OVERFLOW:
10532 case BUILT_IN_UADD_OVERFLOW:
10533 case BUILT_IN_UADDL_OVERFLOW:
10534 case BUILT_IN_UADDLL_OVERFLOW:
10535 case BUILT_IN_USUB_OVERFLOW:
10536 case BUILT_IN_USUBL_OVERFLOW:
10537 case BUILT_IN_USUBLL_OVERFLOW:
10538 case BUILT_IN_UMUL_OVERFLOW:
10539 case BUILT_IN_UMULL_OVERFLOW:
10540 case BUILT_IN_UMULLL_OVERFLOW:
10541 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
10542
10543 default:
10544 break;
10545 }
10546 return NULL_TREE;
10547 }
10548
10549 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10550 arguments. IGNORE is true if the result of the
10551 function call is ignored. This function returns NULL_TREE if no
10552 simplification was possible. */
10553
10554 tree
10555 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
10556 {
10557 tree ret = NULL_TREE;
10558
10559 switch (nargs)
10560 {
10561 case 0:
10562 ret = fold_builtin_0 (loc, fndecl);
10563 break;
10564 case 1:
10565 ret = fold_builtin_1 (loc, fndecl, args[0]);
10566 break;
10567 case 2:
10568 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
10569 break;
10570 case 3:
10571 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
10572 break;
10573 default:
10574 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
10575 break;
10576 }
10577 if (ret)
10578 {
10579 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10580 SET_EXPR_LOCATION (ret, loc);
10581 TREE_NO_WARNING (ret) = 1;
10582 return ret;
10583 }
10584 return NULL_TREE;
10585 }
10586
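/* Illustrative example: a successful fold is wrapped in a NOP_EXPR with
   TREE_NO_WARNING set, so that source such as

     (void) strcmp ("a", "a");

   which folds to the integer constant 0, does not then trigger a
   "statement with no effect" style warning for the value that replaced
   the call.  (Simplified sketch of the intent, not an exhaustive list of
   affected warnings.)  */
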
10587 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
10588 list ARGS along with N new arguments in NEWARGS. SKIP is the number
10589 of arguments in ARGS to be omitted. OLDNARGS is the number of
10590 elements in ARGS. */
10591
10592 static tree
10593 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
10594 int skip, tree fndecl, int n, va_list newargs)
10595 {
10596 int nargs = oldnargs - skip + n;
10597 tree *buffer;
10598
10599 if (n > 0)
10600 {
10601 int i, j;
10602
10603 buffer = XALLOCAVEC (tree, nargs);
10604 for (i = 0; i < n; i++)
10605 buffer[i] = va_arg (newargs, tree);
10606 for (j = skip; j < oldnargs; j++, i++)
10607 buffer[i] = args[j];
10608 }
10609 else
10610 buffer = args + skip;
10611
10612 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
10613 }
10614
10615 /* Return true if FNDECL shouldn't be folded right now.
10616 If a built-in function has an inline attribute always_inline
10617 wrapper, defer folding it until after always_inline functions have
10618 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
10619 might not be performed. */
10620
10621 bool
10622 avoid_folding_inline_builtin (tree fndecl)
10623 {
10624 return (DECL_DECLARED_INLINE_P (fndecl)
10625 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10626 && cfun
10627 && !cfun->always_inline_functions_inlined
10628 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
10629 }
10630
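/* Illustrative example: with -D_FORTIFY_SOURCE, C libraries typically
   provide always_inline wrappers roughly of the form

     extern __inline __attribute__ ((__always_inline__)) char *
     strcpy (char *__dest, const char *__src)
     {
       return __builtin___strcpy_chk (__dest, __src,
                                      __builtin_object_size (__dest, 1));
     }

   Folding a strcpy builtin before such a wrapper has been inlined would
   bypass the object-size check, hence the deferral above.  (The wrapper
   shown is a simplified sketch, not the exact library definition.)  */
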
10631 /* A wrapper function for builtin folding that prevents warnings for
10632 "statement without effect" and the like, caused by removing the
10633 call node earlier than the warning is generated. */
10634
10635 tree
10636 fold_call_expr (location_t loc, tree exp, bool ignore)
10637 {
10638 tree ret = NULL_TREE;
10639 tree fndecl = get_callee_fndecl (exp);
10640 if (fndecl
10641 && TREE_CODE (fndecl) == FUNCTION_DECL
10642 && DECL_BUILT_IN (fndecl)
10643 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10644 yet. Defer folding until we see all the arguments
10645 (after inlining). */
10646 && !CALL_EXPR_VA_ARG_PACK (exp))
10647 {
10648 int nargs = call_expr_nargs (exp);
10649
10650 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10651 instead the last argument is __builtin_va_arg_pack (). Defer folding
10652 even in that case, until arguments are finalized. */
10653 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10654 {
10655 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10656 if (fndecl2
10657 && TREE_CODE (fndecl2) == FUNCTION_DECL
10658 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10659 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10660 return NULL_TREE;
10661 }
10662
10663 if (avoid_folding_inline_builtin (fndecl))
10664 return NULL_TREE;
10665
10666 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10667 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
10668 CALL_EXPR_ARGP (exp), ignore);
10669 else
10670 {
10671 tree *args = CALL_EXPR_ARGP (exp);
10672 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
10673 if (ret)
10674 return ret;
10675 }
10676 }
10677 return NULL_TREE;
10678 }
10679
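/* Illustrative example: folding is also deferred while the last argument
   is a __builtin_va_arg_pack () call, as in an always_inline forwarding
   wrapper such as

     extern __inline __attribute__ ((__always_inline__)) int
     my_fprintf (FILE *f, const char *fmt, ...)
     {
       return fprintf (f, fmt, __builtin_va_arg_pack ());
     }

   Until the wrapper is inlined the real argument list is unknown, so any
   simplification made now could be invalidated.  (my_fprintf is a
   hypothetical name used only for this sketch.)  */
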
10680 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
10681 N arguments are passed in the array ARGARRAY. Return a folded
10682 expression or NULL_TREE if no simplification was possible. */
10683
10684 tree
10685 fold_builtin_call_array (location_t loc, tree,
10686 tree fn,
10687 int n,
10688 tree *argarray)
10689 {
10690 if (TREE_CODE (fn) != ADDR_EXPR)
10691 return NULL_TREE;
10692
10693 tree fndecl = TREE_OPERAND (fn, 0);
10694 if (TREE_CODE (fndecl) == FUNCTION_DECL
10695 && DECL_BUILT_IN (fndecl))
10696 {
10697 /* If last argument is __builtin_va_arg_pack (), arguments to this
10698 function are not finalized yet. Defer folding until they are. */
10699 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
10700 {
10701 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
10702 if (fndecl2
10703 && TREE_CODE (fndecl2) == FUNCTION_DECL
10704 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10705 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10706 return NULL_TREE;
10707 }
10708 if (avoid_folding_inline_builtin (fndecl))
10709 return NULL_TREE;
10710 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10711 return targetm.fold_builtin (fndecl, n, argarray, false);
10712 else
10713 return fold_builtin_n (loc, fndecl, argarray, n, false);
10714 }
10715
10716 return NULL_TREE;
10717 }
10718
10719 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10720 along with N new arguments specified as the "..." parameters. SKIP
10721 is the number of arguments in EXP to be omitted. This function is used
10722 to do varargs-to-varargs transformations. */
10723
10724 static tree
10725 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
10726 {
10727 va_list ap;
10728 tree t;
10729
10730 va_start (ap, n);
10731 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
10732 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
10733 va_end (ap);
10734
10735 return t;
10736 }
10737
10738 /* Validate a single argument ARG against a tree code CODE representing
10739 a type. */
10740
10741 static bool
10742 validate_arg (const_tree arg, enum tree_code code)
10743 {
10744 if (!arg)
10745 return false;
10746 else if (code == POINTER_TYPE)
10747 return POINTER_TYPE_P (TREE_TYPE (arg));
10748 else if (code == INTEGER_TYPE)
10749 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10750 return code == TREE_CODE (TREE_TYPE (arg));
10751 }
10752
10753 /* This function validates the types of a function call argument list
10754 against a specified list of tree_codes. If the last specifier is a 0,
10755 that represents an ellipsis, otherwise the last specifier must be a
10756 VOID_TYPE.
10757
10758 This is the GIMPLE version of validate_arglist. Eventually we want to
10759 completely convert builtins.c to work from GIMPLEs and the tree based
10760 validate_arglist will then be removed. */
10761
10762 bool
10763 validate_gimple_arglist (const gcall *call, ...)
10764 {
10765 enum tree_code code;
10766 bool res = false;
10767 va_list ap;
10768 const_tree arg;
10769 size_t i;
10770
10771 va_start (ap, call);
10772 i = 0;
10773
10774 do
10775 {
10776 code = (enum tree_code) va_arg (ap, int);
10777 switch (code)
10778 {
10779 case 0:
10780 /* This signifies an ellipsis; any further arguments are all ok. */
10781 res = true;
10782 goto end;
10783 case VOID_TYPE:
10784 /* This signifies an endlink; if no arguments remain, return
10785 true, otherwise return false. */
10786 res = (i == gimple_call_num_args (call));
10787 goto end;
10788 default:
10789 /* If no parameters remain or the parameter's code does not
10790 match the specified code, return false. Otherwise continue
10791 checking any remaining arguments. */
10792 arg = gimple_call_arg (call, i++);
10793 if (!validate_arg (arg, code))
10794 goto end;
10795 break;
10796 }
10797 }
10798 while (1);
10799
10800 /* We need gotos here since we can only have one VA_CLOSE in a
10801 function. */
10802 end: ;
10803 va_end (ap);
10804
10805 return res;
10806 }
10807
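/* Illustrative example of the specifier list accepted above:

     validate_gimple_arglist (call, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)

   accepts exactly two arguments, a pointer followed by an integral
   value, while ending the list with 0 instead of VOID_TYPE would allow
   any number of additional trailing arguments.  */
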
10808 /* Default target-specific builtin expander that does nothing. */
10809
10810 rtx
10811 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10812 rtx target ATTRIBUTE_UNUSED,
10813 rtx subtarget ATTRIBUTE_UNUSED,
10814 machine_mode mode ATTRIBUTE_UNUSED,
10815 int ignore ATTRIBUTE_UNUSED)
10816 {
10817 return NULL_RTX;
10818 }
10819
10820 /* Returns true if EXP represents data that would potentially reside
10821 in a readonly section. */
10822
10823 bool
10824 readonly_data_expr (tree exp)
10825 {
10826 STRIP_NOPS (exp);
10827
10828 if (TREE_CODE (exp) != ADDR_EXPR)
10829 return false;
10830
10831 exp = get_base_address (TREE_OPERAND (exp, 0));
10832 if (!exp)
10833 return false;
10834
10835 /* Make sure we call decl_readonly_section only for trees it
10836 can handle (since it returns true for everything it doesn't
10837 understand). */
10838 if (TREE_CODE (exp) == STRING_CST
10839 || TREE_CODE (exp) == CONSTRUCTOR
10840 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
10841 return decl_readonly_section (exp, 0);
10842 else
10843 return false;
10844 }
10845
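/* Illustrative example: readonly_data_expr is true for the address of a
   string literal or of a TREE_STATIC variable placed in a read-only
   section, e.g. the source operand in

     __memmove_chk (buf, "constant data", n, bos);

   which lets the expander below treat the call like __memcpy_chk, since
   read-only data cannot overlap a writable destination.  (buf, n and bos
   are placeholders for this sketch.)  */
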
10846 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
10847 to the call, and TYPE is its return type.
10848
10849 Return NULL_TREE if no simplification was possible, otherwise return the
10850 simplified form of the call as a tree.
10851
10852 The simplified form may be a constant or other expression which
10853 computes the same value, but in a more efficient manner (including
10854 calls to other builtin functions).
10855
10856 The call may contain arguments which need to be evaluated, but
10857 which are not useful to determine the result of the call. In
10858 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10859 COMPOUND_EXPR will be an argument which must be evaluated.
10860 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10861 COMPOUND_EXPR in the chain will contain the tree for the simplified
10862 form of the builtin function call. */
10863
10864 static tree
10865 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
10866 {
10867 if (!validate_arg (s1, POINTER_TYPE)
10868 || !validate_arg (s2, POINTER_TYPE))
10869 return NULL_TREE;
10870 else
10871 {
10872 tree fn;
10873 const char *p1, *p2;
10874
10875 p2 = c_getstr (s2);
10876 if (p2 == NULL)
10877 return NULL_TREE;
10878
10879 p1 = c_getstr (s1);
10880 if (p1 != NULL)
10881 {
10882 const char *r = strstr (p1, p2);
10883 tree tem;
10884
10885 if (r == NULL)
10886 return build_int_cst (TREE_TYPE (s1), 0);
10887
10888 /* Return an offset into the constant string argument. */
10889 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10890 return fold_convert_loc (loc, type, tem);
10891 }
10892
10893 /* The argument is const char *, and the result is char *, so we need
10894 a type conversion here to avoid a warning. */
10895 if (p2[0] == '\0')
10896 return fold_convert_loc (loc, type, s1);
10897
10898 if (p2[1] != '\0')
10899 return NULL_TREE;
10900
10901 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
10902 if (!fn)
10903 return NULL_TREE;
10904
10905 /* New argument list transforming strstr(s1, s2) to
10906 strchr(s1, s2[0]). */
10907 return build_call_expr_loc (loc, fn, 2, s1,
10908 build_int_cst (integer_type_node, p2[0]));
10909 }
10910 }
10911
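/* Illustrative examples of the strstr folds above, assuming constant
   strings where shown:

     strstr ("abcde", "cd")  ->  pointer to "abcde" + 2
     strstr ("abcde", "xy")  ->  null pointer
     strstr (s, "")          ->  (char *) s
     strstr (s, "c")         ->  strchr (s, 'c')  */
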
10912 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
10913 the call, and TYPE is its return type.
10914
10915 Return NULL_TREE if no simplification was possible, otherwise return the
10916 simplified form of the call as a tree.
10917
10918 The simplified form may be a constant or other expression which
10919 computes the same value, but in a more efficient manner (including
10920 calls to other builtin functions).
10921
10922 The call may contain arguments which need to be evaluated, but
10923 which are not useful to determine the result of the call. In
10924 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10925 COMPOUND_EXPR will be an argument which must be evaluated.
10926 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10927 COMPOUND_EXPR in the chain will contain the tree for the simplified
10928 form of the builtin function call. */
10929
10930 static tree
10931 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
10932 {
10933 if (!validate_arg (s1, POINTER_TYPE)
10934 || !validate_arg (s2, INTEGER_TYPE))
10935 return NULL_TREE;
10936 else
10937 {
10938 const char *p1;
10939
10940 if (TREE_CODE (s2) != INTEGER_CST)
10941 return NULL_TREE;
10942
10943 p1 = c_getstr (s1);
10944 if (p1 != NULL)
10945 {
10946 char c;
10947 const char *r;
10948 tree tem;
10949
10950 if (target_char_cast (s2, &c))
10951 return NULL_TREE;
10952
10953 r = strchr (p1, c);
10954
10955 if (r == NULL)
10956 return build_int_cst (TREE_TYPE (s1), 0);
10957
10958 /* Return an offset into the constant string argument. */
10959 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10960 return fold_convert_loc (loc, type, tem);
10961 }
10962 return NULL_TREE;
10963 }
10964 }
10965
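/* Illustrative examples of the strchr folds above, for a constant string
   and character:

     strchr ("hello", 'l')  ->  pointer to "hello" + 2
     strchr ("hello", 'z')  ->  null pointer  */
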
10966 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
10967 the call, and TYPE is its return type.
10968
10969 Return NULL_TREE if no simplification was possible, otherwise return the
10970 simplified form of the call as a tree.
10971
10972 The simplified form may be a constant or other expression which
10973 computes the same value, but in a more efficient manner (including
10974 calls to other builtin functions).
10975
10976 The call may contain arguments which need to be evaluated, but
10977 which are not useful to determine the result of the call. In
10978 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10979 COMPOUND_EXPR will be an argument which must be evaluated.
10980 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10981 COMPOUND_EXPR in the chain will contain the tree for the simplified
10982 form of the builtin function call. */
10983
10984 static tree
10985 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
10986 {
10987 if (!validate_arg (s1, POINTER_TYPE)
10988 || !validate_arg (s2, INTEGER_TYPE))
10989 return NULL_TREE;
10990 else
10991 {
10992 tree fn;
10993 const char *p1;
10994
10995 if (TREE_CODE (s2) != INTEGER_CST)
10996 return NULL_TREE;
10997
10998 p1 = c_getstr (s1);
10999 if (p1 != NULL)
11000 {
11001 char c;
11002 const char *r;
11003 tree tem;
11004
11005 if (target_char_cast (s2, &c))
11006 return NULL_TREE;
11007
11008 r = strrchr (p1, c);
11009
11010 if (r == NULL)
11011 return build_int_cst (TREE_TYPE (s1), 0);
11012
11013 /* Return an offset into the constant string argument. */
11014 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11015 return fold_convert_loc (loc, type, tem);
11016 }
11017
11018 if (! integer_zerop (s2))
11019 return NULL_TREE;
11020
11021 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11022 if (!fn)
11023 return NULL_TREE;
11024
11025 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11026 return build_call_expr_loc (loc, fn, 2, s1, s2);
11027 }
11028 }
11029
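/* Illustrative example: besides folding a fully constant call, the only
   simplification above for a non-constant string is

     strrchr (s, '\0')  ->  strchr (s, '\0')

   since searching for the terminating NUL finds the same position from
   either direction.  */
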
11030 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11031 to the call, and TYPE is its return type.
11032
11033 Return NULL_TREE if no simplification was possible, otherwise return the
11034 simplified form of the call as a tree.
11035
11036 The simplified form may be a constant or other expression which
11037 computes the same value, but in a more efficient manner (including
11038 calls to other builtin functions).
11039
11040 The call may contain arguments which need to be evaluated, but
11041 which are not useful to determine the result of the call. In
11042 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11043 COMPOUND_EXPR will be an argument which must be evaluated.
11044 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11045 COMPOUND_EXPR in the chain will contain the tree for the simplified
11046 form of the builtin function call. */
11047
11048 static tree
11049 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
11050 {
11051 if (!validate_arg (s1, POINTER_TYPE)
11052 || !validate_arg (s2, POINTER_TYPE))
11053 return NULL_TREE;
11054 else
11055 {
11056 tree fn;
11057 const char *p1, *p2;
11058
11059 p2 = c_getstr (s2);
11060 if (p2 == NULL)
11061 return NULL_TREE;
11062
11063 p1 = c_getstr (s1);
11064 if (p1 != NULL)
11065 {
11066 const char *r = strpbrk (p1, p2);
11067 tree tem;
11068
11069 if (r == NULL)
11070 return build_int_cst (TREE_TYPE (s1), 0);
11071
11072 /* Return an offset into the constant string argument. */
11073 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11074 return fold_convert_loc (loc, type, tem);
11075 }
11076
11077 if (p2[0] == '\0')
11078 /* strpbrk(x, "") == NULL.
11079 Evaluate and ignore s1 in case it had side-effects. */
11080 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
11081
11082 if (p2[1] != '\0')
11083 return NULL_TREE; /* Really call strpbrk. */
11084
11085 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11086 if (!fn)
11087 return NULL_TREE;
11088
11089 /* New argument list transforming strpbrk(s1, s2) to
11090 strchr(s1, s2[0]). */
11091 return build_call_expr_loc (loc, fn, 2, s1,
11092 build_int_cst (integer_type_node, p2[0]));
11093 }
11094 }
11095
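/* Illustrative examples of the strpbrk folds above:

     strpbrk ("abc", "xb")  ->  pointer to "abc" + 1
     strpbrk (s, "")        ->  null pointer (s still evaluated for
                                side effects)
     strpbrk (s, "c")       ->  strchr (s, 'c')  */
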
11096 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11097 to the call.
11098
11099 Return NULL_TREE if no simplification was possible, otherwise return the
11100 simplified form of the call as a tree.
11101
11102 The simplified form may be a constant or other expression which
11103 computes the same value, but in a more efficient manner (including
11104 calls to other builtin functions).
11105
11106 The call may contain arguments which need to be evaluated, but
11107 which are not useful to determine the result of the call. In
11108 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11109 COMPOUND_EXPR will be an argument which must be evaluated.
11110 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11111 COMPOUND_EXPR in the chain will contain the tree for the simplified
11112 form of the builtin function call. */
11113
11114 static tree
11115 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11116 {
11117 if (!validate_arg (s1, POINTER_TYPE)
11118 || !validate_arg (s2, POINTER_TYPE))
11119 return NULL_TREE;
11120 else
11121 {
11122 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11123
11124 /* If both arguments are constants, evaluate at compile-time. */
11125 if (p1 && p2)
11126 {
11127 const size_t r = strspn (p1, p2);
11128 return build_int_cst (size_type_node, r);
11129 }
11130
11131 /* If either argument is "", the result is 0. */
11132 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11133 /* Evaluate and ignore both arguments in case either one has
11134 side-effects. */
11135 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11136 s1, s2);
11137 return NULL_TREE;
11138 }
11139 }
11140
11141 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11142 to the call.
11143
11144 Return NULL_TREE if no simplification was possible, otherwise return the
11145 simplified form of the call as a tree.
11146
11147 The simplified form may be a constant or other expression which
11148 computes the same value, but in a more efficient manner (including
11149 calls to other builtin functions).
11150
11151 The call may contain arguments which need to be evaluated, but
11152 which are not useful to determine the result of the call. In
11153 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11154 COMPOUND_EXPR will be an argument which must be evaluated.
11155 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11156 COMPOUND_EXPR in the chain will contain the tree for the simplified
11157 form of the builtin function call. */
11158
11159 static tree
11160 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
11161 {
11162 if (!validate_arg (s1, POINTER_TYPE)
11163 || !validate_arg (s2, POINTER_TYPE))
11164 return NULL_TREE;
11165 else
11166 {
11167 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11168
11169 /* If both arguments are constants, evaluate at compile-time. */
11170 if (p1 && p2)
11171 {
11172 const size_t r = strcspn (p1, p2);
11173 return build_int_cst (size_type_node, r);
11174 }
11175
11176 /* If the first argument is "", the result is 0. */
11177 if (p1 && *p1 == '\0')
11178 {
11179 /* Evaluate and ignore argument s2 in case it has
11180 side-effects. */
11181 return omit_one_operand_loc (loc, size_type_node,
11182 size_zero_node, s2);
11183 }
11184
11185 /* If the second argument is "", return __builtin_strlen(s1). */
11186 if (p2 && *p2 == '\0')
11187 {
11188 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
11189
11190 /* If the replacement _DECL isn't initialized, don't do the
11191 transformation. */
11192 if (!fn)
11193 return NULL_TREE;
11194
11195 return build_call_expr_loc (loc, fn, 1, s1);
11196 }
11197 return NULL_TREE;
11198 }
11199 }
11200
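/* Illustrative examples of the strspn and strcspn folds above:

     strspn ("aab", "ab")  ->  (size_t) 3
     strspn (s, "")        ->  (size_t) 0   (s still evaluated)
     strcspn ("", s)       ->  (size_t) 0   (s still evaluated)
     strcspn (s, "")       ->  strlen (s)   */
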
11201 /* Fold the next_arg or va_start call EXP. Returns true if an error was
11202 produced, false otherwise. This is done so that we don't output the error
11203 or warning twice or three times. */
11204
11205 bool
11206 fold_builtin_next_arg (tree exp, bool va_start_p)
11207 {
11208 tree fntype = TREE_TYPE (current_function_decl);
11209 int nargs = call_expr_nargs (exp);
11210 tree arg;
11211 /* There is a good chance the current input_location points inside the
11212 definition of the va_start macro (perhaps on the token for
11213 builtin) in a system header, so warnings will not be emitted.
11214 Use the location in real source code. */
11215 source_location current_location =
11216 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
11217 NULL);
11218
11219 if (!stdarg_p (fntype))
11220 {
11221 error ("%<va_start%> used in function with fixed args");
11222 return true;
11223 }
11224
11225 if (va_start_p)
11226 {
11227 if (nargs != 2)
11228 {
11229 error ("wrong number of arguments to function %<va_start%>");
11230 return true;
11231 }
11232 arg = CALL_EXPR_ARG (exp, 1);
11233 }
11234 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11235 when we checked the arguments and if needed issued a warning. */
11236 else
11237 {
11238 if (nargs == 0)
11239 {
11240 /* Evidently an out of date version of <stdarg.h>; can't validate
11241 va_start's second argument, but can still work as intended. */
11242 warning_at (current_location,
11243 OPT_Wvarargs,
11244 "%<__builtin_next_arg%> called without an argument");
11245 return true;
11246 }
11247 else if (nargs > 1)
11248 {
11249 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11250 return true;
11251 }
11252 arg = CALL_EXPR_ARG (exp, 0);
11253 }
11254
11255 if (TREE_CODE (arg) == SSA_NAME)
11256 arg = SSA_NAME_VAR (arg);
11257
11258 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11259 or __builtin_next_arg (0) the first time we see it, after checking
11260 the arguments and if needed issuing a warning. */
11261 if (!integer_zerop (arg))
11262 {
11263 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11264
11265 /* Strip off all nops for the sake of the comparison. This
11266 is not quite the same as STRIP_NOPS. It does more.
11267 We must also strip off INDIRECT_EXPR for C++ reference
11268 parameters. */
11269 while (CONVERT_EXPR_P (arg)
11270 || TREE_CODE (arg) == INDIRECT_REF)
11271 arg = TREE_OPERAND (arg, 0);
11272 if (arg != last_parm)
11273 {
11274 /* FIXME: Sometimes with the tree optimizers we can end up with
11275 something other than the last argument even though the user
11276 used the last argument. We just warn and carry on as if it
11277 were the last argument, so wrong code may result because
11278 of it. */
11279 warning_at (current_location,
11280 OPT_Wvarargs,
11281 "second parameter of %<va_start%> not last named argument");
11282 }
11283
11284 /* Undefined by C99 7.15.1.4p4 (va_start):
11285 "If the parameter parmN is declared with the register storage
11286 class, with a function or array type, or with a type that is
11287 not compatible with the type that results after application of
11288 the default argument promotions, the behavior is undefined."
11289 */
11290 else if (DECL_REGISTER (arg))
11291 {
11292 warning_at (current_location,
11293 OPT_Wvarargs,
11294 "undefined behaviour when second parameter of "
11295 "%<va_start%> is declared with %<register%> storage");
11296 }
11297
11298 /* We want to verify the second parameter just once before the tree
11299 optimizers are run and then avoid keeping it in the tree,
11300 as otherwise we could warn even for correct code like:
11301 void foo (int i, ...)
11302 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11303 if (va_start_p)
11304 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11305 else
11306 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11307 }
11308 return false;
11309 }
11310
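/* Illustrative example of the diagnostics above:

     void f (int a, int b, ...)
     {
       __builtin_va_list ap;
       __builtin_va_start (ap, a);
       __builtin_va_end (ap);
     }

   draws the "not last named argument" warning because a rather than b is
   passed to va_start, while calling va_start in a function declared
   without an ellipsis is rejected with "used in function with fixed
   args".  */
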
11311
11312 /* Expand a call EXP to __builtin_object_size. */
11313
11314 static rtx
11315 expand_builtin_object_size (tree exp)
11316 {
11317 tree ost;
11318 int object_size_type;
11319 tree fndecl = get_callee_fndecl (exp);
11320
11321 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11322 {
11323 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11324 exp, fndecl);
11325 expand_builtin_trap ();
11326 return const0_rtx;
11327 }
11328
11329 ost = CALL_EXPR_ARG (exp, 1);
11330 STRIP_NOPS (ost);
11331
11332 if (TREE_CODE (ost) != INTEGER_CST
11333 || tree_int_cst_sgn (ost) < 0
11334 || compare_tree_int (ost, 3) > 0)
11335 {
11336 error ("%Klast argument of %D is not integer constant between 0 and 3",
11337 exp, fndecl);
11338 expand_builtin_trap ();
11339 return const0_rtx;
11340 }
11341
11342 object_size_type = tree_to_shwi (ost);
11343
11344 return object_size_type < 2 ? constm1_rtx : const0_rtx;
11345 }
11346
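/* Illustrative example: when the size of the pointed-to object is not
   known at this point, __builtin_object_size (p, type) expands to the
   documented "unknown" result:

     type 0 or 1  ->  (size_t) -1
     type 2 or 3  ->  (size_t) 0  */
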
11347 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11348 FCODE is the BUILT_IN_* to use.
11349 Return NULL_RTX if we failed; the caller should emit a normal call,
11350 otherwise try to get the result in TARGET, if convenient (and in
11351 mode MODE if that's convenient). */
11352
11353 static rtx
11354 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
11355 enum built_in_function fcode)
11356 {
11357 tree dest, src, len, size;
11358
11359 if (!validate_arglist (exp,
11360 POINTER_TYPE,
11361 fcode == BUILT_IN_MEMSET_CHK
11362 ? INTEGER_TYPE : POINTER_TYPE,
11363 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11364 return NULL_RTX;
11365
11366 dest = CALL_EXPR_ARG (exp, 0);
11367 src = CALL_EXPR_ARG (exp, 1);
11368 len = CALL_EXPR_ARG (exp, 2);
11369 size = CALL_EXPR_ARG (exp, 3);
11370
11371 if (! tree_fits_uhwi_p (size))
11372 return NULL_RTX;
11373
11374 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
11375 {
11376 tree fn;
11377
11378 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
11379 {
11380 warning_at (tree_nonartificial_location (exp),
11381 0, "%Kcall to %D will always overflow destination buffer",
11382 exp, get_callee_fndecl (exp));
11383 return NULL_RTX;
11384 }
11385
11386 fn = NULL_TREE;
11387 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11388 mem{cpy,pcpy,move,set} is available. */
11389 switch (fcode)
11390 {
11391 case BUILT_IN_MEMCPY_CHK:
11392 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
11393 break;
11394 case BUILT_IN_MEMPCPY_CHK:
11395 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
11396 break;
11397 case BUILT_IN_MEMMOVE_CHK:
11398 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
11399 break;
11400 case BUILT_IN_MEMSET_CHK:
11401 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
11402 break;
11403 default:
11404 break;
11405 }
11406
11407 if (! fn)
11408 return NULL_RTX;
11409
11410 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
11411 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11412 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11413 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11414 }
11415 else if (fcode == BUILT_IN_MEMSET_CHK)
11416 return NULL_RTX;
11417 else
11418 {
11419 unsigned int dest_align = get_pointer_alignment (dest);
11420
11421 /* If DEST is not a pointer type, call the normal function. */
11422 if (dest_align == 0)
11423 return NULL_RTX;
11424
11425 /* If SRC and DEST are the same (and not volatile), do nothing. */
11426 if (operand_equal_p (src, dest, 0))
11427 {
11428 tree expr;
11429
11430 if (fcode != BUILT_IN_MEMPCPY_CHK)
11431 {
11432 /* Evaluate and ignore LEN in case it has side-effects. */
11433 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11434 return expand_expr (dest, target, mode, EXPAND_NORMAL);
11435 }
11436
11437 expr = fold_build_pointer_plus (dest, len);
11438 return expand_expr (expr, target, mode, EXPAND_NORMAL);
11439 }
11440
11441 /* __memmove_chk special case. */
11442 if (fcode == BUILT_IN_MEMMOVE_CHK)
11443 {
11444 unsigned int src_align = get_pointer_alignment (src);
11445
11446 if (src_align == 0)
11447 return NULL_RTX;
11448
11449 /* If src is categorized for a readonly section we can use
11450 normal __memcpy_chk. */
11451 if (readonly_data_expr (src))
11452 {
11453 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
11454 if (!fn)
11455 return NULL_RTX;
11456 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
11457 dest, src, len, size);
11458 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11459 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11460 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11461 }
11462 }
11463 return NULL_RTX;
11464 }
11465 }
11466
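/* Illustrative examples, assuming the length and object size are known
   constants:

     __memcpy_chk (d, s, 16, 32)  ->  memcpy (d, s, 16)
     __memcpy_chk (d, s, 64, 32)  ->  warning, normal call emitted
                                      (it will always overflow)

   and a __memmove_chk whose source is read-only data may be retried as
   __memcpy_chk, as handled above.  */
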
11467 /* Emit warning if a buffer overflow is detected at compile time. */
11468
11469 static void
11470 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11471 {
11472 int is_strlen = 0;
11473 tree len, size;
11474 location_t loc = tree_nonartificial_location (exp);
11475
11476 switch (fcode)
11477 {
11478 case BUILT_IN_STRCPY_CHK:
11479 case BUILT_IN_STPCPY_CHK:
11480 /* For __strcat_chk the warning will be emitted only if overflowing
11481 by at least strlen (dest) + 1 bytes. */
11482 case BUILT_IN_STRCAT_CHK:
11483 len = CALL_EXPR_ARG (exp, 1);
11484 size = CALL_EXPR_ARG (exp, 2);
11485 is_strlen = 1;
11486 break;
11487 case BUILT_IN_STRNCAT_CHK:
11488 case BUILT_IN_STRNCPY_CHK:
11489 case BUILT_IN_STPNCPY_CHK:
11490 len = CALL_EXPR_ARG (exp, 2);
11491 size = CALL_EXPR_ARG (exp, 3);
11492 break;
11493 case BUILT_IN_SNPRINTF_CHK:
11494 case BUILT_IN_VSNPRINTF_CHK:
11495 len = CALL_EXPR_ARG (exp, 1);
11496 size = CALL_EXPR_ARG (exp, 3);
11497 break;
11498 default:
11499 gcc_unreachable ();
11500 }
11501
11502 if (!len || !size)
11503 return;
11504
11505 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
11506 return;
11507
11508 if (is_strlen)
11509 {
11510 len = c_strlen (len, 1);
11511 if (! len || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
11512 return;
11513 }
11514 else if (fcode == BUILT_IN_STRNCAT_CHK)
11515 {
11516 tree src = CALL_EXPR_ARG (exp, 1);
11517 if (! src || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
11518 return;
11519 src = c_strlen (src, 1);
11520 if (! src || ! tree_fits_uhwi_p (src))
11521 {
11522 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
11523 exp, get_callee_fndecl (exp));
11524 return;
11525 }
11526 else if (tree_int_cst_lt (src, size))
11527 return;
11528 }
11529 else if (! tree_fits_uhwi_p (len) || ! tree_int_cst_lt (size, len))
11530 return;
11531
11532 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
11533 exp, get_callee_fndecl (exp));
11534 }
11535
11536 /* Emit warning if a buffer overflow is detected at compile time
11537 in __sprintf_chk/__vsprintf_chk calls. */
11538
11539 static void
11540 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11541 {
11542 tree size, len, fmt;
11543 const char *fmt_str;
11544 int nargs = call_expr_nargs (exp);
11545
11546 /* Verify the required arguments in the original call. */
11547
11548 if (nargs < 4)
11549 return;
11550 size = CALL_EXPR_ARG (exp, 2);
11551 fmt = CALL_EXPR_ARG (exp, 3);
11552
11553 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
11554 return;
11555
11556 /* Check whether the format is a literal string constant. */
11557 fmt_str = c_getstr (fmt);
11558 if (fmt_str == NULL)
11559 return;
11560
11561 if (!init_target_chars ())
11562 return;
11563
11564 /* If the format doesn't contain % args or %%, we know its size. */
11565 if (strchr (fmt_str, target_percent) == 0)
11566 len = build_int_cstu (size_type_node, strlen (fmt_str));
11567 /* If the format is "%s" and the first ... argument is a string literal,
11568 we know it too. */
11569 else if (fcode == BUILT_IN_SPRINTF_CHK
11570 && strcmp (fmt_str, target_percent_s) == 0)
11571 {
11572 tree arg;
11573
11574 if (nargs < 5)
11575 return;
11576 arg = CALL_EXPR_ARG (exp, 4);
11577 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11578 return;
11579
11580 len = c_strlen (arg, 1);
11581 if (!len || ! tree_fits_uhwi_p (len))
11582 return;
11583 }
11584 else
11585 return;
11586
11587 if (! tree_int_cst_lt (len, size))
11588 warning_at (tree_nonartificial_location (exp),
11589 0, "%Kcall to %D will always overflow destination buffer",
11590 exp, get_callee_fndecl (exp));
11591 }
11592
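/* Illustrative example of the compile-time check above: with
   -D_FORTIFY_SOURCE, something along the lines of

     char buf[4];
     sprintf (buf, "%s", "too long");

   reaches __sprintf_chk with a known object size of 4 and a literal "%s"
   argument of length 8, so the "will always overflow destination buffer"
   warning is emitted.  (Simplified sketch of the fortified path.)  */
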
11593 /* Emit warning if a free is called with address of a variable. */
11594
11595 static void
11596 maybe_emit_free_warning (tree exp)
11597 {
11598 tree arg = CALL_EXPR_ARG (exp, 0);
11599
11600 STRIP_NOPS (arg);
11601 if (TREE_CODE (arg) != ADDR_EXPR)
11602 return;
11603
11604 arg = get_base_address (TREE_OPERAND (arg, 0));
11605 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
11606 return;
11607
11608 if (SSA_VAR_P (arg))
11609 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
11610 "%Kattempt to free a non-heap object %qD", exp, arg);
11611 else
11612 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
11613 "%Kattempt to free a non-heap object", exp);
11614 }
11615
11616 /* Fold a call to __builtin_object_size with arguments PTR and OST,
11617 if possible. */
11618
11619 static tree
11620 fold_builtin_object_size (tree ptr, tree ost)
11621 {
11622 unsigned HOST_WIDE_INT bytes;
11623 int object_size_type;
11624
11625 if (!validate_arg (ptr, POINTER_TYPE)
11626 || !validate_arg (ost, INTEGER_TYPE))
11627 return NULL_TREE;
11628
11629 STRIP_NOPS (ost);
11630
11631 if (TREE_CODE (ost) != INTEGER_CST
11632 || tree_int_cst_sgn (ost) < 0
11633 || compare_tree_int (ost, 3) > 0)
11634 return NULL_TREE;
11635
11636 object_size_type = tree_to_shwi (ost);
11637
11638 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11639 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11640 and (size_t) 0 for types 2 and 3. */
11641 if (TREE_SIDE_EFFECTS (ptr))
11642 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
11643
11644 if (TREE_CODE (ptr) == ADDR_EXPR)
11645 {
11646 bytes = compute_builtin_object_size (ptr, object_size_type);
11647 if (wi::fits_to_tree_p (bytes, size_type_node))
11648 return build_int_cstu (size_type_node, bytes);
11649 }
11650 else if (TREE_CODE (ptr) == SSA_NAME)
11651 {
11652 /* If object size is not known yet, delay folding until
11653 later. Maybe subsequent passes will help determining
11654 it. */
11655 bytes = compute_builtin_object_size (ptr, object_size_type);
11656 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
11657 && wi::fits_to_tree_p (bytes, size_type_node))
11658 return build_int_cstu (size_type_node, bytes);
11659 }
11660
11661 return NULL_TREE;
11662 }
11663
11664 /* Builtins with folding operations that operate on "..." arguments
11665 need special handling; we need to store the arguments in a convenient
11666 data structure before attempting any folding. Fortunately there are
11667 only a few builtins that fall into this category. FNDECL is the
11668 function, EXP is the CALL_EXPR for the call. */
11669
11670 static tree
11671 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
11672 {
11673 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11674 tree ret = NULL_TREE;
11675
11676 switch (fcode)
11677 {
11678 case BUILT_IN_FPCLASSIFY:
11679 ret = fold_builtin_fpclassify (loc, args, nargs);
11680 break;
11681
11682 default:
11683 break;
11684 }
11685 if (ret)
11686 {
11687 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11688 SET_EXPR_LOCATION (ret, loc);
11689 TREE_NO_WARNING (ret) = 1;
11690 return ret;
11691 }
11692 return NULL_TREE;
11693 }
11694
11695 /* Initialize format string characters in the target charset. */
11696
11697 bool
11698 init_target_chars (void)
11699 {
11700 static bool init;
11701 if (!init)
11702 {
11703 target_newline = lang_hooks.to_target_charset ('\n');
11704 target_percent = lang_hooks.to_target_charset ('%');
11705 target_c = lang_hooks.to_target_charset ('c');
11706 target_s = lang_hooks.to_target_charset ('s');
11707 if (target_newline == 0 || target_percent == 0 || target_c == 0
11708 || target_s == 0)
11709 return false;
11710
11711 target_percent_c[0] = target_percent;
11712 target_percent_c[1] = target_c;
11713 target_percent_c[2] = '\0';
11714
11715 target_percent_s[0] = target_percent;
11716 target_percent_s[1] = target_s;
11717 target_percent_s[2] = '\0';
11718
11719 target_percent_s_newline[0] = target_percent;
11720 target_percent_s_newline[1] = target_s;
11721 target_percent_s_newline[2] = target_newline;
11722 target_percent_s_newline[3] = '\0';
11723
11724 init = true;
11725 }
11726 return true;
11727 }
11728
11729 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
11730 and no overflow/underflow occurred. INEXACT is true if M was not
11731 exactly calculated. TYPE is the tree type for the result. This
11732 function assumes that you cleared the MPFR flags and then
11733 calculated M to see if anything subsequently set a flag prior to
11734 entering this function. Return NULL_TREE if any checks fail. */
11735
11736 static tree
11737 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
11738 {
11739 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11740 overflow/underflow occurred. If -frounding-math, proceed iff the
11741 result of calling FUNC was exact. */
11742 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
11743 && (!flag_rounding_math || !inexact))
11744 {
11745 REAL_VALUE_TYPE rr;
11746
11747 real_from_mpfr (&rr, m, type, GMP_RNDN);
11748 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
11749 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
11750 but the mpfr_t is not, then we underflowed in the
11751 conversion. */
11752 if (real_isfinite (&rr)
11753 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
11754 {
11755 REAL_VALUE_TYPE rmode;
11756
11757 real_convert (&rmode, TYPE_MODE (type), &rr);
11758 /* Proceed iff the specified mode can hold the value. */
11759 if (real_identical (&rmode, &rr))
11760 return build_real (type, rmode);
11761 }
11762 }
11763 return NULL_TREE;
11764 }
11765
11766 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
11767 number and no overflow/underflow occurred. INEXACT is true if M
11768 was not exactly calculated. TYPE is the tree type for the result.
11769 This function assumes that you cleared the MPFR flags and then
11770 calculated M to see if anything subsequently set a flag prior to
11771 entering this function. Return NULL_TREE if any checks fail, if
11772 FORCE_CONVERT is true, then bypass the checks. */
11773
11774 static tree
11775 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
11776 {
11777 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11778 overflow/underflow occurred. If -frounding-math, proceed iff the
11779 result of calling FUNC was exact. */
11780 if (force_convert
11781 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
11782 && !mpfr_overflow_p () && !mpfr_underflow_p ()
11783 && (!flag_rounding_math || !inexact)))
11784 {
11785 REAL_VALUE_TYPE re, im;
11786
11787 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
11788 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
11789 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
11790 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
11791 but the mpfr_t is not, then we underflowed in the
11792 conversion. */
11793 if (force_convert
11794 || (real_isfinite (&re) && real_isfinite (&im)
11795 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
11796 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
11797 {
11798 REAL_VALUE_TYPE re_mode, im_mode;
11799
11800 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
11801 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
11802 /* Proceed iff the specified mode can hold the value. */
11803 if (force_convert
11804 || (real_identical (&re_mode, &re)
11805 && real_identical (&im_mode, &im)))
11806 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
11807 build_real (TREE_TYPE (type), im_mode));
11808 }
11809 }
11810 return NULL_TREE;
11811 }
11812
11813 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
11814 FUNC on it and return the resulting value as a tree with type TYPE.
11815 If MIN and/or MAX are not NULL, then the supplied ARG must be
11816 within those bounds. If INCLUSIVE is true, then MIN/MAX are
11817 acceptable values, otherwise they are not. The mpfr precision is
11818 set to the precision of TYPE. We assume that function FUNC returns
11819 zero if the result could be calculated exactly within the requested
11820 precision. */
11821
11822 static tree
11823 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
11824 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
11825 bool inclusive)
11826 {
11827 tree result = NULL_TREE;
11828
11829 STRIP_NOPS (arg);
11830
11831 /* To proceed, MPFR must exactly represent the target floating point
11832 format, which only happens when the target base equals two. */
11833 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11834 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
11835 {
11836 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
11837
11838 if (real_isfinite (ra)
11839 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
11840 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
11841 {
11842 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11843 const int prec = fmt->p;
11844 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
11845 int inexact;
11846 mpfr_t m;
11847
11848 mpfr_init2 (m, prec);
11849 mpfr_from_real (m, ra, GMP_RNDN);
11850 mpfr_clear_flags ();
11851 inexact = func (m, m, rnd);
11852 result = do_mpfr_ckconv (m, type, inexact);
11853 mpfr_clear (m);
11854 }
11855 }
11856
11857 return result;
11858 }
11859
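/* Illustrative example: do_mpfr_arg1 is how a call such as

     sin (1.0)

   with a constant argument can be folded to a REAL_CST computed by MPFR
   at the precision of the result type, provided the value passes the
   do_mpfr_ckconv round-trip checks above.  (Which math builtins actually
   reach this helper depends on the callers elsewhere in this file.)  */
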
11860 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
11861 FUNC on it and return the resulting value as a tree with type TYPE.
11862 The mpfr precision is set to the precision of TYPE. We assume that
11863 function FUNC returns zero if the result could be calculated
11864 exactly within the requested precision. */
11865
11866 static tree
11867 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
11868 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
11869 {
11870 tree result = NULL_TREE;
11871
11872 STRIP_NOPS (arg1);
11873 STRIP_NOPS (arg2);
11874
11875 /* To proceed, MPFR must exactly represent the target floating point
11876 format, which only happens when the target base equals two. */
11877 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11878 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
11879 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
11880 {
11881 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
11882 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
11883
11884 if (real_isfinite (ra1) && real_isfinite (ra2))
11885 {
11886 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11887 const int prec = fmt->p;
11888 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
11889 int inexact;
11890 mpfr_t m1, m2;
11891
11892 mpfr_inits2 (prec, m1, m2, NULL);
11893 mpfr_from_real (m1, ra1, GMP_RNDN);
11894 mpfr_from_real (m2, ra2, GMP_RNDN);
11895 mpfr_clear_flags ();
11896 inexact = func (m1, m1, m2, rnd);
11897 result = do_mpfr_ckconv (m1, type, inexact);
11898 mpfr_clears (m1, m2, NULL);
11899 }
11900 }
11901
11902 return result;
11903 }
11904
11905 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
11906 FUNC on it and return the resulting value as a tree with type TYPE.
11907 The mpfr precision is set to the precision of TYPE. We assume that
11908 function FUNC returns zero if the result could be calculated
11909 exactly within the requested precision. */
11910
11911 static tree
11912 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
11913 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
11914 {
11915 tree result = NULL_TREE;
11916
11917 STRIP_NOPS (arg1);
11918 STRIP_NOPS (arg2);
11919 STRIP_NOPS (arg3);
11920
11921 /* To proceed, MPFR must exactly represent the target floating point
11922 format, which only happens when the target base equals two. */
11923 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11924 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
11925 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
11926 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
11927 {
11928 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
11929 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
11930 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
11931
11932 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
11933 {
11934 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11935 const int prec = fmt->p;
11936 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
11937 int inexact;
11938 mpfr_t m1, m2, m3;
11939
11940 mpfr_inits2 (prec, m1, m2, m3, NULL);
11941 mpfr_from_real (m1, ra1, GMP_RNDN);
11942 mpfr_from_real (m2, ra2, GMP_RNDN);
11943 mpfr_from_real (m3, ra3, GMP_RNDN);
11944 mpfr_clear_flags ();
11945 inexact = func (m1, m1, m2, m3, rnd);
11946 result = do_mpfr_ckconv (m1, type, inexact);
11947 mpfr_clears (m1, m2, m3, NULL);
11948 }
11949 }
11950
11951 return result;
11952 }
11953
11954 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
11955 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
11956 If ARG_SINP and ARG_COSP are NULL then the result is returned
11957 as a complex value.
11958 The type is taken from the type of ARG and is used for setting the
11959 precision of the calculation and results. */
11960
11961 static tree
11962 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
11963 {
11964 tree const type = TREE_TYPE (arg);
11965 tree result = NULL_TREE;
11966
11967 STRIP_NOPS (arg);
11968
11969 /* To proceed, MPFR must exactly represent the target floating point
11970 format, which only happens when the target base equals two. */
11971 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11972 && TREE_CODE (arg) == REAL_CST
11973 && !TREE_OVERFLOW (arg))
11974 {
11975 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
11976
11977 if (real_isfinite (ra))
11978 {
11979 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11980 const int prec = fmt->p;
11981 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
11982 tree result_s, result_c;
11983 int inexact;
11984 mpfr_t m, ms, mc;
11985
11986 mpfr_inits2 (prec, m, ms, mc, NULL);
11987 mpfr_from_real (m, ra, GMP_RNDN);
11988 mpfr_clear_flags ();
11989 inexact = mpfr_sin_cos (ms, mc, m, rnd);
11990 result_s = do_mpfr_ckconv (ms, type, inexact);
11991 result_c = do_mpfr_ckconv (mc, type, inexact);
11992 mpfr_clears (m, ms, mc, NULL);
11993 if (result_s && result_c)
11994 {
11995 /* If we are to return the result as a complex value, do so. */
11996 if (!arg_sinp && !arg_cosp)
11997 return build_complex (build_complex_type (type),
11998 result_c, result_s);
11999
12000 /* Dereference the sin/cos pointer arguments. */
12001 arg_sinp = build_fold_indirect_ref (arg_sinp);
12002 arg_cosp = build_fold_indirect_ref (arg_cosp);
12003 /* Proceed iff valid pointer types were passed in. */
12004 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
12005 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
12006 {
12007 /* Set the values. */
12008 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
12009 result_s);
12010 TREE_SIDE_EFFECTS (result_s) = 1;
12011 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
12012 result_c);
12013 TREE_SIDE_EFFECTS (result_c) = 1;
12014 /* Combine the assignments into a compound expr. */
12015 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12016 result_s, result_c));
12017 }
12018 }
12019 }
12020 }
12021 return result;
12022 }
12023
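/* Illustrative example: for a constant argument,

     sincos (0.5, &s, &c);

   becomes a compound expression storing the MPFR-computed sine and
   cosine through the two pointers, while callers that pass null sin/cos
   pointers (the cexpi-style use) instead receive a COMPLEX_CST built
   from the cosine (real part) and sine (imaginary part).  */
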
12024 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
12025 two-argument mpfr order N Bessel function FUNC on them and return
12026 the resulting value as a tree with type TYPE. The mpfr precision
12027 is set to the precision of TYPE. We assume that function FUNC
12028 returns zero if the result could be calculated exactly within the
12029 requested precision. */
12030 static tree
12031 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
12032 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
12033 const REAL_VALUE_TYPE *min, bool inclusive)
12034 {
12035 tree result = NULL_TREE;
12036
12037 STRIP_NOPS (arg1);
12038 STRIP_NOPS (arg2);
12039
12040 /* To proceed, MPFR must exactly represent the target floating point
12041 format, which only happens when the target base equals two. */
12042 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12043 && tree_fits_shwi_p (arg1)
12044 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12045 {
12046 const HOST_WIDE_INT n = tree_to_shwi (arg1);
12047 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
12048
12049 if (n == (long)n
12050 && real_isfinite (ra)
12051 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
12052 {
12053 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12054 const int prec = fmt->p;
12055 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12056 int inexact;
12057 mpfr_t m;
12058
12059 mpfr_init2 (m, prec);
12060 mpfr_from_real (m, ra, GMP_RNDN);
12061 mpfr_clear_flags ();
12062 inexact = func (m, n, m, rnd);
12063 result = do_mpfr_ckconv (m, type, inexact);
12064 mpfr_clear (m);
12065 }
12066 }
12067
12068 return result;
12069 }
12070
12071 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
12072 the pointer *(ARG_QUO) and return the result. The type is taken
12073 from the type of ARG0 and is used for setting the precision of the
12074 calculation and results. */
12075
12076 static tree
12077 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
12078 {
12079 tree const type = TREE_TYPE (arg0);
12080 tree result = NULL_TREE;
12081
12082 STRIP_NOPS (arg0);
12083 STRIP_NOPS (arg1);
12084
12085 /* To proceed, MPFR must exactly represent the target floating point
12086 format, which only happens when the target base equals two. */
12087 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12088 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
12089 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
12090 {
12091 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
12092 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
12093
12094 if (real_isfinite (ra0) && real_isfinite (ra1))
12095 {
12096 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12097 const int prec = fmt->p;
12098 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12099 tree result_rem;
12100 long integer_quo;
12101 mpfr_t m0, m1;
12102
12103 mpfr_inits2 (prec, m0, m1, NULL);
12104 mpfr_from_real (m0, ra0, GMP_RNDN);
12105 mpfr_from_real (m1, ra1, GMP_RNDN);
12106 mpfr_clear_flags ();
12107 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
12108 /* The remainder is computed exactly regardless of the rounding
12109 mode, so pass inexact=0 to do_mpfr_ckconv(). */
12110 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
12111 mpfr_clears (m0, m1, NULL);
12112 if (result_rem)
12113 {
12114 /* MPFR calculates quo in the host's long so it may
12115 return more bits in quo than the target int can hold
12116 if sizeof(host long) > sizeof(target int). This can
12117 happen even for native compilers in LP64 mode. In
12118 these cases, modulo the quo value with the largest
12119 number that the target int can hold while leaving one
12120 bit for the sign. */
12121 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
12122 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
12123
12124 /* Dereference the quo pointer argument. */
12125 arg_quo = build_fold_indirect_ref (arg_quo);
12126 /* Proceed iff a valid pointer type was passed in. */
12127 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
12128 {
12129 /* Set the value. */
12130 tree result_quo
12131 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
12132 build_int_cst (TREE_TYPE (arg_quo),
12133 integer_quo));
12134 TREE_SIDE_EFFECTS (result_quo) = 1;
12135 /* Combine the quo assignment with the rem. */
12136 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12137 result_quo, result_rem));
12138 }
12139 }
12140 }
12141 }
12142 return result;
12143 }
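
/* For illustration of the folding above (the constants are ordinary
   arithmetic, not values taken from this file): with <math.h>,

       int q;
       double r = remquo (10.0, 3.0, &q);

   can be reduced to a COMPOUND_EXPR that stores the integral quotient and
   yields the remainder, roughly

       r = (q = 3, 1.0);

   since 10.0 == 3 * 3.0 + 1.0, provided do_mpfr_ckconv accepts the MPFR
   remainder.  */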
12144
12145 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
12146 resulting value as a tree with type TYPE. The mpfr precision is
12147 set to the precision of TYPE. We assume that this mpfr function
12148 returns zero if the result could be calculated exactly within the
12149 requested precision. In addition, the integer pointer represented
12150 by ARG_SG will be dereferenced and set to the appropriate signgam
12151 value (-1 or 1). */
12152
12153 static tree
12154 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
12155 {
12156 tree result = NULL_TREE;
12157
12158 STRIP_NOPS (arg);
12159
12160 /* To proceed, MPFR must exactly represent the target floating point
12161 format, which only happens when the target base equals two. Also
12162 verify ARG is a constant and that ARG_SG is an int pointer. */
12163 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12164 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
12165 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
12166 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
12167 {
12168 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
12169
12170 /* In addition to NaN and Inf, the argument cannot be zero or a
12171 negative integer. */
12172 if (real_isfinite (ra)
12173 && ra->cl != rvc_zero
12174 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
12175 {
12176 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12177 const int prec = fmt->p;
12178 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
12179 int inexact, sg;
12180 mpfr_t m;
12181 tree result_lg;
12182
12183 mpfr_init2 (m, prec);
12184 mpfr_from_real (m, ra, GMP_RNDN);
12185 mpfr_clear_flags ();
12186 inexact = mpfr_lgamma (m, &sg, m, rnd);
12187 result_lg = do_mpfr_ckconv (m, type, inexact);
12188 mpfr_clear (m);
12189 if (result_lg)
12190 {
12191 tree result_sg;
12192
12193 /* Dereference the arg_sg pointer argument. */
12194 arg_sg = build_fold_indirect_ref (arg_sg);
12195 /* Assign the signgam value into *arg_sg. */
12196 result_sg = fold_build2 (MODIFY_EXPR,
12197 TREE_TYPE (arg_sg), arg_sg,
12198 build_int_cst (TREE_TYPE (arg_sg), sg));
12199 TREE_SIDE_EFFECTS (result_sg) = 1;
12200 /* Combine the signgam assignment with the lgamma result. */
12201 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12202 result_sg, result_lg));
12203 }
12204 }
12205 }
12206
12207 return result;
12208 }
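
/* For illustration of the folding above: with _GNU_SOURCE and <math.h>,

       int sg;
       double lg = lgamma_r (3.0, &sg);

   can be reduced to a COMPOUND_EXPR that stores the sign of gamma (3.0)
   and yields its log, roughly

       lg = (sg = 1, <REAL_CST for log (2.0)>);

   since gamma (3.0) == 2.0 is positive, provided do_mpfr_ckconv accepts
   the MPFR result.  */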
12209
12210 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
12211 function FUNC on it and return the resulting value as a tree with
12212 type TYPE. The mpfr precision is set to the precision of the real
12213 component of TYPE. We assume that function FUNC returns zero if the
12214 result could be calculated exactly within the requested precision. */
12215
12216 static tree
12217 do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
12218 {
12219 tree result = NULL_TREE;
12220
12221 STRIP_NOPS (arg);
12222
12223 /* To proceed, MPFR must exactly represent the target floating point
12224 format, which only happens when the target base equals two. */
12225 if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
12226 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
12227 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
12228 {
12229 const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
12230 const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
12231
12232 if (real_isfinite (re) && real_isfinite (im))
12233 {
12234 const struct real_format *const fmt =
12235 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
12236 const int prec = fmt->p;
12237 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
12238 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
12239 int inexact;
12240 mpc_t m;
12241
12242 mpc_init2 (m, prec);
12243 mpfr_from_real (mpc_realref (m), re, rnd);
12244 mpfr_from_real (mpc_imagref (m), im, rnd);
12245 mpfr_clear_flags ();
12246 inexact = func (m, m, crnd);
12247 result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
12248 mpc_clear (m);
12249 }
12250 }
12251
12252 return result;
12253 }
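
/* A minimal caller sketch (the builtin and MPC entry point named here are
   assumptions): for a one-argument complex builtin the folder passes the
   matching mpc function, e.g.

       do_mpc_arg1 (arg, type, mpc_sqrt);

   so that, with <complex.h>, a call such as csqrt (-4.0 + 0.0 * I) on a
   COMPLEX_CST can be replaced by the COMPLEX_CST 0.0 + 2.0 * I computed
   by MPC, subject to the do_mpc_ckconv checks.  */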
12254
12255 /* If arguments ARG0 and ARG1 are COMPLEX_CSTs, call the two-argument
12256 mpc function FUNC on them and return the resulting value as a tree
12257 with type TYPE. The mpfr precision is set to the precision of the
12258 real component of TYPE. We assume that FUNC returns zero if the result
12259 could be calculated exactly within the requested precision. If
12260 DO_NONFINITE is true, then fold expressions containing Inf or NaN
12261 in the arguments and/or results. */
12262
12263 tree
12264 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
12265 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
12266 {
12267 tree result = NULL_TREE;
12268
12269 STRIP_NOPS (arg0);
12270 STRIP_NOPS (arg1);
12271
12272 /* To proceed, MPFR must exactly represent the target floating point
12273 format, which only happens when the target base equals two. */
12274 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
12275 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
12276 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
12277 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
12278 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
12279 {
12280 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
12281 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
12282 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
12283 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
12284
12285 if (do_nonfinite
12286 || (real_isfinite (re0) && real_isfinite (im0)
12287 && real_isfinite (re1) && real_isfinite (im1)))
12288 {
12289 const struct real_format *const fmt =
12290 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
12291 const int prec = fmt->p;
12292 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
12293 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
12294 int inexact;
12295 mpc_t m0, m1;
12296
12297 mpc_init2 (m0, prec);
12298 mpc_init2 (m1, prec);
12299 mpfr_from_real (mpc_realref (m0), re0, rnd);
12300 mpfr_from_real (mpc_imagref (m0), im0, rnd);
12301 mpfr_from_real (mpc_realref (m1), re1, rnd);
12302 mpfr_from_real (mpc_imagref (m1), im1, rnd);
12303 mpfr_clear_flags ();
12304 inexact = func (m0, m0, m1, crnd);
12305 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
12306 mpc_clear (m0);
12307 mpc_clear (m1);
12308 }
12309 }
12310
12311 return result;
12312 }
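
/* A minimal caller sketch (the builtin and the DO_NONFINITE choice are
   assumptions): a two-argument complex builtin such as cpow could be
   folded via

       do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);

   so that cpow (2.0 + 0.0 * I, 3.0 + 0.0 * I) with COMPLEX_CST operands
   becomes the COMPLEX_CST 8.0 + 0.0 * I, subject to the do_mpc_ckconv
   checks.  Passing a nonzero DO_NONFINITE additionally allows folding
   when an operand or the result is Inf or NaN.  */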
12313
12314 /* A wrapper function for builtin folding that prevents warnings for
12315 "statement without effect" and the like, caused by removing the
12316 call node before the warning is generated. */
12317
12318 tree
12319 fold_call_stmt (gcall *stmt, bool ignore)
12320 {
12321 tree ret = NULL_TREE;
12322 tree fndecl = gimple_call_fndecl (stmt);
12323 location_t loc = gimple_location (stmt);
12324 if (fndecl
12325 && TREE_CODE (fndecl) == FUNCTION_DECL
12326 && DECL_BUILT_IN (fndecl)
12327 && !gimple_call_va_arg_pack_p (stmt))
12328 {
12329 int nargs = gimple_call_num_args (stmt);
12330 tree *args = (nargs > 0
12331 ? gimple_call_arg_ptr (stmt, 0)
12332 : &error_mark_node);
12333
12334 if (avoid_folding_inline_builtin (fndecl))
12335 return NULL_TREE;
12336 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
12337 {
12338 return targetm.fold_builtin (fndecl, nargs, args, ignore);
12339 }
12340 else
12341 {
12342 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
12343 if (ret)
12344 {
12345 /* Propagate location information from original call to
12346 expansion of builtin. Otherwise things like
12347 maybe_emit_chk_warning, that operate on the expansion
12348 of a builtin, will use the wrong location information. */
12349 if (gimple_has_location (stmt))
12350 {
12351 tree realret = ret;
12352 if (TREE_CODE (ret) == NOP_EXPR)
12353 realret = TREE_OPERAND (ret, 0);
12354 if (CAN_HAVE_LOCATION_P (realret)
12355 && !EXPR_HAS_LOCATION (realret))
12356 SET_EXPR_LOCATION (realret, loc);
12357 return realret;
12358 }
12359 return ret;
12360 }
12361 }
12362 }
12363 return NULL_TREE;
12364 }
12365
12366 /* Look up, via builtin_decl_explicit, the builtin function decl that
12367 corresponds to DECL and set ASMSPEC as its user assembler name.
12368 DECL must be a function decl that declares a builtin. */
12369
12370 void
12371 set_builtin_user_assembler_name (tree decl, const char *asmspec)
12372 {
12373 tree builtin;
12374 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
12375 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
12376 && asmspec != 0);
12377
12378 builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
12379 set_user_assembler_name (builtin, asmspec);
12380 switch (DECL_FUNCTION_CODE (decl))
12381 {
12382 case BUILT_IN_MEMCPY:
12383 init_block_move_fn (asmspec);
12384 memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
12385 break;
12386 case BUILT_IN_MEMSET:
12387 init_block_clear_fn (asmspec);
12388 memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
12389 break;
12390 case BUILT_IN_MEMMOVE:
12391 memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
12392 break;
12393 case BUILT_IN_MEMCMP:
12394 memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
12395 break;
12396 case BUILT_IN_ABORT:
12397 abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
12398 break;
12399 case BUILT_IN_FFS:
12400 if (INT_TYPE_SIZE < BITS_PER_WORD)
12401 {
12402 set_user_assembler_libfunc ("ffs", asmspec);
12403 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
12404 MODE_INT, 0), "ffs");
12405 }
12406 break;
12407 default:
12408 break;
12409 }
12410 }
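
/* For illustration, the situation this function handles: a source file may
   rename a builtin with an asm label, e.g.

       extern void *memcpy (void *, const void *, __SIZE_TYPE__)
         __asm__ ("__my_memcpy");

   (the name __my_memcpy is just an example).  Once this function has been
   called for that decl with "__my_memcpy" as ASMSPEC, the block moves that
   GCC emits itself (via init_block_move_fn and memcpy_libfunc) also
   reference __my_memcpy instead of memcpy.  */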
12411
12412 /* Return true if DECL is a builtin that expands to a constant or similarly
12413 simple code. */
12414 bool
12415 is_simple_builtin (tree decl)
12416 {
12417 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
12418 switch (DECL_FUNCTION_CODE (decl))
12419 {
12420 /* Builtins that expand to constants. */
12421 case BUILT_IN_CONSTANT_P:
12422 case BUILT_IN_EXPECT:
12423 case BUILT_IN_OBJECT_SIZE:
12424 case BUILT_IN_UNREACHABLE:
12425 /* Simple register moves or loads from stack. */
12426 case BUILT_IN_ASSUME_ALIGNED:
12427 case BUILT_IN_RETURN_ADDRESS:
12428 case BUILT_IN_EXTRACT_RETURN_ADDR:
12429 case BUILT_IN_FROB_RETURN_ADDR:
12430 case BUILT_IN_RETURN:
12431 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
12432 case BUILT_IN_FRAME_ADDRESS:
12433 case BUILT_IN_VA_END:
12434 case BUILT_IN_STACK_SAVE:
12435 case BUILT_IN_STACK_RESTORE:
12436 /* Exception state returns or moves registers around. */
12437 case BUILT_IN_EH_FILTER:
12438 case BUILT_IN_EH_POINTER:
12439 case BUILT_IN_EH_COPY_VALUES:
12440 return true;
12441
12442 default:
12443 return false;
12444 }
12445
12446 return false;
12447 }
12448
12449 /* Return true if DECL is a builtin that is not expensive, i.e., it is
12450 most probably expanded inline into reasonably simple code. This is a
12451 superset of is_simple_builtin. */
12452 bool
12453 is_inexpensive_builtin (tree decl)
12454 {
12455 if (!decl)
12456 return false;
12457 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
12458 return true;
12459 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
12460 switch (DECL_FUNCTION_CODE (decl))
12461 {
12462 case BUILT_IN_ABS:
12463 case BUILT_IN_ALLOCA:
12464 case BUILT_IN_ALLOCA_WITH_ALIGN:
12465 case BUILT_IN_BSWAP16:
12466 case BUILT_IN_BSWAP32:
12467 case BUILT_IN_BSWAP64:
12468 case BUILT_IN_CLZ:
12469 case BUILT_IN_CLZIMAX:
12470 case BUILT_IN_CLZL:
12471 case BUILT_IN_CLZLL:
12472 case BUILT_IN_CTZ:
12473 case BUILT_IN_CTZIMAX:
12474 case BUILT_IN_CTZL:
12475 case BUILT_IN_CTZLL:
12476 case BUILT_IN_FFS:
12477 case BUILT_IN_FFSIMAX:
12478 case BUILT_IN_FFSL:
12479 case BUILT_IN_FFSLL:
12480 case BUILT_IN_IMAXABS:
12481 case BUILT_IN_FINITE:
12482 case BUILT_IN_FINITEF:
12483 case BUILT_IN_FINITEL:
12484 case BUILT_IN_FINITED32:
12485 case BUILT_IN_FINITED64:
12486 case BUILT_IN_FINITED128:
12487 case BUILT_IN_FPCLASSIFY:
12488 case BUILT_IN_ISFINITE:
12489 case BUILT_IN_ISINF_SIGN:
12490 case BUILT_IN_ISINF:
12491 case BUILT_IN_ISINFF:
12492 case BUILT_IN_ISINFL:
12493 case BUILT_IN_ISINFD32:
12494 case BUILT_IN_ISINFD64:
12495 case BUILT_IN_ISINFD128:
12496 case BUILT_IN_ISNAN:
12497 case BUILT_IN_ISNANF:
12498 case BUILT_IN_ISNANL:
12499 case BUILT_IN_ISNAND32:
12500 case BUILT_IN_ISNAND64:
12501 case BUILT_IN_ISNAND128:
12502 case BUILT_IN_ISNORMAL:
12503 case BUILT_IN_ISGREATER:
12504 case BUILT_IN_ISGREATEREQUAL:
12505 case BUILT_IN_ISLESS:
12506 case BUILT_IN_ISLESSEQUAL:
12507 case BUILT_IN_ISLESSGREATER:
12508 case BUILT_IN_ISUNORDERED:
12509 case BUILT_IN_VA_ARG_PACK:
12510 case BUILT_IN_VA_ARG_PACK_LEN:
12511 case BUILT_IN_VA_COPY:
12512 case BUILT_IN_TRAP:
12513 case BUILT_IN_SAVEREGS:
12514 case BUILT_IN_POPCOUNTL:
12515 case BUILT_IN_POPCOUNTLL:
12516 case BUILT_IN_POPCOUNTIMAX:
12517 case BUILT_IN_POPCOUNT:
12518 case BUILT_IN_PARITYL:
12519 case BUILT_IN_PARITYLL:
12520 case BUILT_IN_PARITYIMAX:
12521 case BUILT_IN_PARITY:
12522 case BUILT_IN_LABS:
12523 case BUILT_IN_LLABS:
12524 case BUILT_IN_PREFETCH:
12525 case BUILT_IN_ACC_ON_DEVICE:
12526 return true;
12527
12528 default:
12529 return is_simple_builtin (decl);
12530 }
12531
12532 return false;
12533 }
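
/* A hypothetical caller sketch (call_is_cheap_p is not a function in this
   file): a pass deciding whether a call is cheap enough to duplicate could
   combine these predicates as

       static bool
       call_is_cheap_p (gcall *call)
       {
         tree fndecl = gimple_call_fndecl (call);
         return fndecl && is_inexpensive_builtin (fndecl);
       }

   with is_simple_builtin as the stricter alternative when only
   constant-like or register-move builtins are acceptable.  */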