/* Expand builtin functions.
   Copyright (C) 1988-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "hash-set.h"
#include "vec.h"
#include "input.h"
#include "alias.h"
#include "symtab.h"
#include "inchash.h"
#include "tree.h"
#include "fold-const.h"
#include "stringpool.h"
#include "stor-layout.h"
#include "calls.h"
#include "varasm.h"
#include "tree-object-size.h"
#include "realmpfr.h"
#include "predict.h"
#include "hashtab.h"
#include "hard-reg-set.h"
#include "function.h"
#include "cfgrtl.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "flags.h"
#include "regs.h"
#include "except.h"
#include "insn-config.h"
#include "statistics.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "emit-rtl.h"
#include "stmt.h"
#include "expr.h"
#include "insn-codes.h"
#include "optabs.h"
#include "libfuncs.h"
#include "recog.h"
#include "output.h"
#include "typeclass.h"
#include "tm_p.h"
#include "target.h"
#include "langhooks.h"
#include "tree-ssanames.h"
#include "tree-dfa.h"
#include "value-prof.h"
#include "diagnostic-core.h"
#include "builtins.h"
#include "asan.h"
#include "cilk.h"
#include "ipa-ref.h"
#include "lto-streamer.h"
#include "cgraph.h"
#include "tree-chkp.h"
#include "rtl-chkp.h"
#include "gomp-constants.h"


static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));

struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
struct target_builtins *this_target_builtins = &default_target_builtins;
#endif

/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN

/* Set up an array of builtin_info_type, making sure each element's decl is
   initialized to NULL_TREE.  */
builtin_info_type builtin_info[(int)END_BUILTINS];

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;

static rtx c_readstr (const char *, machine_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
static rtx result_vector (int, rtx);
#endif
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static void expand_errno_check (tree, rtx);
static rtx expand_builtin_mathfn (tree, rtx, rtx);
static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx expand_builtin_memcmp (tree, rtx, machine_mode);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, machine_mode);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_memcpy_with_bounds (tree, rtx);
static rtx expand_builtin_memcpy_args (tree, tree, tree, rtx, tree);
static rtx expand_builtin_mempcpy (tree, rtx, machine_mode);
static rtx expand_builtin_mempcpy_with_bounds (tree, rtx, machine_mode);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
                                        machine_mode, int, tree);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, machine_mode);
static rtx expand_builtin_memset (tree, rtx, machine_mode);
static rtx expand_builtin_memset_with_bounds (tree, rtx, machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, machine_mode);
static rtx expand_builtin_alloca (tree, bool);
static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree fold_builtin_nan (tree, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static bool integer_valued_real_p (tree);
static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_sqrt (location_t, tree, tree);
static tree fold_builtin_cbrt (location_t, tree, tree);
static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
static tree fold_builtin_cos (location_t, tree, tree, tree);
static tree fold_builtin_cosh (location_t, tree, tree, tree);
static tree fold_builtin_tan (tree, tree);
static tree fold_builtin_trunc (location_t, tree, tree);
static tree fold_builtin_floor (location_t, tree, tree);
static tree fold_builtin_ceil (location_t, tree, tree);
static tree fold_builtin_round (location_t, tree, tree);
static tree fold_builtin_int_roundingfn (location_t, tree, tree);
static tree fold_builtin_bitop (tree, tree);
static tree fold_builtin_strchr (location_t, tree, tree, tree);
static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_strcmp (location_t, tree, tree);
static tree fold_builtin_strncmp (location_t, tree, tree, tree);
static tree fold_builtin_signbit (location_t, tree, tree);
static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
                                        enum tree_code);
static tree fold_builtin_0 (location_t, tree);
static tree fold_builtin_1 (location_t, tree, tree);
static tree fold_builtin_2 (location_t, tree, tree, tree);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
static tree fold_builtin_varargs (location_t, tree, tree*, int);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strstr (location_t, tree, tree, tree);
static tree fold_builtin_strrchr (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
                                      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);

unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
char target_percent_c[3];
char target_percent_s[3];
char target_percent_s_newline[4];
static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
                          const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_arg2 (tree, tree, tree,
                          int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_arg3 (tree, tree, tree, tree,
                          int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_sincos (tree, tree, tree);
static tree do_mpfr_bessel_n (tree, tree, tree,
                              int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
                              const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);

/* Return true if NAME starts with __builtin_, __sync_ or __atomic_, or
   names one of the Cilk Plus runtime entry points recognized when
   -fcilkplus is in effect.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  if (flag_cilkplus
      && (!strcmp (name, "__cilkrts_detach")
          || !strcmp (name, "__cilkrts_pop_frame")))
    return true;
  return false;
}

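/* Illustrative note (not from the original sources): with the checks
   above,

     is_builtin_name ("__builtin_memcpy")          returns true
     is_builtin_name ("__sync_fetch_and_add")      returns true
     is_builtin_name ("__atomic_load_n")           returns true
     is_builtin_name ("memcpy")                    returns false

   and the two __cilkrts_ entry points match only when -fcilkplus
   is in effect.  */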

/* Return true if DECL is a function symbol representing a built-in.  */

bool
is_builtin_fn (tree decl)
{
  return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
}

/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

static bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}

/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and
   N in *BITPOSP and return true.  Otherwise return false and store
   BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
                        unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
                             &mode, &unsignedp, &volatilep, true);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
         alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
         allows the low bit to be used as a virtual bit, we know
         that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
        align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
#ifdef CONSTANT_ALIGNMENT
      if (CONSTANT_CLASS_P (exp))
        align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
#endif
      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
    {
      align = TYPE_ALIGN (TREE_TYPE (exp));
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
           || TREE_CODE (exp) == MEM_REF
           || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;
      unsigned HOST_WIDE_INT ptr_bitmask = ~0;

      /* If the address is explicitly aligned, handle that.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
        {
          ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
          ptr_bitmask *= BITS_PER_UNIT;
          align = ptr_bitmask & -ptr_bitmask;
          addr = TREE_OPERAND (addr, 0);
        }

      known_alignment
        = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* Re-apply explicit alignment to the bitpos.  */
      ptr_bitpos &= ptr_bitmask;

      /* The alignment of the pointer operand in a TARGET_MEM_REF
         has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
        {
          if (TMR_INDEX (exp))
            {
              unsigned HOST_WIDE_INT step = 1;
              if (TMR_STEP (exp))
                step = TREE_INT_CST_LOW (TMR_STEP (exp));
              align = MIN (align, (step & -step) * BITS_PER_UNIT);
            }
          if (TMR_INDEX2 (exp))
            align = BITS_PER_UNIT;
          known_alignment = false;
        }

      /* When EXP is an actual memory reference then we can use
         TYPE_ALIGN of a pointer indirection to derive alignment.
         Do so only if get_pointer_alignment_1 did not reveal absolute
         alignment knowledge and if using that alignment would
         improve the situation.  */
      if (!addr_p && !known_alignment
          && TYPE_ALIGN (TREE_TYPE (exp)) > align)
        align = TYPE_ALIGN (TREE_TYPE (exp));
      else
        {
          /* Else adjust bitpos accordingly.  */
          bitpos += ptr_bitpos;
          if (TREE_CODE (exp) == MEM_REF
              || TREE_CODE (exp) == TARGET_MEM_REF)
            bitpos += mem_ref_offset (exp).to_short_addr () * BITS_PER_UNIT;
        }
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CSTs are the only constant objects we allow not to be
         wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
#ifdef CONSTANT_ALIGNMENT
      if (CONSTANT_CLASS_P (exp))
        align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
#endif
      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
        {
          unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
          if (inner)
            align = MIN (align, inner);
        }
    }

  *alignp = align;
  *bitposp = bitpos & (*alignp - 1);
  return known_alignment;
}

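/* A worked example of the M/N contract above, added for illustration
   (the values are hypothetical, not taken from any particular target):
   if EXP is a MEM_REF whose base address is known to be 16-byte aligned
   and whose constant offset is 4 bytes, then on return *ALIGNP == 128
   and *BITPOSP == 32, i.e. the address sits N bits past an M-bit-aligned
   boundary:

     (&EXP - 32 bits) % 128 bits == 0, with 32 < 128.  */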
/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Otherwise return false and store BITS_PER_UNIT to *ALIGNP and any
   bit-offset to *BITPOSP.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
                        unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}

/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);
  return align;
}

/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Return false if the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
                         unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
                                   alignp, bitposp, true);
  else if (TREE_CODE (exp) == SSA_NAME
           && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
        {
          *bitposp = ptr_misalign * BITS_PER_UNIT;
          *alignp = ptr_align * BITS_PER_UNIT;
          /* We cannot really tell whether this result is an approximation.  */
          return true;
        }
      else
        {
          *bitposp = 0;
          *alignp = BITS_PER_UNIT;
          return false;
        }
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
                  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}

/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);

  return align;
}

/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  tree offset_node;
  HOST_WIDE_INT offset;
  int max;
  const char *ptr;
  location_t loc;

  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
        return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  loc = EXPR_LOC_OR_LOC (src, input_location);

  src = string_constant (src, &offset_node);
  if (src == 0)
    return NULL_TREE;

  max = TREE_STRING_LENGTH (src) - 1;
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
         compute the offset to the following null if we don't know where to
         start searching for it.  */
      int i;

      for (i = 0; i < max; i++)
        if (ptr[i] == 0)
          return NULL_TREE;

      /* We don't know the starting offset, but we do know that the string
         has no internal zero bytes.  We can assume that the offset falls
         within the bounds of the string; otherwise, the programmer deserves
         what he gets.  Subtract the offset from the length of the string,
         and return that.  This would perhaps not be valid if we were dealing
         with named arrays in addition to literal string constants.  */

      return size_diffop_loc (loc, size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (offset_node == 0)
    offset = 0;
  else if (! tree_fits_shwi_p (offset_node))
    offset = -1;
  else
    offset = tree_to_shwi (offset_node);

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
          && !TREE_NO_WARNING (src))
        {
          warning_at (loc, 0, "offset outside bounds of constant string");
          TREE_NO_WARNING (src) = 1;
        }
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return ssize_int (strlen (ptr + offset));
}

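/* Worked example (illustrative, not part of the original file): for a
   STRING_CST "abc" (TREE_STRING_LENGTH == 4, counting the trailing NUL)
   c_strlen returns ssize_int (3).  For "foo\0bar" with a known offset of
   4 it returns ssize_int (3) again, the length of "bar"; with a variable
   offset it returns NULL_TREE, because the internal zero byte makes the
   distance to the terminating NUL depend on the unknown offset.  */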
/* Return a char pointer for a C string if it is a string constant
   or sum of string constant and integer constant.  */

const char *
c_getstr (tree src)
{
  tree offset_node;

  src = string_constant (src, &offset_node);
  if (src == 0)
    return 0;

  if (offset_node == 0)
    return TREE_STRING_POINTER (src);
  else if (!tree_fits_uhwi_p (offset_node)
           || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
    return 0;

  return TREE_STRING_POINTER (src) + tree_to_uhwi (offset_node);
}

/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, machine_mode mode)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
        j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
          && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
        j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      if (ch)
        ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}

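/* Example of the byte placement above, added for illustration: on a
   little-endian target with 32-bit SImode and 8-bit units,
   c_readstr ("abcd", SImode) yields the constant 0x64636261, while a
   big-endian target yields 0x61626364.  Once a NUL is seen, CH stays
   zero and the remaining bytes are filled with zeros, so
   c_readstr ("a", SImode) is 0x00000061 on a little-endian target.  */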
/* Cast a target constant CST to target CHAR and if that value fits into
   host char type, return zero and put that value into variable pointed to by
   P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Do not care if it fits or not right here.  */
  val = TREE_INT_CST_LOW (cst);

  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}

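/* Worked example (hypothetical target parameters, for illustration
   only): with CHAR_TYPE_SIZE == 8, the INTEGER_CST 321 is first
   truncated to the target char 65 ('A'), which also fits a host char,
   so 0 is returned and 'A' stored through P.  With a hypothetical
   16-bit target char and an 8-bit host char, 321 survives the target
   truncation but not the host one (321 != 65), so 1 is returned.  */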
/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
          && (TREE_CODE (exp) == PARM_DECL
              || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}

/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;

#ifdef INITIAL_FRAME_ADDRESS_RTX
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
#else
  rtx tem;

  /* For a zero count with __builtin_return_address, we don't care what
     frame address we return, because target-specific definitions will
     override us.  Therefore frame pointer elimination is OK, and using
     the soft frame pointer is OK.

     For a nonzero count, or a zero count with __builtin_frame_address,
     we require a stable offset from the current frame pointer to the
     previous one, so we must use the hard frame pointer, and
     we must disable frame pointer elimination.  */
  if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    tem = frame_pointer_rtx;
  else
    {
      tem = hard_frame_pointer_rtx;

      /* Tell reload not to eliminate the frame pointer.  */
      crtl->accesses_prior_frames = 1;
    }
#endif

  /* Some machines need special handling before we can access
     arbitrary frames.  For example, on the SPARC, we must first flush
     all register windows to the stack.  */
#ifdef SETUP_FRAME_ADDRESSES
  if (count > 0)
    SETUP_FRAME_ADDRESSES ();
#endif

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
  if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
         frame address points to, unless otherwise specified.  */
#ifdef DYNAMIC_CHAIN_ADDRESS
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
#ifdef FRAME_ADDR_RTX
    return FRAME_ADDR_RTX (tem);
#else
    return tem;
#endif

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
                        plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}

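/* Usage sketch for the two builtins expanded above (illustrative, not
   from the original file).  COUNT must be a constant; a nonzero COUNT
   walks the dynamic chain exactly as the loop above does:

     void *ra = __builtin_return_address (0);   // our own return address
     void *fp = __builtin_frame_address (1);    // the caller's frame

   Values for COUNT > 0 are reliable only when the intervening frames
   keep a frame pointer.  */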
/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
                                           GET_MODE_SIZE (Pmode))),
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
                  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
                            plus_constant (Pmode, buf_addr,
                                           2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
#ifdef HAVE_builtin_setjmp_setup
  if (HAVE_builtin_setjmp_setup)
    emit_insn (gen_builtin_setjmp_setup (buf_addr));
#endif

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}

/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = targetm.calls.static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
#ifdef HAVE_nonlocal_goto
  if (! HAVE_nonlocal_goto)
#endif
    {
      /* First adjust our frame pointer to its actual value.  It was
         previously set to the start of the virtual area corresponding to
         the stacked variables when we branched here and now needs to be
         adjusted to the actual hardware fp value.

         Assignments to virtual registers are converted by
         instantiate_virtual_regs into the corresponding assignment
         to the underlying register (fp in this case) that makes
         the original assignment true.
         So the following insn will actually be decrementing fp by
         STARTING_FRAME_OFFSET.  */
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

      /* Restoring the frame pointer also modifies the hard frame pointer.
         Mark it used (so that the previous assignment remains live once
         the frame pointer is eliminated) and clobbered (to represent the
         implicit update from the assignment).  */
      emit_use (hard_frame_pointer_rtx);
      emit_clobber (hard_frame_pointer_rtx);
    }

#if !HARD_FRAME_POINTER_IS_ARG_POINTER
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      /* If the argument pointer can be eliminated in favor of the
         frame pointer, we don't need to restore it.  We assume here
         that if such an elimination is present, it can always be used.
         This is the case on all known machines; if we don't make this
         assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
        if (elim_regs[i].from == ARG_POINTER_REGNUM
            && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
          break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
        {
          /* Now restore our arg pointer from the address at which it
             was saved in our stack frame.  */
          emit_move_insn (crtl->args.internal_arg_pointer,
                          copy_to_reg (get_arg_pointer_save_area ()));
        }
    }
#endif

#ifdef HAVE_builtin_setjmp_receiver
  if (receiver_label != NULL && HAVE_builtin_setjmp_receiver)
    emit_insn (gen_builtin_setjmp_receiver (receiver_label));
  else
#endif
#ifdef HAVE_nonlocal_goto_receiver
    if (HAVE_nonlocal_goto_receiver)
      emit_insn (gen_nonlocal_goto_receiver ());
    else
#endif
      { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}

/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack;
  rtx_insn *insn, *last;
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to current
     function.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require the user to pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
#ifdef HAVE_builtin_longjmp
  if (HAVE_builtin_longjmp)
    emit_insn (gen_builtin_longjmp (buf_addr));
  else
#endif
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
                                               GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
                                                   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
         from expand_goto in stmt.c; see there for detailed comments.  */
#ifdef HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
        /* We have to pass a value to the nonlocal_goto pattern that will
           get copied into the static_chain pointer, but it does not matter
           what that value is, because builtin_setjmp does not use it.  */
        emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
      else
#endif
        {
          lab = copy_to_reg (lab);

          emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
          emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

          emit_move_insn (hard_frame_pointer_rtx, fp);
          emit_stack_restore (SAVE_NONLOCAL, stack);

          emit_use (hard_frame_pointer_rtx);
          emit_use (stack_pointer_rtx);
          emit_indirect_jump (lab);
        }
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
        {
          add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
          break;
        }
      else if (CALL_P (insn))
        break;
    }
}

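/* Usage sketch for the __builtin_setjmp/__builtin_longjmp pair handled
   here (illustrative; these are the internal-EH builtins, not the libc
   functions, and do_work/handle_unwind are hypothetical).  The buffer
   is five words, and the second longjmp argument must be the constant 1,
   as asserted above:

     void *buf[5];
     if (__builtin_setjmp (buf) == 0)
       do_work ();           // direct path: setjmp returned 0
     else
       handle_unwind ();     // reached via __builtin_longjmp (buf, 1)
*/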
static inline bool
more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
{
  return (iter->i < iter->n);
}

/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis; otherwise the last specifier must be a
   VOID_TYPE.  */

static bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = false;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  do
    {
      code = (enum tree_code) va_arg (ap, int);
      switch (code)
        {
        case 0:
          /* This signifies an ellipsis; any further arguments are all ok.  */
          res = true;
          goto end;
        case VOID_TYPE:
          /* This signifies an endlink, if no arguments remain, return
             true, otherwise return false.  */
          res = !more_const_call_expr_args_p (&iter);
          goto end;
        default:
          /* If no parameters remain or the parameter's code does not
             match the specified code, return false.  Otherwise continue
             checking any remaining arguments.  */
          arg = next_const_call_expr_arg (&iter);
          if (!validate_arg (arg, code))
            goto end;
          break;
        }
    }
  while (1);

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  return res;
}

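/* Typical uses of validate_arglist, as seen elsewhere in this file
   (illustrative):

     validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
       accepts exactly two pointer arguments;
     validate_arglist (exp, POINTER_TYPE, 0)
       accepts one pointer argument followed by anything, per the
       trailing-0 "ellipsis" convention described above.  */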
/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp;
  rtx_insn *insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
                      plus_constant (Pmode, r_save_area,
                                     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

#ifdef HAVE_nonlocal_goto
  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (HAVE_nonlocal_goto)
    emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
#endif
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
         not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
         conservatively assume that the target function makes use of it.
         The prologue of functions with nonlocal gotos must therefore
         initialize the GP register to the appropriate value, and we
         must then make sure that this value is live at the point
         of the jump.  (Note that this doesn't necessarily apply
         to targets with a nonlocal_goto pattern; they are free
         to implement it in their own way.  Note also that this is
         a no-op if the GP register is a global invariant.)  */
      if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
          && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
        emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
        {
          add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
          break;
        }
      else if (CALL_P (insn))
        break;
    }

  return const0_rtx;
}

/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to the current value.  This is
   also called directly by the SJLJ exception handling code.  */

void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
                   memory_address
                   (sa_mode,
                    plus_constant (Pmode, buf_addr,
                                   2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}

/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
               " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

#ifdef HAVE_prefetch
  if (HAVE_prefetch)
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (CODE_FOR_prefetch, 3, ops))
        return;
    }
#endif

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}

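/* User-level sketch of the builtin expanded above (from its documented
   interface): the second and third arguments must be constants;
   rw is 0 (read, the default) or 1 (write), locality 0..3 (default 3):

     __builtin_prefetch (&a[i + 8]);         // read prefetch, high locality
     __builtin_prefetch (&a[i + 8], 1, 0);   // write prefetch, no reuse expected
*/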
/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is not a resolved SAVE_EXPR, MEM_ATTRS can still be derived
     from its expression; for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
         && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob,
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
                     build_array_type (char_type_node,
                                       build_range_type (sizetype,
                                                         size_one_node, len)),
                     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
           && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
                                                     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
                         build_array_type (char_type_node,
                                           build_range_type (sizetype,
                                                             size_zero_node,
                                                             NULL)),
                         exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}

/* Built-in functions to perform an untyped call and return.  */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)

/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;
  machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
         passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
        size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        if (FUNCTION_ARG_REGNO_P (regno))
          {
            mode = targetm.calls.get_raw_arg_mode (regno);

            gcc_assert (mode != VOIDmode);

            align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
            if (size % align != 0)
              size = CEIL (size, align) * align;
            size += GET_MODE_SIZE (mode);
            apply_args_mode[regno] = mode;
          }
        else
          {
            apply_args_mode[regno] = VOIDmode;
          }
    }
  return size;
}

/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;
  machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        if (targetm.calls.function_value_regno_p (regno))
          {
            mode = targetm.calls.get_raw_result_mode (regno);

            gcc_assert (mode != VOIDmode);

            align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
            if (size % align != 0)
              size = CEIL (size, align) * align;
            size += GET_MODE_SIZE (mode);
            apply_result_mode[regno] = mode;
          }
        else
          apply_result_mode[regno] = VOIDmode;

      /* Allow targets that use untyped_call and untyped_return to override
         the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}

#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  machine_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;
        reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
        mem = adjust_address (result, mode, size);
        savevec[nelts++] = (savep
                            ? gen_rtx_SET (mem, reg)
                            : gen_rtx_SET (reg, mem));
        size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
#endif /* HAVE_untyped_call or HAVE_untyped_return */

/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  machine_mode mode;
  rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;

        tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

        emit_move_insn (adjust_address (registers, mode, size), tem);
        size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
  /* We need the pointer as the caller actually passed the arguments to us,
     not as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  if (STACK_GROWS_DOWNWARD)
    tem
      = force_operand (plus_constant (Pmode, tem,
                                      crtl->args.pretend_args_size),
                       NULL_RTX);
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
                      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}

/* __builtin_apply_args returns block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;
    rtx seq;

    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
        && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}

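/* Usage sketch for the untyped call machinery (illustrative; target_fn
   is hypothetical and 64 is a hypothetical upper bound on the argument
   block size in bytes):

     void *args = __builtin_apply_args ();
     void *res  = __builtin_apply ((void (*) ()) target_fn, args, 64);
     __builtin_return (res);

   This forwards the current function's arguments to target_fn and
   returns its result, which is how the three builtins are meant to be
   combined.  */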
1589 /* Perform an untyped call and save the state required to perform an
1590 untyped return of whatever value was returned by the given function. */
1591
1592 static rtx
1593 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1594 {
1595 int size, align, regno;
1596 machine_mode mode;
1597 rtx incoming_args, result, reg, dest, src;
1598 rtx_call_insn *call_insn;
1599 rtx old_stack_level = 0;
1600 rtx call_fusage = 0;
1601 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1602
1603 arguments = convert_memory_address (Pmode, arguments);
1604
1605 /* Create a block where the return registers can be saved. */
1606 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1607
1608 /* Fetch the arg pointer from the ARGUMENTS block. */
1609 incoming_args = gen_reg_rtx (Pmode);
1610 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1611 if (!STACK_GROWS_DOWNWARD)
1612 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1613 incoming_args, 0, OPTAB_LIB_WIDEN);
1614
1615 /* Push a new argument block and copy the arguments. Do not allow
1616 the (potential) memcpy call below to interfere with our stack
1617 manipulations. */
1618 do_pending_stack_adjust ();
1619 NO_DEFER_POP;
1620
1621 /* Save the stack with nonlocal if available. */
1622 #ifdef HAVE_save_stack_nonlocal
1623 if (HAVE_save_stack_nonlocal)
1624 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1625 else
1626 #endif
1627 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1628
1629 /* Allocate a block of memory onto the stack and copy the memory
1630 arguments to the outgoing arguments address. We can pass TRUE
1631 as the 4th argument because we just saved the stack pointer
1632 and will restore it right after the call. */
1633 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);
1634
1635 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1636 may have already set current_function_calls_alloca to true.
1637 current_function_calls_alloca won't be set if argsize is zero,
1638 so we have to guarantee need_drap is true here. */
1639 if (SUPPORTS_STACK_ALIGNMENT)
1640 crtl->need_drap = true;
1641
1642 dest = virtual_outgoing_args_rtx;
1643 if (!STACK_GROWS_DOWNWARD)
1644 {
1645 if (CONST_INT_P (argsize))
1646 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1647 else
1648 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1649 }
1650 dest = gen_rtx_MEM (BLKmode, dest);
1651 set_mem_align (dest, PARM_BOUNDARY);
1652 src = gen_rtx_MEM (BLKmode, incoming_args);
1653 set_mem_align (src, PARM_BOUNDARY);
1654 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1655
1656 /* Refer to the argument block. */
1657 apply_args_size ();
1658 arguments = gen_rtx_MEM (BLKmode, arguments);
1659 set_mem_align (arguments, PARM_BOUNDARY);
1660
1661 /* Walk past the arg-pointer and structure value address. */
1662 size = GET_MODE_SIZE (Pmode);
1663 if (struct_value)
1664 size += GET_MODE_SIZE (Pmode);
1665
1666 /* Restore each of the registers previously saved. Make USE insns
1667 for each of these registers for use in making the call. */
1668 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1669 if ((mode = apply_args_mode[regno]) != VOIDmode)
1670 {
1671 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1672 if (size % align != 0)
1673 size = CEIL (size, align) * align;
1674 reg = gen_rtx_REG (mode, regno);
1675 emit_move_insn (reg, adjust_address (arguments, mode, size));
1676 use_reg (&call_fusage, reg);
1677 size += GET_MODE_SIZE (mode);
1678 }
1679
1680 /* Restore the structure value address unless this is passed as an
1681 "invisible" first argument. */
1682 size = GET_MODE_SIZE (Pmode);
1683 if (struct_value)
1684 {
1685 rtx value = gen_reg_rtx (Pmode);
1686 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1687 emit_move_insn (struct_value, value);
1688 if (REG_P (struct_value))
1689 use_reg (&call_fusage, struct_value);
1690 size += GET_MODE_SIZE (Pmode);
1691 }
1692
1693 /* All arguments and registers used for the call are set up by now! */
1694 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1695
1696 /* Ensure the address is valid.  A SYMBOL_REF is already valid, so nothing
1697 needs to be done, and we don't want to load it into a register as an
1698 optimization, because prepare_call_address already did so if needed.  */
1699 if (GET_CODE (function) != SYMBOL_REF)
1700 function = memory_address (FUNCTION_MODE, function);
1701
1702 /* Generate the actual call instruction and save the return value. */
1703 #ifdef HAVE_untyped_call
1704 if (HAVE_untyped_call)
1705 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1706 result, result_vector (1, result)));
1707 else
1708 #endif
1709 #ifdef HAVE_call_value
1710 if (HAVE_call_value)
1711 {
1712 rtx valreg = 0;
1713
1714 /* Locate the unique return register. It is not possible to
1715 express a call that sets more than one return register using
1716 call_value; use untyped_call for that. In fact, untyped_call
1717 only needs to save the return registers in the given block. */
1718 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1719 if ((mode = apply_result_mode[regno]) != VOIDmode)
1720 {
1721 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1722
1723 valreg = gen_rtx_REG (mode, regno);
1724 }
1725
1726 emit_call_insn (GEN_CALL_VALUE (valreg,
1727 gen_rtx_MEM (FUNCTION_MODE, function),
1728 const0_rtx, NULL_RTX, const0_rtx));
1729
1730 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1731 }
1732 else
1733 #endif
1734 gcc_unreachable ();
1735
1736 /* Find the CALL insn we just emitted, and attach the register usage
1737 information. */
1738 call_insn = last_call_insn ();
1739 add_function_usage_to (call_insn, call_fusage);
1740
1741 /* Restore the stack. */
1742 #ifdef HAVE_save_stack_nonlocal
1743 if (HAVE_save_stack_nonlocal)
1744 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1745 else
1746 #endif
1747 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1748 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1749
1750 OK_DEFER_POP;
1751
1752 /* Return the address of the result block. */
1753 result = copy_addr_to_reg (XEXP (result, 0));
1754 return convert_memory_address (ptr_mode, result);
1755 }
1756
1757 /* Perform an untyped return. */
1758
1759 static void
1760 expand_builtin_return (rtx result)
1761 {
1762 int size, align, regno;
1763 machine_mode mode;
1764 rtx reg;
1765 rtx_insn *call_fusage = 0;
1766
1767 result = convert_memory_address (Pmode, result);
1768
1769 apply_result_size ();
1770 result = gen_rtx_MEM (BLKmode, result);
1771
1772 #ifdef HAVE_untyped_return
1773 if (HAVE_untyped_return)
1774 {
1775 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1776 emit_barrier ();
1777 return;
1778 }
1779 #endif
1780
1781 /* Restore the return value and note that each value is used. */
1782 size = 0;
1783 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1784 if ((mode = apply_result_mode[regno]) != VOIDmode)
1785 {
1786 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1787 if (size % align != 0)
1788 size = CEIL (size, align) * align;
1789 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1790 emit_move_insn (reg, adjust_address (result, mode, size));
1791
1792 push_to_sequence (call_fusage);
1793 emit_use (reg);
1794 call_fusage = get_insns ();
1795 end_sequence ();
1796 size += GET_MODE_SIZE (mode);
1797 }
1798
1799 /* Put the USE insns before the return. */
1800 emit_insn (call_fusage);
1801
1802 /* Return whatever value was restored by jumping directly to the end
1803 of the function. */
1804 expand_naked_return ();
1805 }
1806
1807 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1808
1809 static enum type_class
1810 type_to_class (tree type)
1811 {
1812 switch (TREE_CODE (type))
1813 {
1814 case VOID_TYPE: return void_type_class;
1815 case INTEGER_TYPE: return integer_type_class;
1816 case ENUMERAL_TYPE: return enumeral_type_class;
1817 case BOOLEAN_TYPE: return boolean_type_class;
1818 case POINTER_TYPE: return pointer_type_class;
1819 case REFERENCE_TYPE: return reference_type_class;
1820 case OFFSET_TYPE: return offset_type_class;
1821 case REAL_TYPE: return real_type_class;
1822 case COMPLEX_TYPE: return complex_type_class;
1823 case FUNCTION_TYPE: return function_type_class;
1824 case METHOD_TYPE: return method_type_class;
1825 case RECORD_TYPE: return record_type_class;
1826 case UNION_TYPE:
1827 case QUAL_UNION_TYPE: return union_type_class;
1828 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1829 ? string_type_class : array_type_class);
1830 case LANG_TYPE: return lang_type_class;
1831 default: return no_type_class;
1832 }
1833 }
1834
1835 /* Expand a call EXP to __builtin_classify_type. */
1836
1837 static rtx
1838 expand_builtin_classify_type (tree exp)
1839 {
1840 if (call_expr_nargs (exp))
1841 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1842 return GEN_INT (no_type_class);
1843 }
1844
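/* For example (an illustrative reading of type_to_class, not GCC code):
   __builtin_classify_type (3.14) maps the double argument to
   real_type_class, while a call with no arguments yields no_type_class.  */
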
1845 /* This helper macro, meant to be used in mathfn_built_in below,
1846 determines which among a set of three builtin math functions is
1847 appropriate for a given type mode. The `F' and `L' cases are
1848 automatically generated from the `double' case. */
1849 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1850 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1851 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1852 fcodel = BUILT_IN_MATHFN##L ; break;
1853 /* Similar to above, but appends _R after any F/L suffix. */
1854 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1855 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1856 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1857 fcodel = BUILT_IN_MATHFN##L_R ; break;
1858
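/* As a concrete expansion (shown for orientation), CASE_MATHFN
   (BUILT_IN_SIN) produces:

     case BUILT_IN_SIN: case BUILT_IN_SINF: case BUILT_IN_SINL:
       fcode = BUILT_IN_SIN; fcodef = BUILT_IN_SINF;
       fcodel = BUILT_IN_SINL; break;

   covering the double, float and long double variants at once.  */
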
1859 /* Return the mathematical function equivalent to FN but operating directly on TYPE,
1860 if available. If IMPLICIT is true use the implicit builtin declaration,
1861 otherwise use the explicit declaration. If we can't do the conversion,
1862 return zero. */
1863
1864 static tree
1865 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit_p)
1866 {
1867 enum built_in_function fcode, fcodef, fcodel, fcode2;
1868
1869 switch (fn)
1870 {
1871 CASE_MATHFN (BUILT_IN_ACOS)
1872 CASE_MATHFN (BUILT_IN_ACOSH)
1873 CASE_MATHFN (BUILT_IN_ASIN)
1874 CASE_MATHFN (BUILT_IN_ASINH)
1875 CASE_MATHFN (BUILT_IN_ATAN)
1876 CASE_MATHFN (BUILT_IN_ATAN2)
1877 CASE_MATHFN (BUILT_IN_ATANH)
1878 CASE_MATHFN (BUILT_IN_CBRT)
1879 CASE_MATHFN (BUILT_IN_CEIL)
1880 CASE_MATHFN (BUILT_IN_CEXPI)
1881 CASE_MATHFN (BUILT_IN_COPYSIGN)
1882 CASE_MATHFN (BUILT_IN_COS)
1883 CASE_MATHFN (BUILT_IN_COSH)
1884 CASE_MATHFN (BUILT_IN_DREM)
1885 CASE_MATHFN (BUILT_IN_ERF)
1886 CASE_MATHFN (BUILT_IN_ERFC)
1887 CASE_MATHFN (BUILT_IN_EXP)
1888 CASE_MATHFN (BUILT_IN_EXP10)
1889 CASE_MATHFN (BUILT_IN_EXP2)
1890 CASE_MATHFN (BUILT_IN_EXPM1)
1891 CASE_MATHFN (BUILT_IN_FABS)
1892 CASE_MATHFN (BUILT_IN_FDIM)
1893 CASE_MATHFN (BUILT_IN_FLOOR)
1894 CASE_MATHFN (BUILT_IN_FMA)
1895 CASE_MATHFN (BUILT_IN_FMAX)
1896 CASE_MATHFN (BUILT_IN_FMIN)
1897 CASE_MATHFN (BUILT_IN_FMOD)
1898 CASE_MATHFN (BUILT_IN_FREXP)
1899 CASE_MATHFN (BUILT_IN_GAMMA)
1900 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1901 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1902 CASE_MATHFN (BUILT_IN_HYPOT)
1903 CASE_MATHFN (BUILT_IN_ILOGB)
1904 CASE_MATHFN (BUILT_IN_ICEIL)
1905 CASE_MATHFN (BUILT_IN_IFLOOR)
1906 CASE_MATHFN (BUILT_IN_INF)
1907 CASE_MATHFN (BUILT_IN_IRINT)
1908 CASE_MATHFN (BUILT_IN_IROUND)
1909 CASE_MATHFN (BUILT_IN_ISINF)
1910 CASE_MATHFN (BUILT_IN_J0)
1911 CASE_MATHFN (BUILT_IN_J1)
1912 CASE_MATHFN (BUILT_IN_JN)
1913 CASE_MATHFN (BUILT_IN_LCEIL)
1914 CASE_MATHFN (BUILT_IN_LDEXP)
1915 CASE_MATHFN (BUILT_IN_LFLOOR)
1916 CASE_MATHFN (BUILT_IN_LGAMMA)
1917 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1918 CASE_MATHFN (BUILT_IN_LLCEIL)
1919 CASE_MATHFN (BUILT_IN_LLFLOOR)
1920 CASE_MATHFN (BUILT_IN_LLRINT)
1921 CASE_MATHFN (BUILT_IN_LLROUND)
1922 CASE_MATHFN (BUILT_IN_LOG)
1923 CASE_MATHFN (BUILT_IN_LOG10)
1924 CASE_MATHFN (BUILT_IN_LOG1P)
1925 CASE_MATHFN (BUILT_IN_LOG2)
1926 CASE_MATHFN (BUILT_IN_LOGB)
1927 CASE_MATHFN (BUILT_IN_LRINT)
1928 CASE_MATHFN (BUILT_IN_LROUND)
1929 CASE_MATHFN (BUILT_IN_MODF)
1930 CASE_MATHFN (BUILT_IN_NAN)
1931 CASE_MATHFN (BUILT_IN_NANS)
1932 CASE_MATHFN (BUILT_IN_NEARBYINT)
1933 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1934 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1935 CASE_MATHFN (BUILT_IN_POW)
1936 CASE_MATHFN (BUILT_IN_POWI)
1937 CASE_MATHFN (BUILT_IN_POW10)
1938 CASE_MATHFN (BUILT_IN_REMAINDER)
1939 CASE_MATHFN (BUILT_IN_REMQUO)
1940 CASE_MATHFN (BUILT_IN_RINT)
1941 CASE_MATHFN (BUILT_IN_ROUND)
1942 CASE_MATHFN (BUILT_IN_SCALB)
1943 CASE_MATHFN (BUILT_IN_SCALBLN)
1944 CASE_MATHFN (BUILT_IN_SCALBN)
1945 CASE_MATHFN (BUILT_IN_SIGNBIT)
1946 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1947 CASE_MATHFN (BUILT_IN_SIN)
1948 CASE_MATHFN (BUILT_IN_SINCOS)
1949 CASE_MATHFN (BUILT_IN_SINH)
1950 CASE_MATHFN (BUILT_IN_SQRT)
1951 CASE_MATHFN (BUILT_IN_TAN)
1952 CASE_MATHFN (BUILT_IN_TANH)
1953 CASE_MATHFN (BUILT_IN_TGAMMA)
1954 CASE_MATHFN (BUILT_IN_TRUNC)
1955 CASE_MATHFN (BUILT_IN_Y0)
1956 CASE_MATHFN (BUILT_IN_Y1)
1957 CASE_MATHFN (BUILT_IN_YN)
1958
1959 default:
1960 return NULL_TREE;
1961 }
1962
1963 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1964 fcode2 = fcode;
1965 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1966 fcode2 = fcodef;
1967 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1968 fcode2 = fcodel;
1969 else
1970 return NULL_TREE;
1971
1972 if (implicit_p && !builtin_decl_implicit_p (fcode2))
1973 return NULL_TREE;
1974
1975 return builtin_decl_explicit (fcode2);
1976 }
1977
1978 /* Like mathfn_built_in_1(), but always use the implicit builtin declarations. */
1979
1980 tree
1981 mathfn_built_in (tree type, enum built_in_function fn)
1982 {
1983 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1984 }
1985
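/* Usage sketch: mathfn_built_in (float_type_node, BUILT_IN_SQRT)
   returns the decl for sqrtf when the implicit builtin is available,
   and NULL_TREE otherwise (illustrative, assuming a C99-ish target).  */
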
1986 /* If errno must be maintained, expand the RTL to check if the result,
1987 TARGET, of a built-in function call, EXP, is NaN, and if so set
1988 errno to EDOM. */
1989
1990 static void
1991 expand_errno_check (tree exp, rtx target)
1992 {
1993 rtx_code_label *lab = gen_label_rtx ();
1994
1995 /* Test the result; if it is NaN, set errno=EDOM because
1996 the argument was not in the domain. */
1997 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
1998 NULL_RTX, NULL, lab,
1999 /* The jump is very likely. */
2000 REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));
2001
2002 #ifdef TARGET_EDOM
2003 /* If this built-in doesn't throw an exception, set errno directly. */
2004 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
2005 {
2006 #ifdef GEN_ERRNO_RTX
2007 rtx errno_rtx = GEN_ERRNO_RTX;
2008 #else
2009 rtx errno_rtx
2010 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
2011 #endif
2012 emit_move_insn (errno_rtx,
2013 gen_int_mode (TARGET_EDOM, GET_MODE (errno_rtx)));
2014 emit_label (lab);
2015 return;
2016 }
2017 #endif
2018
2019 /* Make sure the library call isn't expanded as a tail call. */
2020 CALL_EXPR_TAILCALL (exp) = 0;
2021
2022 /* We can't set errno=EDOM directly; let the library call do it.
2023 Pop the arguments right away in case the call gets deleted. */
2024 NO_DEFER_POP;
2025 expand_call (exp, target, 0);
2026 OK_DEFER_POP;
2027 emit_label (lab);
2028 }
2029
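/* Concrete case (illustrative): for sqrt (-1.0) under -fmath-errno the
   result is NaN, so the self-comparison above is false, the jump to LAB
   is not taken, and errno ends up set to EDOM as the C library would.  */
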
2030 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
2031 Return NULL_RTX if a normal call should be emitted rather than expanding
2032 the function in-line. EXP is the expression that is a call to the builtin
2033 function; if convenient, the result should be placed in TARGET.
2034 SUBTARGET may be used as the target for computing one of EXP's operands. */
2035
2036 static rtx
2037 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
2038 {
2039 optab builtin_optab;
2040 rtx op0;
2041 rtx_insn *insns;
2042 tree fndecl = get_callee_fndecl (exp);
2043 machine_mode mode;
2044 bool errno_set = false;
2045 bool try_widening = false;
2046 tree arg;
2047
2048 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2049 return NULL_RTX;
2050
2051 arg = CALL_EXPR_ARG (exp, 0);
2052
2053 switch (DECL_FUNCTION_CODE (fndecl))
2054 {
2055 CASE_FLT_FN (BUILT_IN_SQRT):
2056 errno_set = ! tree_expr_nonnegative_p (arg);
2057 try_widening = true;
2058 builtin_optab = sqrt_optab;
2059 break;
2060 CASE_FLT_FN (BUILT_IN_EXP):
2061 errno_set = true; builtin_optab = exp_optab; break;
2062 CASE_FLT_FN (BUILT_IN_EXP10):
2063 CASE_FLT_FN (BUILT_IN_POW10):
2064 errno_set = true; builtin_optab = exp10_optab; break;
2065 CASE_FLT_FN (BUILT_IN_EXP2):
2066 errno_set = true; builtin_optab = exp2_optab; break;
2067 CASE_FLT_FN (BUILT_IN_EXPM1):
2068 errno_set = true; builtin_optab = expm1_optab; break;
2069 CASE_FLT_FN (BUILT_IN_LOGB):
2070 errno_set = true; builtin_optab = logb_optab; break;
2071 CASE_FLT_FN (BUILT_IN_LOG):
2072 errno_set = true; builtin_optab = log_optab; break;
2073 CASE_FLT_FN (BUILT_IN_LOG10):
2074 errno_set = true; builtin_optab = log10_optab; break;
2075 CASE_FLT_FN (BUILT_IN_LOG2):
2076 errno_set = true; builtin_optab = log2_optab; break;
2077 CASE_FLT_FN (BUILT_IN_LOG1P):
2078 errno_set = true; builtin_optab = log1p_optab; break;
2079 CASE_FLT_FN (BUILT_IN_ASIN):
2080 builtin_optab = asin_optab; break;
2081 CASE_FLT_FN (BUILT_IN_ACOS):
2082 builtin_optab = acos_optab; break;
2083 CASE_FLT_FN (BUILT_IN_TAN):
2084 builtin_optab = tan_optab; break;
2085 CASE_FLT_FN (BUILT_IN_ATAN):
2086 builtin_optab = atan_optab; break;
2087 CASE_FLT_FN (BUILT_IN_FLOOR):
2088 builtin_optab = floor_optab; break;
2089 CASE_FLT_FN (BUILT_IN_CEIL):
2090 builtin_optab = ceil_optab; break;
2091 CASE_FLT_FN (BUILT_IN_TRUNC):
2092 builtin_optab = btrunc_optab; break;
2093 CASE_FLT_FN (BUILT_IN_ROUND):
2094 builtin_optab = round_optab; break;
2095 CASE_FLT_FN (BUILT_IN_NEARBYINT):
2096 builtin_optab = nearbyint_optab;
2097 if (flag_trapping_math)
2098 break;
2099 /* Else fall through and expand as rint. */
2100 CASE_FLT_FN (BUILT_IN_RINT):
2101 builtin_optab = rint_optab; break;
2102 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
2103 builtin_optab = significand_optab; break;
2104 default:
2105 gcc_unreachable ();
2106 }
2107
2108 /* Make a suitable register to place result in. */
2109 mode = TYPE_MODE (TREE_TYPE (exp));
2110
2111 if (! flag_errno_math || ! HONOR_NANS (mode))
2112 errno_set = false;
2113
2114 /* Before working hard, check whether the instruction is available, but try
2115 to widen the mode for specific operations. */
2116 if ((optab_handler (builtin_optab, mode) != CODE_FOR_nothing
2117 || (try_widening && !excess_precision_type (TREE_TYPE (exp))))
2118 && (!errno_set || !optimize_insn_for_size_p ()))
2119 {
2120 rtx result = gen_reg_rtx (mode);
2121
2122 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2123 need to expand the argument again. This way, we will not perform
2124 side-effects more than once. */
2125 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2126
2127 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2128
2129 start_sequence ();
2130
2131 /* Compute into RESULT.
2132 Set RESULT to wherever the result comes back. */
2133 result = expand_unop (mode, builtin_optab, op0, result, 0);
2134
2135 if (result != 0)
2136 {
2137 if (errno_set)
2138 expand_errno_check (exp, result);
2139
2140 /* Output the entire sequence. */
2141 insns = get_insns ();
2142 end_sequence ();
2143 emit_insn (insns);
2144 return result;
2145 }
2146
2147 /* If we were unable to expand via the builtin, stop the sequence
2148 (without outputting the insns) and call the library function
2149 with the stabilized argument list. */
2150 end_sequence ();
2151 }
2152
2153 return expand_call (exp, target, target == const0_rtx);
2154 }
2155
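/* E.g. on a target whose sqrt_optab handler exists for DFmode, a call
   sqrt (x) compiled with -fno-math-errno expands to a single square-root
   insn here instead of a libcall (an illustrative outcome, not a
   guarantee on every target).  */
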
2156 /* Expand a call to the builtin binary math functions (pow and atan2).
2157 Return NULL_RTX if a normal call should be emitted rather than expanding the
2158 function in-line. EXP is the expression that is a call to the builtin
2159 function; if convenient, the result should be placed in TARGET.
2160 SUBTARGET may be used as the target for computing one of EXP's
2161 operands. */
2162
2163 static rtx
2164 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2165 {
2166 optab builtin_optab;
2167 rtx op0, op1, result;
2168 rtx_insn *insns;
2169 int op1_type = REAL_TYPE;
2170 tree fndecl = get_callee_fndecl (exp);
2171 tree arg0, arg1;
2172 machine_mode mode;
2173 bool errno_set = true;
2174
2175 switch (DECL_FUNCTION_CODE (fndecl))
2176 {
2177 CASE_FLT_FN (BUILT_IN_SCALBN):
2178 CASE_FLT_FN (BUILT_IN_SCALBLN):
2179 CASE_FLT_FN (BUILT_IN_LDEXP):
2180 op1_type = INTEGER_TYPE;
2181 default:
2182 break;
2183 }
2184
2185 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2186 return NULL_RTX;
2187
2188 arg0 = CALL_EXPR_ARG (exp, 0);
2189 arg1 = CALL_EXPR_ARG (exp, 1);
2190
2191 switch (DECL_FUNCTION_CODE (fndecl))
2192 {
2193 CASE_FLT_FN (BUILT_IN_POW):
2194 builtin_optab = pow_optab; break;
2195 CASE_FLT_FN (BUILT_IN_ATAN2):
2196 builtin_optab = atan2_optab; break;
2197 CASE_FLT_FN (BUILT_IN_SCALB):
2198 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2199 return 0;
2200 builtin_optab = scalb_optab; break;
2201 CASE_FLT_FN (BUILT_IN_SCALBN):
2202 CASE_FLT_FN (BUILT_IN_SCALBLN):
2203 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2204 return 0;
2205 /* Fall through... */
2206 CASE_FLT_FN (BUILT_IN_LDEXP):
2207 builtin_optab = ldexp_optab; break;
2208 CASE_FLT_FN (BUILT_IN_FMOD):
2209 builtin_optab = fmod_optab; break;
2210 CASE_FLT_FN (BUILT_IN_REMAINDER):
2211 CASE_FLT_FN (BUILT_IN_DREM):
2212 builtin_optab = remainder_optab; break;
2213 default:
2214 gcc_unreachable ();
2215 }
2216
2217 /* Make a suitable register to place result in. */
2218 mode = TYPE_MODE (TREE_TYPE (exp));
2219
2220 /* Before working hard, check whether the instruction is available. */
2221 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2222 return NULL_RTX;
2223
2224 result = gen_reg_rtx (mode);
2225
2226 if (! flag_errno_math || ! HONOR_NANS (mode))
2227 errno_set = false;
2228
2229 if (errno_set && optimize_insn_for_size_p ())
2230 return 0;
2231
2232 /* Always stabilize the argument list. */
2233 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2234 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2235
2236 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2237 op1 = expand_normal (arg1);
2238
2239 start_sequence ();
2240
2241 /* Compute into RESULT.
2242 Set RESULT to wherever the result comes back. */
2243 result = expand_binop (mode, builtin_optab, op0, op1,
2244 result, 0, OPTAB_DIRECT);
2245
2246 /* If we were unable to expand via the builtin, stop the sequence
2247 (without outputting the insns) and call the library function
2248 with the stabilized argument list. */
2249 if (result == 0)
2250 {
2251 end_sequence ();
2252 return expand_call (exp, target, target == const0_rtx);
2253 }
2254
2255 if (errno_set)
2256 expand_errno_check (exp, result);
2257
2258 /* Output the entire sequence. */
2259 insns = get_insns ();
2260 end_sequence ();
2261 emit_insn (insns);
2262
2263 return result;
2264 }
2265
2266 /* Expand a call to the builtin trinary math functions (fma).
2267 Return NULL_RTX if a normal call should be emitted rather than expanding the
2268 function in-line. EXP is the expression that is a call to the builtin
2269 function; if convenient, the result should be placed in TARGET.
2270 SUBTARGET may be used as the target for computing one of EXP's
2271 operands. */
2272
2273 static rtx
2274 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2275 {
2276 optab builtin_optab;
2277 rtx op0, op1, op2, result;
2278 rtx_insn *insns;
2279 tree fndecl = get_callee_fndecl (exp);
2280 tree arg0, arg1, arg2;
2281 machine_mode mode;
2282
2283 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2284 return NULL_RTX;
2285
2286 arg0 = CALL_EXPR_ARG (exp, 0);
2287 arg1 = CALL_EXPR_ARG (exp, 1);
2288 arg2 = CALL_EXPR_ARG (exp, 2);
2289
2290 switch (DECL_FUNCTION_CODE (fndecl))
2291 {
2292 CASE_FLT_FN (BUILT_IN_FMA):
2293 builtin_optab = fma_optab; break;
2294 default:
2295 gcc_unreachable ();
2296 }
2297
2298 /* Make a suitable register to place result in. */
2299 mode = TYPE_MODE (TREE_TYPE (exp));
2300
2301 /* Before working hard, check whether the instruction is available. */
2302 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2303 return NULL_RTX;
2304
2305 result = gen_reg_rtx (mode);
2306
2307 /* Always stabilize the argument list. */
2308 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2309 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2310 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2311
2312 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2313 op1 = expand_normal (arg1);
2314 op2 = expand_normal (arg2);
2315
2316 start_sequence ();
2317
2318 /* Compute into RESULT.
2319 Set RESULT to wherever the result comes back. */
2320 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2321 result, 0);
2322
2323 /* If we were unable to expand via the builtin, stop the sequence
2324 (without outputting the insns) and call the library function
2325 with the stabilized argument list. */
2326 if (result == 0)
2327 {
2328 end_sequence ();
2329 return expand_call (exp, target, target == const0_rtx);
2330 }
2331
2332 /* Output the entire sequence. */
2333 insns = get_insns ();
2334 end_sequence ();
2335 emit_insn (insns);
2336
2337 return result;
2338 }
2339
2340 /* Expand a call to the builtin sin and cos math functions.
2341 Return NULL_RTX if a normal call should be emitted rather than expanding the
2342 function in-line. EXP is the expression that is a call to the builtin
2343 function; if convenient, the result should be placed in TARGET.
2344 SUBTARGET may be used as the target for computing one of EXP's
2345 operands. */
2346
2347 static rtx
2348 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2349 {
2350 optab builtin_optab;
2351 rtx op0;
2352 rtx_insn *insns;
2353 tree fndecl = get_callee_fndecl (exp);
2354 machine_mode mode;
2355 tree arg;
2356
2357 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2358 return NULL_RTX;
2359
2360 arg = CALL_EXPR_ARG (exp, 0);
2361
2362 switch (DECL_FUNCTION_CODE (fndecl))
2363 {
2364 CASE_FLT_FN (BUILT_IN_SIN):
2365 CASE_FLT_FN (BUILT_IN_COS):
2366 builtin_optab = sincos_optab; break;
2367 default:
2368 gcc_unreachable ();
2369 }
2370
2371 /* Make a suitable register to place result in. */
2372 mode = TYPE_MODE (TREE_TYPE (exp));
2373
2374 /* Check if the sincos insn is available, otherwise fall back
2375 to the sin or cos insn. */
2376 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2377 switch (DECL_FUNCTION_CODE (fndecl))
2378 {
2379 CASE_FLT_FN (BUILT_IN_SIN):
2380 builtin_optab = sin_optab; break;
2381 CASE_FLT_FN (BUILT_IN_COS):
2382 builtin_optab = cos_optab; break;
2383 default:
2384 gcc_unreachable ();
2385 }
2386
2387 /* Before working hard, check whether the instruction is available. */
2388 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2389 {
2390 rtx result = gen_reg_rtx (mode);
2391
2392 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2393 need to expand the argument again. This way, we will not perform
2394 side-effects more than once. */
2395 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2396
2397 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2398
2399 start_sequence ();
2400
2401 /* Compute into RESULT.
2402 Set RESULT to wherever the result comes back. */
2403 if (builtin_optab == sincos_optab)
2404 {
2405 int ok;
2406
2407 switch (DECL_FUNCTION_CODE (fndecl))
2408 {
2409 CASE_FLT_FN (BUILT_IN_SIN):
2410 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2411 break;
2412 CASE_FLT_FN (BUILT_IN_COS):
2413 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2414 break;
2415 default:
2416 gcc_unreachable ();
2417 }
2418 gcc_assert (ok);
2419 }
2420 else
2421 result = expand_unop (mode, builtin_optab, op0, result, 0);
2422
2423 if (result != 0)
2424 {
2425 /* Output the entire sequence. */
2426 insns = get_insns ();
2427 end_sequence ();
2428 emit_insn (insns);
2429 return result;
2430 }
2431
2432 /* If we were unable to expand via the builtin, stop the sequence
2433 (without outputting the insns) and call the library function
2434 with the stabilized argument list. */
2435 end_sequence ();
2436 }
2437
2438 return expand_call (exp, target, target == const0_rtx);
2439 }
2440
2441 /* Given an interclass math builtin decl FNDECL and its argument ARG
2442 return an RTL instruction code that implements the functionality.
2443 If that isn't possible or available return CODE_FOR_nothing. */
2444
2445 static enum insn_code
2446 interclass_mathfn_icode (tree arg, tree fndecl)
2447 {
2448 bool errno_set = false;
2449 optab builtin_optab = unknown_optab;
2450 machine_mode mode;
2451
2452 switch (DECL_FUNCTION_CODE (fndecl))
2453 {
2454 CASE_FLT_FN (BUILT_IN_ILOGB):
2455 errno_set = true; builtin_optab = ilogb_optab; break;
2456 CASE_FLT_FN (BUILT_IN_ISINF):
2457 builtin_optab = isinf_optab; break;
2458 case BUILT_IN_ISNORMAL:
2459 case BUILT_IN_ISFINITE:
2460 CASE_FLT_FN (BUILT_IN_FINITE):
2461 case BUILT_IN_FINITED32:
2462 case BUILT_IN_FINITED64:
2463 case BUILT_IN_FINITED128:
2464 case BUILT_IN_ISINFD32:
2465 case BUILT_IN_ISINFD64:
2466 case BUILT_IN_ISINFD128:
2467 /* These builtins have no optabs (yet). */
2468 break;
2469 default:
2470 gcc_unreachable ();
2471 }
2472
2473 /* There's no easy way to detect the case we need to set EDOM. */
2474 if (flag_errno_math && errno_set)
2475 return CODE_FOR_nothing;
2476
2477 /* Optab mode depends on the mode of the input argument. */
2478 mode = TYPE_MODE (TREE_TYPE (arg));
2479
2480 if (builtin_optab)
2481 return optab_handler (builtin_optab, mode);
2482 return CODE_FOR_nothing;
2483 }
2484
2485 /* Expand a call to one of the builtin math functions that operate on
2486 a floating point argument and output an integer result (ilogb, isinf,
2487 isnan, etc).
2488 Return 0 if a normal call should be emitted rather than expanding the
2489 function in-line. EXP is the expression that is a call to the builtin
2490 function; if convenient, the result should be placed in TARGET. */
2491
2492 static rtx
2493 expand_builtin_interclass_mathfn (tree exp, rtx target)
2494 {
2495 enum insn_code icode = CODE_FOR_nothing;
2496 rtx op0;
2497 tree fndecl = get_callee_fndecl (exp);
2498 machine_mode mode;
2499 tree arg;
2500
2501 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2502 return NULL_RTX;
2503
2504 arg = CALL_EXPR_ARG (exp, 0);
2505 icode = interclass_mathfn_icode (arg, fndecl);
2506 mode = TYPE_MODE (TREE_TYPE (arg));
2507
2508 if (icode != CODE_FOR_nothing)
2509 {
2510 struct expand_operand ops[1];
2511 rtx_insn *last = get_last_insn ();
2512 tree orig_arg = arg;
2513
2514 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2515 need to expand the argument again. This way, we will not perform
2516 side-effects more than once. */
2517 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2518
2519 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2520
2521 if (mode != GET_MODE (op0))
2522 op0 = convert_to_mode (mode, op0, 0);
2523
2524 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2525 if (maybe_legitimize_operands (icode, 0, 1, ops)
2526 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2527 return ops[0].value;
2528
2529 delete_insns_since (last);
2530 CALL_EXPR_ARG (exp, 0) = orig_arg;
2531 }
2532
2533 return NULL_RTX;
2534 }
2535
2536 /* Expand a call to the builtin sincos math function.
2537 Return NULL_RTX if a normal call should be emitted rather than expanding the
2538 function in-line. EXP is the expression that is a call to the builtin
2539 function. */
2540
2541 static rtx
2542 expand_builtin_sincos (tree exp)
2543 {
2544 rtx op0, op1, op2, target1, target2;
2545 machine_mode mode;
2546 tree arg, sinp, cosp;
2547 int result;
2548 location_t loc = EXPR_LOCATION (exp);
2549 tree alias_type, alias_off;
2550
2551 if (!validate_arglist (exp, REAL_TYPE,
2552 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2553 return NULL_RTX;
2554
2555 arg = CALL_EXPR_ARG (exp, 0);
2556 sinp = CALL_EXPR_ARG (exp, 1);
2557 cosp = CALL_EXPR_ARG (exp, 2);
2558
2559 /* Make a suitable register to place result in. */
2560 mode = TYPE_MODE (TREE_TYPE (arg));
2561
2562 /* Check if sincos insn is available, otherwise emit the call. */
2563 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2564 return NULL_RTX;
2565
2566 target1 = gen_reg_rtx (mode);
2567 target2 = gen_reg_rtx (mode);
2568
2569 op0 = expand_normal (arg);
2570 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2571 alias_off = build_int_cst (alias_type, 0);
2572 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2573 sinp, alias_off));
2574 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2575 cosp, alias_off));
2576
2577 /* Compute into target1 and target2.
2578 Set TARGET to wherever the result comes back. */
2579 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2580 gcc_assert (result);
2581
2582 /* Move target1 and target2 to the memory locations indicated
2583 by op1 and op2. */
2584 emit_move_insn (op1, target1);
2585 emit_move_insn (op2, target2);
2586
2587 return const0_rtx;
2588 }
2589
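/* Source-level sketch (not from GCC itself): for
     sincos (theta, &s, &c);
   a target providing sincos_optab computes both results at once; the two
   emit_move_insn calls above then store them through SINP and COSP.  */
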
2590 /* Expand a call to the internal cexpi builtin to the sincos math function.
2591 EXP is the expression that is a call to the builtin function; if convenient,
2592 the result should be placed in TARGET. */
2593
2594 static rtx
2595 expand_builtin_cexpi (tree exp, rtx target)
2596 {
2597 tree fndecl = get_callee_fndecl (exp);
2598 tree arg, type;
2599 machine_mode mode;
2600 rtx op0, op1, op2;
2601 location_t loc = EXPR_LOCATION (exp);
2602
2603 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2604 return NULL_RTX;
2605
2606 arg = CALL_EXPR_ARG (exp, 0);
2607 type = TREE_TYPE (arg);
2608 mode = TYPE_MODE (TREE_TYPE (arg));
2609
2610 /* Try expanding via a sincos optab; fall back to emitting a libcall
2611 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2612 is only generated from sincos or cexp, or when either is available. */
2613 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2614 {
2615 op1 = gen_reg_rtx (mode);
2616 op2 = gen_reg_rtx (mode);
2617
2618 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2619
2620 /* Compute into op1 and op2. */
2621 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2622 }
2623 else if (targetm.libc_has_function (function_sincos))
2624 {
2625 tree call, fn = NULL_TREE;
2626 tree top1, top2;
2627 rtx op1a, op2a;
2628
2629 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2630 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2631 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2632 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2633 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2634 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2635 else
2636 gcc_unreachable ();
2637
2638 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2639 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2640 op1a = copy_addr_to_reg (XEXP (op1, 0));
2641 op2a = copy_addr_to_reg (XEXP (op2, 0));
2642 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2643 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2644
2645 /* Make sure not to fold the sincos call again. */
2646 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2647 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2648 call, 3, arg, top1, top2));
2649 }
2650 else
2651 {
2652 tree call, fn = NULL_TREE, narg;
2653 tree ctype = build_complex_type (type);
2654
2655 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2656 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2657 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2658 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2659 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2660 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2661 else
2662 gcc_unreachable ();
2663
2664 /* If we don't have a decl for cexp, create one. This is the
2665 friendliest fallback if the user calls __builtin_cexpi
2666 on a target without full C99 function support. */
2667 if (fn == NULL_TREE)
2668 {
2669 tree fntype;
2670 const char *name = NULL;
2671
2672 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2673 name = "cexpf";
2674 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2675 name = "cexp";
2676 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2677 name = "cexpl";
2678
2679 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2680 fn = build_fn_decl (name, fntype);
2681 }
2682
2683 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2684 build_real (type, dconst0), arg);
2685
2686 /* Make sure not to fold the cexp call again. */
2687 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2688 return expand_expr (build_call_nary (ctype, call, 1, narg),
2689 target, VOIDmode, EXPAND_NORMAL);
2690 }
2691
2692 /* Now build the proper return type. */
2693 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2694 make_tree (TREE_TYPE (arg), op2),
2695 make_tree (TREE_TYPE (arg), op1)),
2696 target, VOIDmode, EXPAND_NORMAL);
2697 }
2698
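/* __builtin_cexpi (x) stands for cexp (I * x) = cos (x) + I * sin (x);
   accordingly the COMPLEX_EXPR built above uses op2 (the cosine) as the
   real part and op1 (the sine) as the imaginary part.  */
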
2699 /* Conveniently construct a function call expression. FNDECL names the
2700 function to be called, N is the number of arguments, and the "..."
2701 parameters are the argument expressions. Unlike build_call_expr
2702 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2703
2704 static tree
2705 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2706 {
2707 va_list ap;
2708 tree fntype = TREE_TYPE (fndecl);
2709 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2710
2711 va_start (ap, n);
2712 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2713 va_end (ap);
2714 SET_EXPR_LOCATION (fn, loc);
2715 return fn;
2716 }
2717
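/* Usage sketch (hypothetical arguments): build_call_nofold_loc (loc,
   fndecl, 2, arg0, arg1) yields a CALL_EXPR invoking FNDECL on the two
   argument trees, with no folding applied.  */
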
2718 /* Expand a call to one of the builtin rounding functions gcc defines
2719 as an extension (lfloor and lceil). As these are gcc extensions we
2720 do not need to worry about setting errno to EDOM.
2721 If expanding via the optab fails, lower the expression to (int)(floor(x)).
2722 EXP is the expression that is a call to the builtin function;
2723 if convenient, the result should be placed in TARGET. */
2724
2725 static rtx
2726 expand_builtin_int_roundingfn (tree exp, rtx target)
2727 {
2728 convert_optab builtin_optab;
2729 rtx op0, tmp;
2730 rtx_insn *insns;
2731 tree fndecl = get_callee_fndecl (exp);
2732 enum built_in_function fallback_fn;
2733 tree fallback_fndecl;
2734 machine_mode mode;
2735 tree arg;
2736
2737 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2738 gcc_unreachable ();
2739
2740 arg = CALL_EXPR_ARG (exp, 0);
2741
2742 switch (DECL_FUNCTION_CODE (fndecl))
2743 {
2744 CASE_FLT_FN (BUILT_IN_ICEIL):
2745 CASE_FLT_FN (BUILT_IN_LCEIL):
2746 CASE_FLT_FN (BUILT_IN_LLCEIL):
2747 builtin_optab = lceil_optab;
2748 fallback_fn = BUILT_IN_CEIL;
2749 break;
2750
2751 CASE_FLT_FN (BUILT_IN_IFLOOR):
2752 CASE_FLT_FN (BUILT_IN_LFLOOR):
2753 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2754 builtin_optab = lfloor_optab;
2755 fallback_fn = BUILT_IN_FLOOR;
2756 break;
2757
2758 default:
2759 gcc_unreachable ();
2760 }
2761
2762 /* Make a suitable register to place result in. */
2763 mode = TYPE_MODE (TREE_TYPE (exp));
2764
2765 target = gen_reg_rtx (mode);
2766
2767 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2768 need to expand the argument again. This way, we will not perform
2769 side-effects more than once. */
2770 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2771
2772 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2773
2774 start_sequence ();
2775
2776 /* Compute into TARGET. */
2777 if (expand_sfix_optab (target, op0, builtin_optab))
2778 {
2779 /* Output the entire sequence. */
2780 insns = get_insns ();
2781 end_sequence ();
2782 emit_insn (insns);
2783 return target;
2784 }
2785
2786 /* If we were unable to expand via the builtin, stop the sequence
2787 (without outputting the insns). */
2788 end_sequence ();
2789
2790 /* Fall back to floating point rounding optab. */
2791 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2792
2793 /* For non-C99 targets we may end up without a fallback fndecl here
2794 if the user called __builtin_lfloor directly. In this case emit
2795 a call to the floor/ceil variants nevertheless. This should result
2796 in the best user experience on targets lacking full C99 support. */
2797 if (fallback_fndecl == NULL_TREE)
2798 {
2799 tree fntype;
2800 const char *name = NULL;
2801
2802 switch (DECL_FUNCTION_CODE (fndecl))
2803 {
2804 case BUILT_IN_ICEIL:
2805 case BUILT_IN_LCEIL:
2806 case BUILT_IN_LLCEIL:
2807 name = "ceil";
2808 break;
2809 case BUILT_IN_ICEILF:
2810 case BUILT_IN_LCEILF:
2811 case BUILT_IN_LLCEILF:
2812 name = "ceilf";
2813 break;
2814 case BUILT_IN_ICEILL:
2815 case BUILT_IN_LCEILL:
2816 case BUILT_IN_LLCEILL:
2817 name = "ceill";
2818 break;
2819 case BUILT_IN_IFLOOR:
2820 case BUILT_IN_LFLOOR:
2821 case BUILT_IN_LLFLOOR:
2822 name = "floor";
2823 break;
2824 case BUILT_IN_IFLOORF:
2825 case BUILT_IN_LFLOORF:
2826 case BUILT_IN_LLFLOORF:
2827 name = "floorf";
2828 break;
2829 case BUILT_IN_IFLOORL:
2830 case BUILT_IN_LFLOORL:
2831 case BUILT_IN_LLFLOORL:
2832 name = "floorl";
2833 break;
2834 default:
2835 gcc_unreachable ();
2836 }
2837
2838 fntype = build_function_type_list (TREE_TYPE (arg),
2839 TREE_TYPE (arg), NULL_TREE);
2840 fallback_fndecl = build_fn_decl (name, fntype);
2841 }
2842
2843 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2844
2845 tmp = expand_normal (exp);
2846 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2847
2848 /* Truncate the result of floating point optab to integer
2849 via expand_fix (). */
2850 target = gen_reg_rtx (mode);
2851 expand_fix (target, tmp, 0);
2852
2853 return target;
2854 }
2855
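/* Lowering sketch (illustrative): when lfloor_optab has no handler,
     long l = __builtin_lfloor (x);
   is expanded, in effect, as
     long l = (long) floor (x);
   via the fallback call and expand_fix above.  */
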
2856 /* Expand a call to one of the builtin math functions doing integer
2857 conversion (lrint).
2858 Return 0 if a normal call should be emitted rather than expanding the
2859 function in-line. EXP is the expression that is a call to the builtin
2860 function; if convenient, the result should be placed in TARGET. */
2861
2862 static rtx
2863 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2864 {
2865 convert_optab builtin_optab;
2866 rtx op0;
2867 rtx_insn *insns;
2868 tree fndecl = get_callee_fndecl (exp);
2869 tree arg;
2870 machine_mode mode;
2871 enum built_in_function fallback_fn = BUILT_IN_NONE;
2872
2873 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2874 gcc_unreachable ();
2875
2876 arg = CALL_EXPR_ARG (exp, 0);
2877
2878 switch (DECL_FUNCTION_CODE (fndecl))
2879 {
2880 CASE_FLT_FN (BUILT_IN_IRINT):
2881 fallback_fn = BUILT_IN_LRINT;
2882 /* FALLTHRU */
2883 CASE_FLT_FN (BUILT_IN_LRINT):
2884 CASE_FLT_FN (BUILT_IN_LLRINT):
2885 builtin_optab = lrint_optab;
2886 break;
2887
2888 CASE_FLT_FN (BUILT_IN_IROUND):
2889 fallback_fn = BUILT_IN_LROUND;
2890 /* FALLTHRU */
2891 CASE_FLT_FN (BUILT_IN_LROUND):
2892 CASE_FLT_FN (BUILT_IN_LLROUND):
2893 builtin_optab = lround_optab;
2894 break;
2895
2896 default:
2897 gcc_unreachable ();
2898 }
2899
2900 /* There's no easy way to detect the case we need to set EDOM. */
2901 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2902 return NULL_RTX;
2903
2904 /* Make a suitable register to place result in. */
2905 mode = TYPE_MODE (TREE_TYPE (exp));
2906
2907 /* Try the inline expansion when errno does not need to be maintained. */
2908 if (!flag_errno_math)
2909 {
2910 rtx result = gen_reg_rtx (mode);
2911
2912 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2913 need to expand the argument again. This way, we will not perform
2914 side-effects more than once. */
2915 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2916
2917 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2918
2919 start_sequence ();
2920
2921 if (expand_sfix_optab (result, op0, builtin_optab))
2922 {
2923 /* Output the entire sequence. */
2924 insns = get_insns ();
2925 end_sequence ();
2926 emit_insn (insns);
2927 return result;
2928 }
2929
2930 /* If we were unable to expand via the builtin, stop the sequence
2931 (without outputting the insns) and call the library function
2932 with the stabilized argument list. */
2933 end_sequence ();
2934 }
2935
2936 if (fallback_fn != BUILT_IN_NONE)
2937 {
2938 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2939 targets, (int) round (x) should never be transformed into
2940 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2941 a call to lround in the hope that the target provides at least some
2942 C99 functions. This should result in the best user experience on
2943 targets lacking full C99 support. */
2944 tree fallback_fndecl = mathfn_built_in_1 (TREE_TYPE (arg),
2945 fallback_fn, 0);
2946
2947 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2948 fallback_fndecl, 1, arg);
2949
2950 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2951 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2952 return convert_to_mode (mode, target, 0);
2953 }
2954
2955 return expand_call (exp, target, target == const0_rtx);
2956 }
2957
2958 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2959 a normal call should be emitted rather than expanding the function
2960 in-line. EXP is the expression that is a call to the builtin
2961 function; if convenient, the result should be placed in TARGET. */
2962
2963 static rtx
2964 expand_builtin_powi (tree exp, rtx target)
2965 {
2966 tree arg0, arg1;
2967 rtx op0, op1;
2968 machine_mode mode;
2969 machine_mode mode2;
2970
2971 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2972 return NULL_RTX;
2973
2974 arg0 = CALL_EXPR_ARG (exp, 0);
2975 arg1 = CALL_EXPR_ARG (exp, 1);
2976 mode = TYPE_MODE (TREE_TYPE (exp));
2977
2978 /* Emit a libcall to libgcc. */
2979
2980 /* Mode of the 2nd argument must match that of an int. */
2981 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2982
2983 if (target == NULL_RTX)
2984 target = gen_reg_rtx (mode);
2985
2986 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2987 if (GET_MODE (op0) != mode)
2988 op0 = convert_to_mode (mode, op0, 0);
2989 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2990 if (GET_MODE (op1) != mode2)
2991 op1 = convert_to_mode (mode2, op1, 0);
2992
2993 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2994 target, LCT_CONST, mode, 2,
2995 op0, mode, op1, mode2);
2996
2997 return target;
2998 }
2999
3000 /* Expand expression EXP which is a call to the strlen builtin. Return
3001 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3002 try to get the result in TARGET, if convenient. */
3003
3004 static rtx
3005 expand_builtin_strlen (tree exp, rtx target,
3006 machine_mode target_mode)
3007 {
3008 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3009 return NULL_RTX;
3010 else
3011 {
3012 struct expand_operand ops[4];
3013 rtx pat;
3014 tree len;
3015 tree src = CALL_EXPR_ARG (exp, 0);
3016 rtx src_reg;
3017 rtx_insn *before_strlen;
3018 machine_mode insn_mode = target_mode;
3019 enum insn_code icode = CODE_FOR_nothing;
3020 unsigned int align;
3021
3022 /* If the length can be computed at compile-time, return it. */
3023 len = c_strlen (src, 0);
3024 if (len)
3025 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3026
3027 /* If the length can be computed at compile-time and is a constant
3028 integer, but there are side-effects in src, evaluate
3029 src for side-effects, then return len.
3030 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3031 can be optimized into: i++; x = 3; */
3032 len = c_strlen (src, 1);
3033 if (len && TREE_CODE (len) == INTEGER_CST)
3034 {
3035 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3036 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3037 }
3038
3039 align = get_pointer_alignment (src) / BITS_PER_UNIT;
3040
3041 /* If SRC is not a pointer type, don't do this operation inline. */
3042 if (align == 0)
3043 return NULL_RTX;
3044
3045 /* Bail out if we can't compute strlen in the right mode. */
3046 while (insn_mode != VOIDmode)
3047 {
3048 icode = optab_handler (strlen_optab, insn_mode);
3049 if (icode != CODE_FOR_nothing)
3050 break;
3051
3052 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3053 }
3054 if (insn_mode == VOIDmode)
3055 return NULL_RTX;
3056
3057 /* Make a place to hold the source address. We will not expand
3058 the actual source until we are sure that the expansion will
3059 not fail -- there are trees that cannot be expanded twice. */
3060 src_reg = gen_reg_rtx (Pmode);
3061
3062 /* Mark the beginning of the strlen sequence so we can emit the
3063 source operand later. */
3064 before_strlen = get_last_insn ();
3065
3066 create_output_operand (&ops[0], target, insn_mode);
3067 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3068 create_integer_operand (&ops[2], 0);
3069 create_integer_operand (&ops[3], align);
3070 if (!maybe_expand_insn (icode, 4, ops))
3071 return NULL_RTX;
3072
3073 /* Now that we are assured of success, expand the source. */
3074 start_sequence ();
3075 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3076 if (pat != src_reg)
3077 {
3078 #ifdef POINTERS_EXTEND_UNSIGNED
3079 if (GET_MODE (pat) != Pmode)
3080 pat = convert_to_mode (Pmode, pat,
3081 POINTERS_EXTEND_UNSIGNED);
3082 #endif
3083 emit_move_insn (src_reg, pat);
3084 }
3085 pat = get_insns ();
3086 end_sequence ();
3087
3088 if (before_strlen)
3089 emit_insn_after (pat, before_strlen);
3090 else
3091 emit_insn_before (pat, get_insns ());
3092
3093 /* Return the value in the proper mode for this function. */
3094 if (GET_MODE (ops[0].value) == target_mode)
3095 target = ops[0].value;
3096 else if (target != 0)
3097 convert_move (target, ops[0].value, 0);
3098 else
3099 target = convert_to_mode (target_mode, ops[0].value, 0);
3100
3101 return target;
3102 }
3103 }
3104
3105 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3106 bytes from constant string DATA + OFFSET and return it as target
3107 constant. */
3108
3109 static rtx
3110 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3111 machine_mode mode)
3112 {
3113 const char *str = (const char *) data;
3114
3115 gcc_assert (offset >= 0
3116 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3117 <= strlen (str) + 1));
3118
3119 return c_readstr (str + offset, mode);
3120 }
3121
3122 /* LEN specifies the length of the block for a memcpy/memset operation.
3123 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
3124 In some cases we can make a very likely guess at the maximum size and
3125 record it in PROBABLE_MAX_SIZE. */
3126
3127 static void
3128 determine_block_size (tree len, rtx len_rtx,
3129 unsigned HOST_WIDE_INT *min_size,
3130 unsigned HOST_WIDE_INT *max_size,
3131 unsigned HOST_WIDE_INT *probable_max_size)
3132 {
3133 if (CONST_INT_P (len_rtx))
3134 {
3135 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3136 return;
3137 }
3138 else
3139 {
3140 wide_int min, max;
3141 enum value_range_type range_type = VR_UNDEFINED;
3142
3143 /* Determine bounds from the type. */
3144 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3145 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3146 else
3147 *min_size = 0;
3148 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3149 *probable_max_size = *max_size
3150 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3151 else
3152 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3153
3154 if (TREE_CODE (len) == SSA_NAME)
3155 range_type = get_range_info (len, &min, &max);
3156 if (range_type == VR_RANGE)
3157 {
3158 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3159 *min_size = min.to_uhwi ();
3160 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3161 *probable_max_size = *max_size = max.to_uhwi ();
3162 }
3163 else if (range_type == VR_ANTI_RANGE)
3164 {
3165 /* An anti-range 0...N lets us determine the minimal size as N+1. */
3166 if (min == 0)
3167 {
3168 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
3169 *min_size = max.to_uhwi () + 1;
3170 }
3171 /* Code like
3172
3173 int n;
3174 if (n < 100)
3175 memcpy (a, b, n)
3176
3177 produces an anti-range allowing negative values of N. We can
3178 still use that information to guess that N is not negative.
3179 */
3180 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3181 *probable_max_size = min.to_uhwi () - 1;
3182 }
3183 }
3184 gcc_checking_assert (*max_size <=
3185 (unsigned HOST_WIDE_INT)
3186 GET_MODE_MASK (GET_MODE (len_rtx)));
3187 }
3188
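/* Example (illustrative): for memcpy (a, b, 16) the length is a
   CONST_INT, so *MIN_SIZE, *MAX_SIZE and *PROBABLE_MAX_SIZE all become
   16; for a length VRP has bounded to [4, 64], *MIN_SIZE becomes 4 and
   both maxima become 64.  */
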
3189 /* Helper function to do the actual work for expand_builtin_memcpy. */
3190
3191 static rtx
3192 expand_builtin_memcpy_args (tree dest, tree src, tree len, rtx target, tree exp)
3193 {
3194 const char *src_str;
3195 unsigned int src_align = get_pointer_alignment (src);
3196 unsigned int dest_align = get_pointer_alignment (dest);
3197 rtx dest_mem, src_mem, dest_addr, len_rtx;
3198 HOST_WIDE_INT expected_size = -1;
3199 unsigned int expected_align = 0;
3200 unsigned HOST_WIDE_INT min_size;
3201 unsigned HOST_WIDE_INT max_size;
3202 unsigned HOST_WIDE_INT probable_max_size;
3203
3204 /* If DEST is not a pointer type, call the normal function. */
3205 if (dest_align == 0)
3206 return NULL_RTX;
3207
3208 /* If SRC is not a pointer type, don't do this
3209 operation in-line. */
3210 if (src_align == 0)
3211 return NULL_RTX;
3212
3213 if (currently_expanding_gimple_stmt)
3214 stringop_block_profile (currently_expanding_gimple_stmt,
3215 &expected_align, &expected_size);
3216
3217 if (expected_align < dest_align)
3218 expected_align = dest_align;
3219 dest_mem = get_memory_rtx (dest, len);
3220 set_mem_align (dest_mem, dest_align);
3221 len_rtx = expand_normal (len);
3222 determine_block_size (len, len_rtx, &min_size, &max_size,
3223 &probable_max_size);
3224 src_str = c_getstr (src);
3225
3226 /* If SRC is a string constant and block move would be done
3227 by pieces, we can avoid loading the string from memory
3228 and only store the computed constants. */
3229 if (src_str
3230 && CONST_INT_P (len_rtx)
3231 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3232 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3233 CONST_CAST (char *, src_str),
3234 dest_align, false))
3235 {
3236 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3237 builtin_memcpy_read_str,
3238 CONST_CAST (char *, src_str),
3239 dest_align, false, 0);
3240 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3241 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3242 return dest_mem;
3243 }
3244
3245 src_mem = get_memory_rtx (src, len);
3246 set_mem_align (src_mem, src_align);
3247
3248 /* Copy word part most expediently. */
3249 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3250 CALL_EXPR_TAILCALL (exp)
3251 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3252 expected_align, expected_size,
3253 min_size, max_size, probable_max_size);
3254
3255 if (dest_addr == 0)
3256 {
3257 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3258 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3259 }
3260
3261 return dest_addr;
3262 }
3263
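/* Sketch of the store-by-pieces path above (not code from GCC): for
     memcpy (buf, "hi", 3);
   SRC is a string constant and the length fits, so the three bytes are
   emitted as immediate stores with no load from the string literal.  */
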
3264 /* Expand a call EXP to the memcpy builtin.
3265 Return NULL_RTX if we failed; the caller should emit a normal call,
3266 otherwise try to get the result in TARGET, if convenient (and in
3267 mode MODE if that's convenient). */
3268
3269 static rtx
3270 expand_builtin_memcpy (tree exp, rtx target)
3271 {
3272 if (!validate_arglist (exp,
3273 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3274 return NULL_RTX;
3275 else
3276 {
3277 tree dest = CALL_EXPR_ARG (exp, 0);
3278 tree src = CALL_EXPR_ARG (exp, 1);
3279 tree len = CALL_EXPR_ARG (exp, 2);
3280 return expand_builtin_memcpy_args (dest, src, len, target, exp);
3281 }
3282 }
3283
3284 /* Expand an instrumented call EXP to the memcpy builtin.
3285 Return NULL_RTX if we failed; the caller should emit a normal call,
3286 otherwise try to get the result in TARGET, if convenient (and in
3287 mode MODE if that's convenient). */
3288
3289 static rtx
3290 expand_builtin_memcpy_with_bounds (tree exp, rtx target)
3291 {
3292 if (!validate_arglist (exp,
3293 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3294 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3295 INTEGER_TYPE, VOID_TYPE))
3296 return NULL_RTX;
3297 else
3298 {
3299 tree dest = CALL_EXPR_ARG (exp, 0);
3300 tree src = CALL_EXPR_ARG (exp, 2);
3301 tree len = CALL_EXPR_ARG (exp, 4);
3302 rtx res = expand_builtin_memcpy_args (dest, src, len, target, exp);
3303
3304 /* Return src bounds with the result. */
3305 if (res)
3306 {
3307 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3308 expand_normal (CALL_EXPR_ARG (exp, 1)));
3309 res = chkp_join_splitted_slot (res, bnd);
3310 }
3311 return res;
3312 }
3313 }
3314
3315 /* Expand a call EXP to the mempcpy builtin.
3316 Return NULL_RTX if we failed; the caller should emit a normal call,
3317 otherwise try to get the result in TARGET, if convenient (and in
3318 mode MODE if that's convenient). If ENDP is 0 return the
3319 destination pointer, if ENDP is 1 return the end pointer ala
3320 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3321 stpcpy. */
3322
3323 static rtx
3324 expand_builtin_mempcpy (tree exp, rtx target, machine_mode mode)
3325 {
3326 if (!validate_arglist (exp,
3327 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3328 return NULL_RTX;
3329 else
3330 {
3331 tree dest = CALL_EXPR_ARG (exp, 0);
3332 tree src = CALL_EXPR_ARG (exp, 1);
3333 tree len = CALL_EXPR_ARG (exp, 2);
3334 return expand_builtin_mempcpy_args (dest, src, len,
3335 target, mode, /*endp=*/ 1,
3336 exp);
3337 }
3338 }
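
/* Hedged reference model, not GCC source: the mempcpy contract that
   ENDP == 1 implements -- the return value is DEST + LEN rather than
   DEST, written out in plain C to make the convention concrete.  */
static void *
mempcpy_ref (void *dest, const void *src, __SIZE_TYPE__ len)
{
  __builtin_memcpy (dest, src, len);
  return (char *) dest + len;		/* the end pointer, a la mempcpy */
}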
3339
3340 /* Expand an instrumented call EXP to the mempcpy builtin.
3341 Return NULL_RTX if we failed; the caller should emit a normal call;
3342 otherwise try to get the result in TARGET, if convenient (and in
3343 mode MODE if that's convenient). */
3344
3345 static rtx
3346 expand_builtin_mempcpy_with_bounds (tree exp, rtx target, machine_mode mode)
3347 {
3348 if (!validate_arglist (exp,
3349 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3350 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3351 INTEGER_TYPE, VOID_TYPE))
3352 return NULL_RTX;
3353 else
3354 {
3355 tree dest = CALL_EXPR_ARG (exp, 0);
3356 tree src = CALL_EXPR_ARG (exp, 2);
3357 tree len = CALL_EXPR_ARG (exp, 4);
3358 rtx res = expand_builtin_mempcpy_args (dest, src, len, target,
3359 mode, 1, exp);
3360
3361 /* Return src bounds with the result. */
3362 if (res)
3363 {
3364 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3365 expand_normal (CALL_EXPR_ARG (exp, 1)));
3366 res = chkp_join_splitted_slot (res, bnd);
3367 }
3368 return res;
3369 }
3370 }
3371
3372 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3373 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3374 so that this can also be called without constructing an actual CALL_EXPR.
3375 The other arguments and return value are the same as for
3376 expand_builtin_mempcpy. */
3377
3378 static rtx
3379 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3380 rtx target, machine_mode mode, int endp,
3381 tree orig_exp)
3382 {
3383 tree fndecl = get_callee_fndecl (orig_exp);
3384
3385 /* If return value is ignored, transform mempcpy into memcpy. */
3386 if (target == const0_rtx
3387 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP
3388 && builtin_decl_implicit_p (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP))
3389 {
3390 tree fn = builtin_decl_implicit (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP);
3391 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3392 dest, src, len);
3393 return expand_expr (result, target, mode, EXPAND_NORMAL);
3394 }
3395 else if (target == const0_rtx
3396 && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
3397 {
3398 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3399 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3400 dest, src, len);
3401 return expand_expr (result, target, mode, EXPAND_NORMAL);
3402 }
3403 else
3404 {
3405 const char *src_str;
3406 unsigned int src_align = get_pointer_alignment (src);
3407 unsigned int dest_align = get_pointer_alignment (dest);
3408 rtx dest_mem, src_mem, len_rtx;
3409
3410 /* If either SRC or DEST is not a pointer type, don't do this
3411 operation in-line. */
3412 if (dest_align == 0 || src_align == 0)
3413 return NULL_RTX;
3414
3415 /* If LEN is not constant, call the normal function. */
3416 if (! tree_fits_uhwi_p (len))
3417 return NULL_RTX;
3418
3419 len_rtx = expand_normal (len);
3420 src_str = c_getstr (src);
3421
3422 /* If SRC is a string constant and block move would be done
3423 by pieces, we can avoid loading the string from memory
3424 and only store the computed constants. */
3425 if (src_str
3426 && CONST_INT_P (len_rtx)
3427 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3428 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3429 CONST_CAST (char *, src_str),
3430 dest_align, false))
3431 {
3432 dest_mem = get_memory_rtx (dest, len);
3433 set_mem_align (dest_mem, dest_align);
3434 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3435 builtin_memcpy_read_str,
3436 CONST_CAST (char *, src_str),
3437 dest_align, false, endp);
3438 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3439 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3440 return dest_mem;
3441 }
3442
3443 if (CONST_INT_P (len_rtx)
3444 && can_move_by_pieces (INTVAL (len_rtx),
3445 MIN (dest_align, src_align)))
3446 {
3447 dest_mem = get_memory_rtx (dest, len);
3448 set_mem_align (dest_mem, dest_align);
3449 src_mem = get_memory_rtx (src, len);
3450 set_mem_align (src_mem, src_align);
3451 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3452 MIN (dest_align, src_align), endp);
3453 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3454 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3455 return dest_mem;
3456 }
3457
3458 return NULL_RTX;
3459 }
3460 }
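
/* Illustration, not part of GCC: the three ENDP return-value
   conventions threaded through the helpers above, written over plain
   pointers.  */
static char *
endp_result_example (char *dest, __SIZE_TYPE__ len, int endp)
{
  if (endp == 0)
    return dest;		/* memcpy-style: destination pointer */
  if (endp == 1)
    return dest + len;		/* mempcpy-style: end pointer */
  return dest + len - 1;	/* stpcpy-style: end pointer minus one */
}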
3461
3462 #ifndef HAVE_movstr
3463 # define HAVE_movstr 0
3464 # define CODE_FOR_movstr CODE_FOR_nothing
3465 #endif
3466
3467 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3468 we failed, the caller should emit a normal call, otherwise try to
3469 get the result in TARGET, if convenient. If ENDP is 0 return the
3470 destination pointer, if ENDP is 1 return the end pointer ala
3471 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3472 stpcpy. */
3473
3474 static rtx
3475 expand_movstr (tree dest, tree src, rtx target, int endp)
3476 {
3477 struct expand_operand ops[3];
3478 rtx dest_mem;
3479 rtx src_mem;
3480
3481 if (!HAVE_movstr)
3482 return NULL_RTX;
3483
3484 dest_mem = get_memory_rtx (dest, NULL);
3485 src_mem = get_memory_rtx (src, NULL);
3486 if (!endp)
3487 {
3488 target = force_reg (Pmode, XEXP (dest_mem, 0));
3489 dest_mem = replace_equiv_address (dest_mem, target);
3490 }
3491
3492 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3493 create_fixed_operand (&ops[1], dest_mem);
3494 create_fixed_operand (&ops[2], src_mem);
3495 if (!maybe_expand_insn (CODE_FOR_movstr, 3, ops))
3496 return NULL_RTX;
3497
3498 if (endp && target != const0_rtx)
3499 {
3500 target = ops[0].value;
3501 /* movstr is supposed to set end to the address of the NUL
3502 terminator. If the caller requested a mempcpy-like return value,
3503 adjust it. */
3504 if (endp == 1)
3505 {
3506 rtx tem = plus_constant (GET_MODE (target),
3507 gen_lowpart (GET_MODE (target), target), 1);
3508 emit_move_insn (target, force_operand (tem, NULL_RTX));
3509 }
3510 }
3511 return target;
3512 }
3513
3514 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3515 NULL_RTX if we failed the caller should emit a normal call, otherwise
3516 try to get the result in TARGET, if convenient (and in mode MODE if that's
3517 convenient). */
3518
3519 static rtx
3520 expand_builtin_strcpy (tree exp, rtx target)
3521 {
3522 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3523 {
3524 tree dest = CALL_EXPR_ARG (exp, 0);
3525 tree src = CALL_EXPR_ARG (exp, 1);
3526 return expand_builtin_strcpy_args (dest, src, target);
3527 }
3528 return NULL_RTX;
3529 }
3530
3531 /* Helper function to do the actual work for expand_builtin_strcpy. The
3532 arguments to the builtin_strcpy call DEST and SRC are broken out
3533 so that this can also be called without constructing an actual CALL_EXPR.
3534 The other arguments and return value are the same as for
3535 expand_builtin_strcpy. */
3536
3537 static rtx
3538 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3539 {
3540 return expand_movstr (dest, src, target, /*endp=*/0);
3541 }
3542
3543 /* Expand a call EXP to the stpcpy builtin.
3544 Return NULL_RTX if we failed; the caller should emit a normal call;
3545 otherwise try to get the result in TARGET, if convenient (and in
3546 mode MODE if that's convenient). */
3547
3548 static rtx
3549 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3550 {
3551 tree dst, src;
3552 location_t loc = EXPR_LOCATION (exp);
3553
3554 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3555 return NULL_RTX;
3556
3557 dst = CALL_EXPR_ARG (exp, 0);
3558 src = CALL_EXPR_ARG (exp, 1);
3559
3560 /* If return value is ignored, transform stpcpy into strcpy. */
3561 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3562 {
3563 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3564 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3565 return expand_expr (result, target, mode, EXPAND_NORMAL);
3566 }
3567 else
3568 {
3569 tree len, lenp1;
3570 rtx ret;
3571
3572 /* Ensure we get an actual string whose length can be evaluated at
3573 compile-time, not an expression containing a string. This is
3574 because the latter will potentially produce pessimized code
3575 when used to produce the return value. */
3576 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3577 return expand_movstr (dst, src, target, /*endp=*/2);
3578
3579 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3580 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3581 target, mode, /*endp=*/2,
3582 exp);
3583
3584 if (ret)
3585 return ret;
3586
3587 if (TREE_CODE (len) == INTEGER_CST)
3588 {
3589 rtx len_rtx = expand_normal (len);
3590
3591 if (CONST_INT_P (len_rtx))
3592 {
3593 ret = expand_builtin_strcpy_args (dst, src, target);
3594
3595 if (ret)
3596 {
3597 if (! target)
3598 {
3599 if (mode != VOIDmode)
3600 target = gen_reg_rtx (mode);
3601 else
3602 target = gen_reg_rtx (GET_MODE (ret));
3603 }
3604 if (GET_MODE (target) != GET_MODE (ret))
3605 ret = gen_lowpart (GET_MODE (target), ret);
3606
3607 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3608 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3609 gcc_assert (ret);
3610
3611 return target;
3612 }
3613 }
3614 }
3615
3616 return expand_movstr (dst, src, target, /*endp=*/2);
3617 }
3618 }
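
/* Hedged reference model, not GCC source: the stpcpy semantics the
   expansion above relies on.  Copying strlen (SRC) + 1 bytes and
   returning the end pointer minus one (ENDP == 2) lands exactly on
   the copied NUL terminator.  */
static char *
stpcpy_ref (char *dest, const char *src)
{
  __SIZE_TYPE__ n = __builtin_strlen (src);
  __builtin_memcpy (dest, src, n + 1);	/* copy including the NUL */
  return dest + n;			/* points at the copied NUL */
}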
3619
3620 /* Callback routine for store_by_pieces.  Read GET_MODE_SIZE (MODE)
3621 bytes from constant string DATA + OFFSET and return it as target
3622 constant. */
3623
3624 rtx
3625 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3626 machine_mode mode)
3627 {
3628 const char *str = (const char *) data;
3629
3630 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3631 return const0_rtx;
3632
3633 return c_readstr (str + offset, mode);
3634 }
3635
3636 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3637 NULL_RTX if we failed; the caller should emit a normal call. */
3638
3639 static rtx
3640 expand_builtin_strncpy (tree exp, rtx target)
3641 {
3642 location_t loc = EXPR_LOCATION (exp);
3643
3644 if (validate_arglist (exp,
3645 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3646 {
3647 tree dest = CALL_EXPR_ARG (exp, 0);
3648 tree src = CALL_EXPR_ARG (exp, 1);
3649 tree len = CALL_EXPR_ARG (exp, 2);
3650 tree slen = c_strlen (src, 1);
3651
3652 /* We must be passed a constant len and src parameter. */
3653 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
3654 return NULL_RTX;
3655
3656 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3657
3658 /* We're required to pad with trailing zeros if the requested
3659 len is greater than strlen(s2)+1. In that case try to
3660 use store_by_pieces; if that fails, punt. */
3661 if (tree_int_cst_lt (slen, len))
3662 {
3663 unsigned int dest_align = get_pointer_alignment (dest);
3664 const char *p = c_getstr (src);
3665 rtx dest_mem;
3666
3667 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
3668 || !can_store_by_pieces (tree_to_uhwi (len),
3669 builtin_strncpy_read_str,
3670 CONST_CAST (char *, p),
3671 dest_align, false))
3672 return NULL_RTX;
3673
3674 dest_mem = get_memory_rtx (dest, len);
3675 store_by_pieces (dest_mem, tree_to_uhwi (len),
3676 builtin_strncpy_read_str,
3677 CONST_CAST (char *, p), dest_align, false, 0);
3678 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3679 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3680 return dest_mem;
3681 }
3682 }
3683 return NULL_RTX;
3684 }
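
/* Illustration, not part of GCC: the zero-padding rule handled above.
   When LEN exceeds strlen (SRC) + 1, strncpy must fill the tail with
   zeros; builtin_strncpy_read_str supplies them by returning
   const0_rtx for offsets past the end of the string.  */
static void
strncpy_pad_example (void)
{
  char buf[8];
  __builtin_strncpy (buf, "ab", sizeof buf);	/* 'a', 'b', six zeros */
  (void) buf;
}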
3685
3686 /* Callback routine for store_by_pieces.  Read GET_MODE_SIZE (MODE)
3687 bytes from constant string DATA + OFFSET and return it as target
3688 constant. */
3689
3690 rtx
3691 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3692 machine_mode mode)
3693 {
3694 const char *c = (const char *) data;
3695 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3696
3697 memset (p, *c, GET_MODE_SIZE (mode));
3698
3699 return c_readstr (p, mode);
3700 }
3701
3702 /* Callback routine for store_by_pieces. Return the RTL of a register
3703 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3704 char value given in the RTL register data. For example, if mode is
3705 4 bytes wide, return the RTL for 0x01010101*data. */
3706
3707 static rtx
3708 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3709 machine_mode mode)
3710 {
3711 rtx target, coeff;
3712 size_t size;
3713 char *p;
3714
3715 size = GET_MODE_SIZE (mode);
3716 if (size == 1)
3717 return (rtx) data;
3718
3719 p = XALLOCAVEC (char, size);
3720 memset (p, 1, size);
3721 coeff = c_readstr (p, mode);
3722
3723 target = convert_to_mode (mode, (rtx) data, 1);
3724 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3725 return force_reg (mode, target);
3726 }
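
/* Hedged illustration, not GCC source: the byte-replication trick
   computed in RTL above, shown on a 32-bit value.  Multiplying the
   byte by a coefficient whose bytes are all 0x01 copies it into
   every byte lane, which is what the c_readstr coefficient provides
   for an arbitrary mode.  */
static unsigned int
replicate_byte_example (unsigned char c)
{
  return c * 0x01010101u;	/* e.g. 0x5A becomes 0x5A5A5A5A */
}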
3727
3728 /* Expand expression EXP, which is a call to the memset builtin. Return
3729 NULL_RTX if we failed; the caller should emit a normal call.  Otherwise
3730 try to get the result in TARGET, if convenient (and in mode MODE if that's
3731 convenient). */
3732
3733 static rtx
3734 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
3735 {
3736 if (!validate_arglist (exp,
3737 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3738 return NULL_RTX;
3739 else
3740 {
3741 tree dest = CALL_EXPR_ARG (exp, 0);
3742 tree val = CALL_EXPR_ARG (exp, 1);
3743 tree len = CALL_EXPR_ARG (exp, 2);
3744 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3745 }
3746 }
3747
3748 /* Expand expression EXP, which is an instrumented call to the memset builtin.
3749 Return NULL_RTX if we failed; the caller should emit a normal call.  Otherwise
3750 try to get the result in TARGET, if convenient (and in mode MODE if that's
3751 convenient). */
3752
3753 static rtx
3754 expand_builtin_memset_with_bounds (tree exp, rtx target, machine_mode mode)
3755 {
3756 if (!validate_arglist (exp,
3757 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3758 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3759 return NULL_RTX;
3760 else
3761 {
3762 tree dest = CALL_EXPR_ARG (exp, 0);
3763 tree val = CALL_EXPR_ARG (exp, 2);
3764 tree len = CALL_EXPR_ARG (exp, 3);
3765 rtx res = expand_builtin_memset_args (dest, val, len, target, mode, exp);
3766
3767 /* Return DEST bounds with the result. */
3768 if (res)
3769 {
3770 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3771 expand_normal (CALL_EXPR_ARG (exp, 1)));
3772 res = chkp_join_splitted_slot (res, bnd);
3773 }
3774 return res;
3775 }
3776 }
3777
3778 /* Helper function to do the actual work for expand_builtin_memset. The
3779 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3780 so that this can also be called without constructing an actual CALL_EXPR.
3781 The other arguments and return value are the same as for
3782 expand_builtin_memset. */
3783
3784 static rtx
3785 expand_builtin_memset_args (tree dest, tree val, tree len,
3786 rtx target, machine_mode mode, tree orig_exp)
3787 {
3788 tree fndecl, fn;
3789 enum built_in_function fcode;
3790 machine_mode val_mode;
3791 char c;
3792 unsigned int dest_align;
3793 rtx dest_mem, dest_addr, len_rtx;
3794 HOST_WIDE_INT expected_size = -1;
3795 unsigned int expected_align = 0;
3796 unsigned HOST_WIDE_INT min_size;
3797 unsigned HOST_WIDE_INT max_size;
3798 unsigned HOST_WIDE_INT probable_max_size;
3799
3800 dest_align = get_pointer_alignment (dest);
3801
3802 /* If DEST is not a pointer type, don't do this operation in-line. */
3803 if (dest_align == 0)
3804 return NULL_RTX;
3805
3806 if (currently_expanding_gimple_stmt)
3807 stringop_block_profile (currently_expanding_gimple_stmt,
3808 &expected_align, &expected_size);
3809
3810 if (expected_align < dest_align)
3811 expected_align = dest_align;
3812
3813 /* If the LEN parameter is zero, return DEST. */
3814 if (integer_zerop (len))
3815 {
3816 /* Evaluate and ignore VAL in case it has side-effects. */
3817 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3818 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3819 }
3820
3821 /* Stabilize the arguments in case we fail. */
3822 dest = builtin_save_expr (dest);
3823 val = builtin_save_expr (val);
3824 len = builtin_save_expr (len);
3825
3826 len_rtx = expand_normal (len);
3827 determine_block_size (len, len_rtx, &min_size, &max_size,
3828 &probable_max_size);
3829 dest_mem = get_memory_rtx (dest, len);
3830 val_mode = TYPE_MODE (unsigned_char_type_node);
3831
3832 if (TREE_CODE (val) != INTEGER_CST)
3833 {
3834 rtx val_rtx;
3835
3836 val_rtx = expand_normal (val);
3837 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
3838
3839 /* Assume that we can memset by pieces if we can store
3840 the coefficients by pieces (in the required modes).
3841 We can't pass builtin_memset_gen_str as that emits RTL. */
3842 c = 1;
3843 if (tree_fits_uhwi_p (len)
3844 && can_store_by_pieces (tree_to_uhwi (len),
3845 builtin_memset_read_str, &c, dest_align,
3846 true))
3847 {
3848 val_rtx = force_reg (val_mode, val_rtx);
3849 store_by_pieces (dest_mem, tree_to_uhwi (len),
3850 builtin_memset_gen_str, val_rtx, dest_align,
3851 true, 0);
3852 }
3853 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3854 dest_align, expected_align,
3855 expected_size, min_size, max_size,
3856 probable_max_size))
3857 goto do_libcall;
3858
3859 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3860 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3861 return dest_mem;
3862 }
3863
3864 if (target_char_cast (val, &c))
3865 goto do_libcall;
3866
3867 if (c)
3868 {
3869 if (tree_fits_uhwi_p (len)
3870 && can_store_by_pieces (tree_to_uhwi (len),
3871 builtin_memset_read_str, &c, dest_align,
3872 true))
3873 store_by_pieces (dest_mem, tree_to_uhwi (len),
3874 builtin_memset_read_str, &c, dest_align, true, 0);
3875 else if (!set_storage_via_setmem (dest_mem, len_rtx,
3876 gen_int_mode (c, val_mode),
3877 dest_align, expected_align,
3878 expected_size, min_size, max_size,
3879 probable_max_size))
3880 goto do_libcall;
3881
3882 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3883 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3884 return dest_mem;
3885 }
3886
3887 set_mem_align (dest_mem, dest_align);
3888 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3889 CALL_EXPR_TAILCALL (orig_exp)
3890 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3891 expected_align, expected_size,
3892 min_size, max_size,
3893 probable_max_size);
3894
3895 if (dest_addr == 0)
3896 {
3897 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3898 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3899 }
3900
3901 return dest_addr;
3902
3903 do_libcall:
3904 fndecl = get_callee_fndecl (orig_exp);
3905 fcode = DECL_FUNCTION_CODE (fndecl);
3906 if (fcode == BUILT_IN_MEMSET
3907 || fcode == BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP)
3908 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
3909 dest, val, len);
3910 else if (fcode == BUILT_IN_BZERO)
3911 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
3912 dest, len);
3913 else
3914 gcc_unreachable ();
3915 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3916 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3917 return expand_call (fn, target, target == const0_rtx);
3918 }
3919
3920 /* Expand expression EXP, which is a call to the bzero builtin. Return
3921 NULL_RTX if we failed; the caller should emit a normal call. */
3922
3923 static rtx
3924 expand_builtin_bzero (tree exp)
3925 {
3926 tree dest, size;
3927 location_t loc = EXPR_LOCATION (exp);
3928
3929 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3930 return NULL_RTX;
3931
3932 dest = CALL_EXPR_ARG (exp, 0);
3933 size = CALL_EXPR_ARG (exp, 1);
3934
3935 /* Build a new argument list, transforming bzero (ptr x, int y)
3936 into memset (ptr x, int 0, size_t y).  This is done this way
3937 so that if it isn't expanded inline, we fall back to
3938 calling bzero instead of memset. */
3939
3940 return expand_builtin_memset_args (dest, integer_zero_node,
3941 fold_convert_loc (loc,
3942 size_type_node, size),
3943 const0_rtx, VOIDmode, exp);
3944 }
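
/* Illustration, not part of GCC: the rewrite performed above.
   bzero (x, y) behaves as memset (x, 0, y); only the fallback call
   emitted when inline expansion fails still names bzero.  */
static void
bzero_ref (void *p, __SIZE_TYPE__ n)
{
  __builtin_memset (p, 0, n);
}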
3945
3946 /* Expand expression EXP, which is a call to the memcmp built-in function.
3947 Return NULL_RTX if we failed and the caller should emit a normal call,
3948 otherwise try to get the result in TARGET, if convenient (and in mode
3949 MODE, if that's convenient). */
3950
3951 static rtx
3952 expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3953 ATTRIBUTE_UNUSED machine_mode mode)
3954 {
3955 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
3956
3957 if (!validate_arglist (exp,
3958 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3959 return NULL_RTX;
3960
3961 /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
3962 implementing memcmp because it will stop if it encounters two
3963 zero bytes. */
3964 #if defined HAVE_cmpmemsi
3965 {
3966 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3967 rtx result;
3968 rtx insn;
3969 tree arg1 = CALL_EXPR_ARG (exp, 0);
3970 tree arg2 = CALL_EXPR_ARG (exp, 1);
3971 tree len = CALL_EXPR_ARG (exp, 2);
3972
3973 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3974 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3975 machine_mode insn_mode;
3976
3977 if (HAVE_cmpmemsi)
3978 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
3979 else
3980 return NULL_RTX;
3981
3982 /* If the alignment of either pointer is unknown, call the function. */
3983 if (arg1_align == 0 || arg2_align == 0)
3984 return NULL_RTX;
3985
3986 /* Make a place to write the result of the instruction. */
3987 result = target;
3988 if (! (result != 0
3989 && REG_P (result) && GET_MODE (result) == insn_mode
3990 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3991 result = gen_reg_rtx (insn_mode);
3992
3993 arg1_rtx = get_memory_rtx (arg1, len);
3994 arg2_rtx = get_memory_rtx (arg2, len);
3995 arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
3996
3997 /* Set MEM_SIZE as appropriate. */
3998 if (CONST_INT_P (arg3_rtx))
3999 {
4000 set_mem_size (arg1_rtx, INTVAL (arg3_rtx));
4001 set_mem_size (arg2_rtx, INTVAL (arg3_rtx));
4002 }
4003
4004 if (HAVE_cmpmemsi)
4005 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4006 GEN_INT (MIN (arg1_align, arg2_align)));
4007 else
4008 gcc_unreachable ();
4009
4010 if (insn)
4011 emit_insn (insn);
4012 else
4013 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
4014 TYPE_MODE (integer_type_node), 3,
4015 XEXP (arg1_rtx, 0), Pmode,
4016 XEXP (arg2_rtx, 0), Pmode,
4017 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
4018 TYPE_UNSIGNED (sizetype)),
4019 TYPE_MODE (sizetype));
4020
4021 /* Return the value in the proper mode for this function. */
4022 mode = TYPE_MODE (TREE_TYPE (exp));
4023 if (GET_MODE (result) == mode)
4024 return result;
4025 else if (target != 0)
4026 {
4027 convert_move (target, result, 0);
4028 return target;
4029 }
4030 else
4031 return convert_to_mode (mode, result, 0);
4032 }
4033 #endif /* HAVE_cmpmemsi. */
4034
4035 return NULL_RTX;
4036 }
4037
4038 /* Expand expression EXP, which is a call to the strcmp builtin.  Return
4039 NULL_RTX if we failed; the caller should emit a normal call.  Otherwise
4040 try to get the result in TARGET, if convenient. */
4041
4042 static rtx
4043 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4044 {
4045 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4046 return NULL_RTX;
4047
4048 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
4049 if (direct_optab_handler (cmpstr_optab, SImode) != CODE_FOR_nothing
4050 || direct_optab_handler (cmpstrn_optab, SImode) != CODE_FOR_nothing)
4051 {
4052 rtx arg1_rtx, arg2_rtx;
4053 rtx result, insn = NULL_RTX;
4054 tree fndecl, fn;
4055 tree arg1 = CALL_EXPR_ARG (exp, 0);
4056 tree arg2 = CALL_EXPR_ARG (exp, 1);
4057
4058 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4059 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4060
4061 /* If the alignment of either pointer is unknown, call the function. */
4062 if (arg1_align == 0 || arg2_align == 0)
4063 return NULL_RTX;
4064
4065 /* Stabilize the arguments in case gen_cmpstr(n)si fails. */
4066 arg1 = builtin_save_expr (arg1);
4067 arg2 = builtin_save_expr (arg2);
4068
4069 arg1_rtx = get_memory_rtx (arg1, NULL);
4070 arg2_rtx = get_memory_rtx (arg2, NULL);
4071
4072 #ifdef HAVE_cmpstrsi
4073 /* Try to call cmpstrsi. */
4074 if (HAVE_cmpstrsi)
4075 {
4076 machine_mode insn_mode
4077 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
4078
4079 /* Make a place to write the result of the instruction. */
4080 result = target;
4081 if (! (result != 0
4082 && REG_P (result) && GET_MODE (result) == insn_mode
4083 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4084 result = gen_reg_rtx (insn_mode);
4085
4086 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
4087 GEN_INT (MIN (arg1_align, arg2_align)));
4088 }
4089 #endif
4090 #ifdef HAVE_cmpstrnsi
4091 /* Try to determine at least one length and call cmpstrnsi. */
4092 if (!insn && HAVE_cmpstrnsi)
4093 {
4094 tree len;
4095 rtx arg3_rtx;
4096
4097 machine_mode insn_mode
4098 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4099 tree len1 = c_strlen (arg1, 1);
4100 tree len2 = c_strlen (arg2, 1);
4101
4102 if (len1)
4103 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4104 if (len2)
4105 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4106
4107 /* If we don't have a constant length for the first, use the length
4108 of the second, if we know it. We don't require a constant for
4109 this case; some cost analysis could be done if both are available
4110 but neither is constant. For now, assume they're equally cheap,
4111 unless one has side effects. If both strings have constant lengths,
4112 use the smaller. */
4113
4114 if (!len1)
4115 len = len2;
4116 else if (!len2)
4117 len = len1;
4118 else if (TREE_SIDE_EFFECTS (len1))
4119 len = len2;
4120 else if (TREE_SIDE_EFFECTS (len2))
4121 len = len1;
4122 else if (TREE_CODE (len1) != INTEGER_CST)
4123 len = len2;
4124 else if (TREE_CODE (len2) != INTEGER_CST)
4125 len = len1;
4126 else if (tree_int_cst_lt (len1, len2))
4127 len = len1;
4128 else
4129 len = len2;
4130
4131 /* If both arguments have side effects, we cannot optimize. */
4132 if (!len || TREE_SIDE_EFFECTS (len))
4133 goto do_libcall;
4134
4135 arg3_rtx = expand_normal (len);
4136
4137 /* Make a place to write the result of the instruction. */
4138 result = target;
4139 if (! (result != 0
4140 && REG_P (result) && GET_MODE (result) == insn_mode
4141 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4142 result = gen_reg_rtx (insn_mode);
4143
4144 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4145 GEN_INT (MIN (arg1_align, arg2_align)));
4146 }
4147 #endif
4148
4149 if (insn)
4150 {
4151 machine_mode mode;
4152 emit_insn (insn);
4153
4154 /* Return the value in the proper mode for this function. */
4155 mode = TYPE_MODE (TREE_TYPE (exp));
4156 if (GET_MODE (result) == mode)
4157 return result;
4158 if (target == 0)
4159 return convert_to_mode (mode, result, 0);
4160 convert_move (target, result, 0);
4161 return target;
4162 }
4163
4164 /* Expand the library call ourselves using a stabilized argument
4165 list to avoid re-evaluating the function's arguments twice. */
4166 #ifdef HAVE_cmpstrnsi
4167 do_libcall:
4168 #endif
4169 fndecl = get_callee_fndecl (exp);
4170 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4171 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4172 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4173 return expand_call (fn, target, target == const0_rtx);
4174 }
4175 #endif
4176 return NULL_RTX;
4177 }
4178
4179 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4180 NULL_RTX if we failed; the caller should emit a normal call.  Otherwise try to get
4181 the result in TARGET, if convenient. */
4182
4183 static rtx
4184 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4185 ATTRIBUTE_UNUSED machine_mode mode)
4186 {
4187 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
4188
4189 if (!validate_arglist (exp,
4190 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4191 return NULL_RTX;
4192
4193 /* If c_strlen can determine an expression for one of the string
4194 lengths, and it doesn't have side effects, then emit cmpstrnsi
4195 using length MIN(strlen(string)+1, arg3). */
4196 #ifdef HAVE_cmpstrnsi
4197 if (HAVE_cmpstrnsi)
4198 {
4199 tree len, len1, len2;
4200 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4201 rtx result, insn;
4202 tree fndecl, fn;
4203 tree arg1 = CALL_EXPR_ARG (exp, 0);
4204 tree arg2 = CALL_EXPR_ARG (exp, 1);
4205 tree arg3 = CALL_EXPR_ARG (exp, 2);
4206
4207 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4208 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4209 machine_mode insn_mode
4210 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4211
4212 len1 = c_strlen (arg1, 1);
4213 len2 = c_strlen (arg2, 1);
4214
4215 if (len1)
4216 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4217 if (len2)
4218 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4219
4220 /* If we don't have a constant length for the first, use the length
4221 of the second, if we know it. We don't require a constant for
4222 this case; some cost analysis could be done if both are available
4223 but neither is constant. For now, assume they're equally cheap,
4224 unless one has side effects. If both strings have constant lengths,
4225 use the smaller. */
4226
4227 if (!len1)
4228 len = len2;
4229 else if (!len2)
4230 len = len1;
4231 else if (TREE_SIDE_EFFECTS (len1))
4232 len = len2;
4233 else if (TREE_SIDE_EFFECTS (len2))
4234 len = len1;
4235 else if (TREE_CODE (len1) != INTEGER_CST)
4236 len = len2;
4237 else if (TREE_CODE (len2) != INTEGER_CST)
4238 len = len1;
4239 else if (tree_int_cst_lt (len1, len2))
4240 len = len1;
4241 else
4242 len = len2;
4243
4244 /* If both arguments have side effects, we cannot optimize. */
4245 if (!len || TREE_SIDE_EFFECTS (len))
4246 return NULL_RTX;
4247
4248 /* The actual new length parameter is MIN(len,arg3). */
4249 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
4250 fold_convert_loc (loc, TREE_TYPE (len), arg3));
4251
4252 /* If we don't have POINTER_TYPE, call the function. */
4253 if (arg1_align == 0 || arg2_align == 0)
4254 return NULL_RTX;
4255
4256 /* Make a place to write the result of the instruction. */
4257 result = target;
4258 if (! (result != 0
4259 && REG_P (result) && GET_MODE (result) == insn_mode
4260 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4261 result = gen_reg_rtx (insn_mode);
4262
4263 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4264 arg1 = builtin_save_expr (arg1);
4265 arg2 = builtin_save_expr (arg2);
4266 len = builtin_save_expr (len);
4267
4268 arg1_rtx = get_memory_rtx (arg1, len);
4269 arg2_rtx = get_memory_rtx (arg2, len);
4270 arg3_rtx = expand_normal (len);
4271 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4272 GEN_INT (MIN (arg1_align, arg2_align)));
4273 if (insn)
4274 {
4275 emit_insn (insn);
4276
4277 /* Return the value in the proper mode for this function. */
4278 mode = TYPE_MODE (TREE_TYPE (exp));
4279 if (GET_MODE (result) == mode)
4280 return result;
4281 if (target == 0)
4282 return convert_to_mode (mode, result, 0);
4283 convert_move (target, result, 0);
4284 return target;
4285 }
4286
4287 /* Expand the library call ourselves using a stabilized argument
4288 list to avoid re-evaluating the function's arguments twice. */
4289 fndecl = get_callee_fndecl (exp);
4290 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
4291 arg1, arg2, len);
4292 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4293 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4294 return expand_call (fn, target, target == const0_rtx);
4295 }
4296 #endif
4297 return NULL_RTX;
4298 }
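
/* Hedged illustration, not GCC source: why MIN (strlen (s) + 1, n)
   bytes suffice above.  Once the comparison covers one string's
   terminating NUL, the result is already decided, so the bound can
   be tightened without changing the outcome.  */
static int
strncmp_min_len_example (const char *s1, const char *s2, __SIZE_TYPE__ n)
{
  __SIZE_TYPE__ l = __builtin_strlen (s1) + 1;	/* assumed known */
  return __builtin_strncmp (s1, s2, l < n ? l : n);
}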
4299
4300 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4301 if that's convenient. */
4302
4303 rtx
4304 expand_builtin_saveregs (void)
4305 {
4306 rtx val;
4307 rtx_insn *seq;
4308
4309 /* Don't do __builtin_saveregs more than once in a function.
4310 Save the result of the first call and reuse it. */
4311 if (saveregs_value != 0)
4312 return saveregs_value;
4313
4314 /* When this function is called, it means that registers must be
4315 saved on entry to this function. So we migrate the call to the
4316 first insn of this function. */
4317
4318 start_sequence ();
4319
4320 /* Do whatever the machine needs done in this case. */
4321 val = targetm.calls.expand_builtin_saveregs ();
4322
4323 seq = get_insns ();
4324 end_sequence ();
4325
4326 saveregs_value = val;
4327
4328 /* Put the insns after the NOTE that starts the function. If this
4329 is inside a start_sequence, make the outer-level insn chain current, so
4330 the code is placed at the start of the function. */
4331 push_topmost_sequence ();
4332 emit_insn_after (seq, entry_of_function ());
4333 pop_topmost_sequence ();
4334
4335 return val;
4336 }
4337
4338 /* Expand a call to __builtin_next_arg. */
4339
4340 static rtx
4341 expand_builtin_next_arg (void)
4342 {
4343 /* Argument checking is already done in fold_builtin_next_arg,
4344 which must be called before this function. */
4345 return expand_binop (ptr_mode, add_optab,
4346 crtl->args.internal_arg_pointer,
4347 crtl->args.arg_offset_rtx,
4348 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4349 }
4350
4351 /* Make it easier for the backends by protecting the valist argument
4352 from multiple evaluations. */
4353
4354 static tree
4355 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4356 {
4357 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4358
4359 /* The current way of determining the type of valist is completely
4360 bogus. We should have the information on the va builtin instead. */
4361 if (!vatype)
4362 vatype = targetm.fn_abi_va_list (cfun->decl);
4363
4364 if (TREE_CODE (vatype) == ARRAY_TYPE)
4365 {
4366 if (TREE_SIDE_EFFECTS (valist))
4367 valist = save_expr (valist);
4368
4369 /* For this case, the backends will be expecting a pointer to
4370 vatype, but it's possible we've actually been given an array
4371 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4372 So fix it. */
4373 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4374 {
4375 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4376 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4377 }
4378 }
4379 else
4380 {
4381 tree pt = build_pointer_type (vatype);
4382
4383 if (! needs_lvalue)
4384 {
4385 if (! TREE_SIDE_EFFECTS (valist))
4386 return valist;
4387
4388 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4389 TREE_SIDE_EFFECTS (valist) = 1;
4390 }
4391
4392 if (TREE_SIDE_EFFECTS (valist))
4393 valist = save_expr (valist);
4394 valist = fold_build2_loc (loc, MEM_REF,
4395 vatype, valist, build_int_cst (pt, 0));
4396 }
4397
4398 return valist;
4399 }
4400
4401 /* The "standard" definition of va_list is void*. */
4402
4403 tree
4404 std_build_builtin_va_list (void)
4405 {
4406 return ptr_type_node;
4407 }
4408
4409 /* The "standard" ABI va_list is va_list_type_node. */
4410
4411 tree
4412 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4413 {
4414 return va_list_type_node;
4415 }
4416
4417 /* The "standard" type of va_list is va_list_type_node. */
4418
4419 tree
4420 std_canonical_va_list_type (tree type)
4421 {
4422 tree wtype, htype;
4423
4424 if (INDIRECT_REF_P (type))
4425 type = TREE_TYPE (type);
4426 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE (type)))
4427 type = TREE_TYPE (type);
4428 wtype = va_list_type_node;
4429 htype = type;
4430 /* Treat structure va_list types. */
4431 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4432 htype = TREE_TYPE (htype);
4433 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4434 {
4435 /* If va_list is an array type, the argument may have decayed
4436 to a pointer type, e.g. by being passed to another function.
4437 In that case, unwrap both types so that we can compare the
4438 underlying records. */
4439 if (TREE_CODE (htype) == ARRAY_TYPE
4440 || POINTER_TYPE_P (htype))
4441 {
4442 wtype = TREE_TYPE (wtype);
4443 htype = TREE_TYPE (htype);
4444 }
4445 }
4446 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4447 return va_list_type_node;
4448
4449 return NULL_TREE;
4450 }
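
/* Illustration, not part of GCC: the array decay the code above
   unwraps.  On targets where va_list is a one-element array type
   (the x86-64 ABI, for instance), passing it to another function
   decays it to a pointer to the underlying record, so both types
   must be unwrapped before comparing main variants.  */
static void
va_consume_example (__builtin_va_list ap)
{
  (void) ap;
}

static void
va_pass_along_example (__builtin_va_list ap)
{
  va_consume_example (ap);	/* an array-typed va_list decays here */
}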
4451
4452 /* The "standard" implementation of va_start: just assign `nextarg' to
4453 the variable. */
4454
4455 void
4456 std_expand_builtin_va_start (tree valist, rtx nextarg)
4457 {
4458 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4459 convert_move (va_r, nextarg, 0);
4460
4461 /* We do not have any valid bounds for the pointer, so
4462 just store zero bounds for it. */
4463 if (chkp_function_instrumented_p (current_function_decl))
4464 chkp_expand_bounds_reset_for_mem (valist,
4465 make_tree (TREE_TYPE (valist),
4466 nextarg));
4467 }
4468
4469 /* Expand EXP, a call to __builtin_va_start. */
4470
4471 static rtx
4472 expand_builtin_va_start (tree exp)
4473 {
4474 rtx nextarg;
4475 tree valist;
4476 location_t loc = EXPR_LOCATION (exp);
4477
4478 if (call_expr_nargs (exp) < 2)
4479 {
4480 error_at (loc, "too few arguments to function %<va_start%>");
4481 return const0_rtx;
4482 }
4483
4484 if (fold_builtin_next_arg (exp, true))
4485 return const0_rtx;
4486
4487 nextarg = expand_builtin_next_arg ();
4488 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4489
4490 if (targetm.expand_builtin_va_start)
4491 targetm.expand_builtin_va_start (valist, nextarg);
4492 else
4493 std_expand_builtin_va_start (valist, nextarg);
4494
4495 return const0_rtx;
4496 }
4497
4498 /* Expand EXP, a call to __builtin_va_end. */
4499
4500 static rtx
4501 expand_builtin_va_end (tree exp)
4502 {
4503 tree valist = CALL_EXPR_ARG (exp, 0);
4504
4505 /* Evaluate for side effects, if needed. I hate macros that don't
4506 do that. */
4507 if (TREE_SIDE_EFFECTS (valist))
4508 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4509
4510 return const0_rtx;
4511 }
4512
4513 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4514 builtin rather than just as an assignment in stdarg.h because of the
4515 nastiness of array-type va_list types. */
4516
4517 static rtx
4518 expand_builtin_va_copy (tree exp)
4519 {
4520 tree dst, src, t;
4521 location_t loc = EXPR_LOCATION (exp);
4522
4523 dst = CALL_EXPR_ARG (exp, 0);
4524 src = CALL_EXPR_ARG (exp, 1);
4525
4526 dst = stabilize_va_list_loc (loc, dst, 1);
4527 src = stabilize_va_list_loc (loc, src, 0);
4528
4529 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4530
4531 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4532 {
4533 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4534 TREE_SIDE_EFFECTS (t) = 1;
4535 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4536 }
4537 else
4538 {
4539 rtx dstb, srcb, size;
4540
4541 /* Evaluate to pointers. */
4542 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4543 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4544 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4545 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4546
4547 dstb = convert_memory_address (Pmode, dstb);
4548 srcb = convert_memory_address (Pmode, srcb);
4549
4550 /* "Dereference" to BLKmode memories. */
4551 dstb = gen_rtx_MEM (BLKmode, dstb);
4552 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4553 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4554 srcb = gen_rtx_MEM (BLKmode, srcb);
4555 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4556 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4557
4558 /* Copy. */
4559 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4560 }
4561
4562 return const0_rtx;
4563 }
4564
4565 /* Expand a call to one of the builtin functions __builtin_frame_address or
4566 __builtin_return_address. */
4567
4568 static rtx
4569 expand_builtin_frame_address (tree fndecl, tree exp)
4570 {
4571 /* The argument must be a nonnegative integer constant.
4572 It counts the number of frames to scan up the stack.
4573 The value is the return address saved in that frame. */
4574 if (call_expr_nargs (exp) == 0)
4575 /* Warning about missing arg was already issued. */
4576 return const0_rtx;
4577 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
4578 {
4579 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4580 error ("invalid argument to %<__builtin_frame_address%>");
4581 else
4582 error ("invalid argument to %<__builtin_return_address%>");
4583 return const0_rtx;
4584 }
4585 else
4586 {
4587 rtx tem
4588 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
4589 tree_to_uhwi (CALL_EXPR_ARG (exp, 0)));
4590
4591 /* Some ports cannot access arbitrary stack frames. */
4592 if (tem == NULL)
4593 {
4594 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4595 warning (0, "unsupported argument to %<__builtin_frame_address%>");
4596 else
4597 warning (0, "unsupported argument to %<__builtin_return_address%>");
4598 return const0_rtx;
4599 }
4600
4601 /* For __builtin_frame_address, return what we've got. */
4602 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4603 return tem;
4604
4605 if (!REG_P (tem)
4606 && ! CONSTANT_P (tem))
4607 tem = copy_addr_to_reg (tem);
4608 return tem;
4609 }
4610 }
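
/* Usage sketch, not GCC source: the builtins expanded above take a
   constant frame count, where 0 names the current frame.  Nonzero
   counts walk up the stack and may be unsupported on some ports, as
   the warning path above notes.  */
static void *
return_address_example (void)
{
  return __builtin_return_address (0);
}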
4611
4612 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4613 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4614 is the same as for allocate_dynamic_stack_space. */
4615
4616 static rtx
4617 expand_builtin_alloca (tree exp, bool cannot_accumulate)
4618 {
4619 rtx op0;
4620 rtx result;
4621 bool valid_arglist;
4622 unsigned int align;
4623 bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
4624 == BUILT_IN_ALLOCA_WITH_ALIGN);
4625
4626 valid_arglist
4627 = (alloca_with_align
4628 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4629 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4630
4631 if (!valid_arglist)
4632 return NULL_RTX;
4633
4634 /* Compute the argument. */
4635 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4636
4637 /* Compute the alignment. */
4638 align = (alloca_with_align
4639 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
4640 : BIGGEST_ALIGNMENT);
4641
4642 /* Allocate the desired space. */
4643 result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
4644 result = convert_memory_address (ptr_mode, result);
4645
4646 return result;
4647 }
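
/* Usage sketch, not GCC source: the plain form validated above.  The
   __builtin_alloca_with_align variant additionally takes a constant
   alignment argument, which the expansion above uses directly as the
   requested ALIGN.  */
static void
alloca_example (__SIZE_TYPE__ n)
{
  void *p = __builtin_alloca (n);	/* aligned to BIGGEST_ALIGNMENT */
  (void) p;
}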
4648
4649 /* Expand a call to bswap builtin in EXP.
4650 Return NULL_RTX if a normal call should be emitted rather than expanding the
4651 function in-line. If convenient, the result should be placed in TARGET.
4652 SUBTARGET may be used as the target for computing one of EXP's operands. */
4653
4654 static rtx
4655 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
4656 rtx subtarget)
4657 {
4658 tree arg;
4659 rtx op0;
4660
4661 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4662 return NULL_RTX;
4663
4664 arg = CALL_EXPR_ARG (exp, 0);
4665 op0 = expand_expr (arg,
4666 subtarget && GET_MODE (subtarget) == target_mode
4667 ? subtarget : NULL_RTX,
4668 target_mode, EXPAND_NORMAL);
4669 if (GET_MODE (op0) != target_mode)
4670 op0 = convert_to_mode (target_mode, op0, 1);
4671
4672 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
4673
4674 gcc_assert (target);
4675
4676 return convert_to_mode (target_mode, target, 1);
4677 }
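
/* Usage sketch, not GCC source: the byte-swap builtins expanded
   through bswap_optab above.  */
static unsigned int
bswap_example (unsigned int x)
{
  return __builtin_bswap32 (x);		/* 0x11223344 -> 0x44332211 */
}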
4678
4679 /* Expand a call to a unary builtin in EXP.
4680 Return NULL_RTX if a normal call should be emitted rather than expanding the
4681 function in-line. If convenient, the result should be placed in TARGET.
4682 SUBTARGET may be used as the target for computing one of EXP's operands. */
4683
4684 static rtx
4685 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
4686 rtx subtarget, optab op_optab)
4687 {
4688 rtx op0;
4689
4690 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4691 return NULL_RTX;
4692
4693 /* Compute the argument. */
4694 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4695 (subtarget
4696 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4697 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
4698 VOIDmode, EXPAND_NORMAL);
4699 /* Compute op, into TARGET if possible.
4700 Set TARGET to wherever the result comes back. */
4701 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4702 op_optab, op0, target, op_optab != clrsb_optab);
4703 gcc_assert (target);
4704
4705 return convert_to_mode (target_mode, target, 0);
4706 }
4707
4708 /* Expand a call to __builtin_expect. We just return our argument
4709 as the builtin_expect semantics should already have been applied by
4710 the tree branch-prediction pass. */
4711
4712 static rtx
4713 expand_builtin_expect (tree exp, rtx target)
4714 {
4715 tree arg;
4716
4717 if (call_expr_nargs (exp) < 2)
4718 return const0_rtx;
4719 arg = CALL_EXPR_ARG (exp, 0);
4720
4721 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4722 /* When guessing was done, the hints should be already stripped away. */
4723 gcc_assert (!flag_guess_branch_prob
4724 || optimize == 0 || seen_error ());
4725 return target;
4726 }
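
/* Usage sketch, not GCC source: by expansion time the hint has been
   consumed by branch prediction, so only the value of the first
   argument remains -- which is why only ARG is expanded above.  */
static int
expect_example (int x)
{
  if (__builtin_expect (x != 0, 1))	/* "x is usually nonzero" */
    return 1;
  return 0;
}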
4727
4728 /* Expand a call to __builtin_assume_aligned. We just return our first
4729 argument, as the builtin_assume_aligned semantics should already have
4730 been applied by CCP. */
4731
4732 static rtx
4733 expand_builtin_assume_aligned (tree exp, rtx target)
4734 {
4735 if (call_expr_nargs (exp) < 2)
4736 return const0_rtx;
4737 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4738 EXPAND_NORMAL);
4739 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4740 && (call_expr_nargs (exp) < 3
4741 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4742 return target;
4743 }
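
/* Usage sketch, not GCC source: __builtin_assume_aligned returns its
   first argument; the alignment fact was consumed earlier by CCP, so
   the expansion above only verifies the remaining arguments have no
   side effects.  */
static void *
assume_aligned_example (void *p)
{
  return __builtin_assume_aligned (p, 16);
}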
4744
4745 void
4746 expand_builtin_trap (void)
4747 {
4748 #ifdef HAVE_trap
4749 if (HAVE_trap)
4750 {
4751 rtx_insn *insn = emit_insn (gen_trap ());
4752 /* For trap insns when not accumulating outgoing args force
4753 REG_ARGS_SIZE note to prevent crossjumping of calls with
4754 different args sizes. */
4755 if (!ACCUMULATE_OUTGOING_ARGS)
4756 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4757 }
4758 else
4759 #endif
4760 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
4761 emit_barrier ();
4762 }
4763
4764 /* Expand a call to __builtin_unreachable. We do nothing except emit
4765 a barrier saying that control flow will not pass here.
4766
4767 It is the responsibility of the program being compiled to ensure
4768 that control flow never reaches __builtin_unreachable. */
4769 static void
4770 expand_builtin_unreachable (void)
4771 {
4772 emit_barrier ();
4773 }
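
/* Usage sketch, not GCC source: a typical __builtin_unreachable call
   site.  The barrier emitted above tells the optimizers control
   cannot flow past it; the program must guarantee that.  */
static int
unreachable_example (int k)
{
  switch (k & 1)
    {
    case 0: return 10;
    case 1: return 20;
    default: __builtin_unreachable ();	/* k & 1 is always 0 or 1 */
    }
}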
4774
4775 /* Expand EXP, a call to fabs, fabsf or fabsl.
4776 Return NULL_RTX if a normal call should be emitted rather than expanding
4777 the function inline. If convenient, the result should be placed
4778 in TARGET. SUBTARGET may be used as the target for computing
4779 the operand. */
4780
4781 static rtx
4782 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4783 {
4784 machine_mode mode;
4785 tree arg;
4786 rtx op0;
4787
4788 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4789 return NULL_RTX;
4790
4791 arg = CALL_EXPR_ARG (exp, 0);
4792 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4793 mode = TYPE_MODE (TREE_TYPE (arg));
4794 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4795 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4796 }
4797
4798 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4799 Return NULL_RTX if a normal call should be emitted rather than expanding the
4800 function inline. If convenient, the result should be placed in TARGET.
4801 SUBTARGET may be used as the target for computing the operand. */
4802
4803 static rtx
4804 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4805 {
4806 rtx op0, op1;
4807 tree arg;
4808
4809 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4810 return NULL_RTX;
4811
4812 arg = CALL_EXPR_ARG (exp, 0);
4813 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4814
4815 arg = CALL_EXPR_ARG (exp, 1);
4816 op1 = expand_normal (arg);
4817
4818 return expand_copysign (op0, op1, target);
4819 }
4820
4821 /* Expand a call to __builtin___clear_cache. */
4822
4823 static rtx
4824 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
4825 {
4826 #ifndef HAVE_clear_cache
4827 #ifdef CLEAR_INSN_CACHE
4828 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4829 does something. Just do the default expansion to a call to
4830 __clear_cache(). */
4831 return NULL_RTX;
4832 #else
4833 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4834 does nothing. There is no need to call it. Do nothing. */
4835 return const0_rtx;
4836 #endif /* CLEAR_INSN_CACHE */
4837 #else
4838 /* We have a "clear_cache" insn, and it will handle everything. */
4839 tree begin, end;
4840 rtx begin_rtx, end_rtx;
4841
4842 /* We must not expand to a library call. If we did, any
4843 fallback library function in libgcc that might contain a call to
4844 __builtin___clear_cache() would recurse infinitely. */
4845 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4846 {
4847 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
4848 return const0_rtx;
4849 }
4850
4851 if (HAVE_clear_cache)
4852 {
4853 struct expand_operand ops[2];
4854
4855 begin = CALL_EXPR_ARG (exp, 0);
4856 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
4857
4858 end = CALL_EXPR_ARG (exp, 1);
4859 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
4860
4861 create_address_operand (&ops[0], begin_rtx);
4862 create_address_operand (&ops[1], end_rtx);
4863 if (maybe_expand_insn (CODE_FOR_clear_cache, 2, ops))
4864 return const0_rtx;
4865 }
4866 return const0_rtx;
4867 #endif /* HAVE_clear_cache */
4868 }
4869
4870 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
4871
4872 static rtx
4873 round_trampoline_addr (rtx tramp)
4874 {
4875 rtx temp, addend, mask;
4876
4877 /* If we don't need too much alignment, we'll have been guaranteed
4878 proper alignment by get_trampoline_type. */
4879 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
4880 return tramp;
4881
4882 /* Round address up to desired boundary. */
4883 temp = gen_reg_rtx (Pmode);
4884 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
4885 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
4886
4887 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
4888 temp, 0, OPTAB_LIB_WIDEN);
4889 tramp = expand_simple_binop (Pmode, AND, temp, mask,
4890 temp, 0, OPTAB_LIB_WIDEN);
4891
4892 return tramp;
4893 }
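
/* Hedged restatement, not GCC source: the rounding computed in RTL
   above, as plain integer arithmetic.  ALIGN is the power-of-two
   byte count TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT.  */
static unsigned long
round_up_example (unsigned long addr, unsigned long align)
{
  return (addr + align - 1) & -align;	/* add ADDEND, apply MASK */
}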
4894
4895 static rtx
4896 expand_builtin_init_trampoline (tree exp, bool onstack)
4897 {
4898 tree t_tramp, t_func, t_chain;
4899 rtx m_tramp, r_tramp, r_chain, tmp;
4900
4901 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
4902 POINTER_TYPE, VOID_TYPE))
4903 return NULL_RTX;
4904
4905 t_tramp = CALL_EXPR_ARG (exp, 0);
4906 t_func = CALL_EXPR_ARG (exp, 1);
4907 t_chain = CALL_EXPR_ARG (exp, 2);
4908
4909 r_tramp = expand_normal (t_tramp);
4910 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
4911 MEM_NOTRAP_P (m_tramp) = 1;
4912
4913 /* If ONSTACK, the TRAMP argument should be the address of a field
4914 within the local function's FRAME decl. Either way, let's see if
4915 we can fill in the MEM_ATTRs for this memory. */
4916 if (TREE_CODE (t_tramp) == ADDR_EXPR)
4917 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
4918
4919 /* Creator of a heap trampoline is responsible for making sure the
4920 address is aligned to at least STACK_BOUNDARY. Normally malloc
4921 will ensure this anyhow. */
4922 tmp = round_trampoline_addr (r_tramp);
4923 if (tmp != r_tramp)
4924 {
4925 m_tramp = change_address (m_tramp, BLKmode, tmp);
4926 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
4927 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
4928 }
4929
4930 /* The FUNC argument should be the address of the nested function.
4931 Extract the actual function decl to pass to the hook. */
4932 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
4933 t_func = TREE_OPERAND (t_func, 0);
4934 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
4935
4936 r_chain = expand_normal (t_chain);
4937
4938 /* Generate insns to initialize the trampoline. */
4939 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
4940
4941 if (onstack)
4942 {
4943 trampolines_created = 1;
4944
4945 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
4946 "trampoline generated for nested function %qD", t_func);
4947 }
4948
4949 return const0_rtx;
4950 }
4951
4952 static rtx
4953 expand_builtin_adjust_trampoline (tree exp)
4954 {
4955 rtx tramp;
4956
4957 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4958 return NULL_RTX;
4959
4960 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4961 tramp = round_trampoline_addr (tramp);
4962 if (targetm.calls.trampoline_adjust_address)
4963 tramp = targetm.calls.trampoline_adjust_address (tramp);
4964
4965 return tramp;
4966 }
4967
4968 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
4969 function. The function first checks whether the back end provides
4970 an insn to implement signbit for the respective mode. If not, it
4971 checks whether the floating point format of the value is such that
4972 the sign bit can be extracted. If that is not the case, the
4973 function returns NULL_RTX to indicate that a normal call should be
4974 emitted rather than expanding the function in-line. EXP is the
4975 expression that is a call to the builtin function; if convenient,
4976 the result should be placed in TARGET. */
4977 static rtx
4978 expand_builtin_signbit (tree exp, rtx target)
4979 {
4980 const struct real_format *fmt;
4981 machine_mode fmode, imode, rmode;
4982 tree arg;
4983 int word, bitpos;
4984 enum insn_code icode;
4985 rtx temp;
4986 location_t loc = EXPR_LOCATION (exp);
4987
4988 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4989 return NULL_RTX;
4990
4991 arg = CALL_EXPR_ARG (exp, 0);
4992 fmode = TYPE_MODE (TREE_TYPE (arg));
4993 rmode = TYPE_MODE (TREE_TYPE (exp));
4994 fmt = REAL_MODE_FORMAT (fmode);
4995
4996 arg = builtin_save_expr (arg);
4997
4998 /* Expand the argument yielding a RTX expression. */
4999 temp = expand_normal (arg);
5000
5001 /* Check if the back end provides an insn that handles signbit for the
5002 argument's mode. */
5003 icode = optab_handler (signbit_optab, fmode);
5004 if (icode != CODE_FOR_nothing)
5005 {
5006 rtx_insn *last = get_last_insn ();
5007 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5008 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
5009 return target;
5010 delete_insns_since (last);
5011 }
5012
5013 /* For floating point formats without a sign bit, implement signbit
5014 as "ARG < 0.0". */
5015 bitpos = fmt->signbit_ro;
5016 if (bitpos < 0)
5017 {
5018 /* But we can't do this if the format supports signed zero. */
5019 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
5020 return NULL_RTX;
5021
5022 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5023 build_real (TREE_TYPE (arg), dconst0));
5024 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5025 }
5026
5027 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5028 {
5029 imode = int_mode_for_mode (fmode);
5030 if (imode == BLKmode)
5031 return NULL_RTX;
5032 temp = gen_lowpart (imode, temp);
5033 }
5034 else
5035 {
5036 imode = word_mode;
5037 /* Handle targets with different FP word orders. */
5038 if (FLOAT_WORDS_BIG_ENDIAN)
5039 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5040 else
5041 word = bitpos / BITS_PER_WORD;
5042 temp = operand_subword_force (temp, word, fmode);
5043 bitpos = bitpos % BITS_PER_WORD;
5044 }
5045
5046 /* Force the intermediate word_mode (or narrower) result into a
5047 register. This avoids attempting to create paradoxical SUBREGs
5048 of floating point modes below. */
5049 temp = force_reg (imode, temp);
5050
5051 /* If the bitpos is within the "result mode" lowpart, the operation
5052 can be implemented with a single bitwise AND.  Otherwise, we need
5053 a right shift and an AND. */
5054
5055 if (bitpos < GET_MODE_BITSIZE (rmode))
5056 {
5057 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
5058
5059 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5060 temp = gen_lowpart (rmode, temp);
5061 temp = expand_binop (rmode, and_optab, temp,
5062 immed_wide_int_const (mask, rmode),
5063 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5064 }
5065 else
5066 {
5067 /* Perform a logical right shift to place the signbit in the least
5068 significant bit, then truncate the result to the desired mode
5069 and mask just this bit. */
5070 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
5071 temp = gen_lowpart (rmode, temp);
5072 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5073 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5074 }
5075
5076 return temp;
5077 }
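
/* Illustrative sketch (not part of the original source): for IEEE
   binary64, where signbit_ro is 63, the shift-and-AND path above
   computes the equivalent of the following C, assuming a 64-bit
   unsigned long long:

     int signbit_f64 (double x)            // hypothetical helper
     {
       unsigned long long bits;
       __builtin_memcpy (&bits, &x, sizeof bits);
       return (int) ((bits >> 63) & 1);    // logical shift, then mask
     }

   Unlike "ARG < 0.0", this also reports the sign of -0.0 and of NaNs
   whose sign bit is set.  */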
5078
5079 /* Expand fork or exec calls. TARGET is the desired target of the
5080 call. EXP is the call. FN is the
5081 declaration of the actual function. IGNORE is nonzero if the
5082 value is to be ignored. */
5083
5084 static rtx
5085 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5086 {
5087 tree id, decl;
5088 tree call;
5089
5090 /* If we are not profiling, just call the function. */
5091 if (!profile_arc_flag)
5092 return NULL_RTX;
5093
5094 /* Otherwise call the wrapper. This should be equivalent for the rest of
5095 the compiler, so the code does not diverge, and the wrapper may run the
5096 code necessary for keeping the profiling sane. */
5097
5098 switch (DECL_FUNCTION_CODE (fn))
5099 {
5100 case BUILT_IN_FORK:
5101 id = get_identifier ("__gcov_fork");
5102 break;
5103
5104 case BUILT_IN_EXECL:
5105 id = get_identifier ("__gcov_execl");
5106 break;
5107
5108 case BUILT_IN_EXECV:
5109 id = get_identifier ("__gcov_execv");
5110 break;
5111
5112 case BUILT_IN_EXECLP:
5113 id = get_identifier ("__gcov_execlp");
5114 break;
5115
5116 case BUILT_IN_EXECLE:
5117 id = get_identifier ("__gcov_execle");
5118 break;
5119
5120 case BUILT_IN_EXECVP:
5121 id = get_identifier ("__gcov_execvp");
5122 break;
5123
5124 case BUILT_IN_EXECVE:
5125 id = get_identifier ("__gcov_execve");
5126 break;
5127
5128 default:
5129 gcc_unreachable ();
5130 }
5131
5132 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5133 FUNCTION_DECL, id, TREE_TYPE (fn));
5134 DECL_EXTERNAL (decl) = 1;
5135 TREE_PUBLIC (decl) = 1;
5136 DECL_ARTIFICIAL (decl) = 1;
5137 TREE_NOTHROW (decl) = 1;
5138 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5139 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5140 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5141 return expand_call (call, target, ignore);
5142 }
5143
5144
5145 \f
5146 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5147 the pointer in these functions is void*, the tree optimizers may remove
5148 casts. The mode computed in expand_builtin isn't reliable either, due
5149 to __sync_bool_compare_and_swap.
5150
5151 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5152 group of builtins. This gives us log2 of the mode size. */
5153
5154 static inline machine_mode
5155 get_builtin_sync_mode (int fcode_diff)
5156 {
5157 /* The size is not negotiable, so ask not to get BLKmode in return
5158 if the target indicates that a smaller size would be better. */
5159 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5160 }
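
/* For example (illustrative, not part of the original source):
   BUILT_IN_SYNC_FETCH_AND_ADD_4 is two codes past the FOO_1 builtin,
   so FCODE_DIFF is 2 and the lookup requests a MODE_INT mode of
   BITS_PER_UNIT << 2 == 32 bits, i.e. SImode on typical targets:

     machine_mode m = get_builtin_sync_mode (2);   // 32-bit int mode
*/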
5161
5162 /* Expand the memory expression LOC and return the appropriate memory operand
5163 for the builtin_sync operations. */
5164
5165 static rtx
5166 get_builtin_sync_mem (tree loc, machine_mode mode)
5167 {
5168 rtx addr, mem;
5169
5170 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5171 addr = convert_memory_address (Pmode, addr);
5172
5173 /* Note that we explicitly do not want any alias information for this
5174 memory, so that we kill all other live memories. Otherwise we don't
5175 satisfy the full barrier semantics of the intrinsic. */
5176 mem = validize_mem (gen_rtx_MEM (mode, addr));
5177
5178 /* The memory must be at least as aligned as the mode requires. */
5179 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5180 get_pointer_alignment (loc)));
5181 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5182 MEM_VOLATILE_P (mem) = 1;
5183
5184 return mem;
5185 }
5186
5187 /* Make sure an argument is in the right mode.
5188 EXP is the tree argument.
5189 MODE is the mode it should be in. */
5190
5191 static rtx
5192 expand_expr_force_mode (tree exp, machine_mode mode)
5193 {
5194 rtx val;
5195 machine_mode old_mode;
5196
5197 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5198 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5199 of CONST_INTs, where we know the old_mode only from the call argument. */
5200
5201 old_mode = GET_MODE (val);
5202 if (old_mode == VOIDmode)
5203 old_mode = TYPE_MODE (TREE_TYPE (exp));
5204 val = convert_modes (mode, old_mode, val, 1);
5205 return val;
5206 }
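
/* A minimal sketch of why the conversion above matters (assuming a
   target that promotes subword values to word mode).  For

     __sync_fetch_and_add_2 (p, n);

   the promoted SImode value of N must be truncated back to HImode
   before it reaches the HImode atomic pattern; and for a literal such
   as 1 the expanded RTX is a VOIDmode CONST_INT, so the original mode
   is only recoverable from the tree type of the argument.  */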
5207
5208
5209 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5210 EXP is the CALL_EXPR. CODE is the rtx code
5211 that corresponds to the arithmetic or logical operation from the name;
5212 an exception here is that NOT actually means NAND. TARGET is an optional
5213 place for us to store the results; AFTER is true if this is the
5214 fetch_and_xxx form. */
5215
5216 static rtx
5217 expand_builtin_sync_operation (machine_mode mode, tree exp,
5218 enum rtx_code code, bool after,
5219 rtx target)
5220 {
5221 rtx val, mem;
5222 location_t loc = EXPR_LOCATION (exp);
5223
5224 if (code == NOT && warn_sync_nand)
5225 {
5226 tree fndecl = get_callee_fndecl (exp);
5227 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5228
5229 static bool warned_f_a_n, warned_n_a_f;
5230
5231 switch (fcode)
5232 {
5233 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5234 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5235 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5236 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5237 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5238 if (warned_f_a_n)
5239 break;
5240
5241 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5242 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5243 warned_f_a_n = true;
5244 break;
5245
5246 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5247 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5248 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5249 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5250 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5251 if (warned_n_a_f)
5252 break;
5253
5254 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5255 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5256 warned_n_a_f = true;
5257 break;
5258
5259 default:
5260 gcc_unreachable ();
5261 }
5262 }
5263
5264 /* Expand the operands. */
5265 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5266 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5267
5268 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
5269 after);
5270 }
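
/* Illustrative sketch (not part of the original source) of the GCC 4.4
   semantic change warned about above.  For

     old = __sync_fetch_and_nand (p, mask);

   GCC 4.4 and later implement the operation as

     old = *p; *p = ~(old & mask);        // fetch-and-nand

   whereas earlier releases computed *p = ~*p & mask.  */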
5271
5272 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5273 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5274 true if this is the boolean form. TARGET is a place for us to store the
5275 results; this is NOT optional if IS_BOOL is true. */
5276
5277 static rtx
5278 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
5279 bool is_bool, rtx target)
5280 {
5281 rtx old_val, new_val, mem;
5282 rtx *pbool, *poval;
5283
5284 /* Expand the operands. */
5285 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5286 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5287 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5288
5289 pbool = poval = NULL;
5290 if (target != const0_rtx)
5291 {
5292 if (is_bool)
5293 pbool = &target;
5294 else
5295 poval = &target;
5296 }
5297 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5298 false, MEMMODEL_SYNC_SEQ_CST,
5299 MEMMODEL_SYNC_SEQ_CST))
5300 return NULL_RTX;
5301
5302 return target;
5303 }
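
/* Usage sketch (illustrative, not part of the original source) of the
   two forms expanded above:

     bool ok  = __sync_bool_compare_and_swap (p, oldv, newv);
     long old = __sync_val_compare_and_swap (p, oldv, newv);

   For the boolean form only PBOOL is wired to TARGET, so the success
   flag is returned; for the value form POVAL is wired instead and the
   prior contents of *P are returned.  */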
5304
5305 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5306 general form is actually an atomic exchange, and some targets only
5307 support a reduced form with the second argument being a constant 1.
5308 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5309 the results. */
5310
5311 static rtx
5312 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
5313 rtx target)
5314 {
5315 rtx val, mem;
5316
5317 /* Expand the operands. */
5318 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5319 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5320
5321 return expand_sync_lock_test_and_set (target, mem, val);
5322 }
5323
5324 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5325
5326 static void
5327 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
5328 {
5329 rtx mem;
5330
5331 /* Expand the operands. */
5332 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5333
5334 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
5335 }
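
/* Usage sketch (illustrative, not part of the original source):

     __sync_lock_release (&lock);

   releases the lock by writing the constant 0 with release semantics,
   which is why the expansion above is an atomic store of const0_rtx
   with MEMMODEL_SYNC_RELEASE.  */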
5336
5337 /* Given an integer representing an ``enum memmodel'', verify its
5338 correctness and return the memory model enum. */
5339
5340 static enum memmodel
5341 get_memmodel (tree exp)
5342 {
5343 rtx op;
5344 unsigned HOST_WIDE_INT val;
5345
5346 /* If the parameter is not a constant, it's a run time value so we'll just
5347 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5348 if (TREE_CODE (exp) != INTEGER_CST)
5349 return MEMMODEL_SEQ_CST;
5350
5351 op = expand_normal (exp);
5352
5353 val = INTVAL (op);
5354 if (targetm.memmodel_check)
5355 val = targetm.memmodel_check (val);
5356 else if (val & ~MEMMODEL_MASK)
5357 {
5358 warning (OPT_Winvalid_memory_model,
5359 "Unknown architecture specifier in memory model to builtin.");
5360 return MEMMODEL_SEQ_CST;
5361 }
5362
5363 /* We should never see a user-specified SYNC memory model, so >= LAST works. */
5364 if (memmodel_base (val) >= MEMMODEL_LAST)
5365 {
5366 warning (OPT_Winvalid_memory_model,
5367 "invalid memory model argument to builtin");
5368 return MEMMODEL_SEQ_CST;
5369 }
5370
5371 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
5372 be conservative and promote consume to acquire. */
5373 if (val == MEMMODEL_CONSUME)
5374 val = MEMMODEL_ACQUIRE;
5375
5376 return (enum memmodel) val;
5377 }
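
/* For instance (illustrative, not part of the original source):

     x = __atomic_load_n (p, __ATOMIC_CONSUME);

   reaches this function as an INTEGER_CST of MEMMODEL_CONSUME and,
   because of the PR 59448 workaround above, is expanded as if
   __ATOMIC_ACQUIRE had been written.  A model argument that is not a
   compile-time constant is likewise treated as MEMMODEL_SEQ_CST.  */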
5378
5379 /* Expand the __atomic_exchange intrinsic:
5380 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5381 EXP is the CALL_EXPR.
5382 TARGET is an optional place for us to store the results. */
5383
5384 static rtx
5385 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
5386 {
5387 rtx val, mem;
5388 enum memmodel model;
5389
5390 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5391
5392 if (!flag_inline_atomics)
5393 return NULL_RTX;
5394
5395 /* Expand the operands. */
5396 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5397 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5398
5399 return expand_atomic_exchange (target, mem, val, model);
5400 }
5401
5402 /* Expand the __atomic_compare_exchange intrinsic:
5403 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5404 TYPE desired, BOOL weak,
5405 enum memmodel success,
5406 enum memmodel failure)
5407 EXP is the CALL_EXPR.
5408 TARGET is an optional place for us to store the results. */
5409
5410 static rtx
5411 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
5412 rtx target)
5413 {
5414 rtx expect, desired, mem, oldval;
5415 rtx_code_label *label;
5416 enum memmodel success, failure;
5417 tree weak;
5418 bool is_weak;
5419
5420 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5421 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5422
5423 if (failure > success)
5424 {
5425 warning (OPT_Winvalid_memory_model,
5426 "failure memory model cannot be stronger than success memory "
5427 "model for %<__atomic_compare_exchange%>");
5428 success = MEMMODEL_SEQ_CST;
5429 }
5430
5431 if (is_mm_release (failure) || is_mm_acq_rel (failure))
5432 {
5433 warning (OPT_Winvalid_memory_model,
5434 "invalid failure memory model for "
5435 "%<__atomic_compare_exchange%>");
5436 failure = MEMMODEL_SEQ_CST;
5437 success = MEMMODEL_SEQ_CST;
5438 }
5439
5440
5441 if (!flag_inline_atomics)
5442 return NULL_RTX;
5443
5444 /* Expand the operands. */
5445 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5446
5447 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5448 expect = convert_memory_address (Pmode, expect);
5449 expect = gen_rtx_MEM (mode, expect);
5450 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5451
5452 weak = CALL_EXPR_ARG (exp, 3);
5453 is_weak = false;
5454 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
5455 is_weak = true;
5456
5457 if (target == const0_rtx)
5458 target = NULL;
5459
5460 /* Lest the rtl backend create a race condition with an improper store
5461 to memory, always create a new pseudo for OLDVAL. */
5462 oldval = NULL;
5463
5464 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
5465 is_weak, success, failure))
5466 return NULL_RTX;
5467
5468 /* Conditionally store back to EXPECT, lest we create a race condition
5469 with an improper store to memory. */
5470 /* ??? With a rearrangement of atomics at the gimple level, we can handle
5471 the normal case where EXPECT is totally private, i.e. a register. At
5472 which point the store can be unconditional. */
5473 label = gen_label_rtx ();
5474 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
5475 GET_MODE (target), 1, label);
5476 emit_move_insn (expect, oldval);
5477 emit_label (label);
5478
5479 return target;
5480 }
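
/* A rough C-level sketch (not part of the original source) of what is
   emitted above, with CAS standing in for the target's primitive:

     bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
                                     TYPE desired, ...)
     {
       TYPE old;
       bool ok = CAS (object, *expect, desired, &old);
       if (!ok)
         *expect = old;   // conditional store-back, see comment above
       return ok;
     }

   The store to *EXPECT is conditional so that a successful exchange
   never writes to *EXPECT at all.  */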
5481
5482 /* Expand the __atomic_load intrinsic:
5483 TYPE __atomic_load (TYPE *object, enum memmodel)
5484 EXP is the CALL_EXPR.
5485 TARGET is an optional place for us to store the results. */
5486
5487 static rtx
5488 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
5489 {
5490 rtx mem;
5491 enum memmodel model;
5492
5493 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5494 if (is_mm_release (model) || is_mm_acq_rel (model))
5495 {
5496 warning (OPT_Winvalid_memory_model,
5497 "invalid memory model for %<__atomic_load%>");
5498 model = MEMMODEL_SEQ_CST;
5499 }
5500
5501 if (!flag_inline_atomics)
5502 return NULL_RTX;
5503
5504 /* Expand the operand. */
5505 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5506
5507 return expand_atomic_load (target, mem, model);
5508 }
5509
5510
5511 /* Expand the __atomic_store intrinsic:
5512 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5513 EXP is the CALL_EXPR.
5514 TARGET is an optional place for us to store the results. */
5515
5516 static rtx
5517 expand_builtin_atomic_store (machine_mode mode, tree exp)
5518 {
5519 rtx mem, val;
5520 enum memmodel model;
5521
5522 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5523 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
5524 || is_mm_release (model)))
5525 {
5526 warning (OPT_Winvalid_memory_model,
5527 "invalid memory model for %<__atomic_store%>");
5528 model = MEMMODEL_SEQ_CST;
5529 }
5530
5531 if (!flag_inline_atomics)
5532 return NULL_RTX;
5533
5534 /* Expand the operands. */
5535 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5536 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5537
5538 return expand_atomic_store (mem, val, model, false);
5539 }
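
/* For reference (illustrative, not part of the original source), only
   these forms are expanded inline here:

     __atomic_store_n (p, v, __ATOMIC_RELAXED);
     __atomic_store_n (p, v, __ATOMIC_RELEASE);
     __atomic_store_n (p, v, __ATOMIC_SEQ_CST);

   Acquire-class models are diagnosed above and demoted to SEQ_CST.  */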
5540
5541 /* Expand the __atomic_fetch_XXX intrinsic:
5542 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5543 EXP is the CALL_EXPR.
5544 TARGET is an optional place for us to store the results.
5545 CODE is the operation: PLUS, MINUS, AND, XOR, or IOR (NOT means NAND).
5546 FETCH_AFTER is true if returning the result of the operation.
5547 FETCH_AFTER is false if returning the value before the operation.
5548 IGNORE is true if the result is not used.
5549 EXT_CALL is the correct builtin for an external call if this cannot be
5550 resolved to an instruction sequence. */
5551
5552 static rtx
5553 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
5554 enum rtx_code code, bool fetch_after,
5555 bool ignore, enum built_in_function ext_call)
5556 {
5557 rtx val, mem, ret;
5558 enum memmodel model;
5559 tree fndecl;
5560 tree addr;
5561
5562 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5563
5564 /* Expand the operands. */
5565 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5566 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5567
5568 /* Only try generating instructions if inlining is turned on. */
5569 if (flag_inline_atomics)
5570 {
5571 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
5572 if (ret)
5573 return ret;
5574 }
5575
5576 /* Return if a different routine isn't needed for the library call. */
5577 if (ext_call == BUILT_IN_NONE)
5578 return NULL_RTX;
5579
5580 /* Change the call to the specified function. */
5581 fndecl = get_callee_fndecl (exp);
5582 addr = CALL_EXPR_FN (exp);
5583 STRIP_NOPS (addr);
5584
5585 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
5586 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
5587
5588 /* Expand the call here so we can emit trailing code. */
5589 ret = expand_call (exp, target, ignore);
5590
5591 /* Replace the original function just in case it matters. */
5592 TREE_OPERAND (addr, 0) = fndecl;
5593
5594 /* Then issue the arithmetic correction to return the right result. */
5595 if (!ignore)
5596 {
5597 if (code == NOT)
5598 {
5599 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
5600 OPTAB_LIB_WIDEN);
5601 ret = expand_simple_unop (mode, NOT, ret, target, true);
5602 }
5603 else
5604 ret = expand_simple_binop (mode, code, ret, val, target, true,
5605 OPTAB_LIB_WIDEN);
5606 }
5607 return ret;
5608 }
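
/* Illustrative sketch (not part of the original source) of the trailing
   correction above: if __atomic_add_fetch_4 ends up as a call to the
   external __atomic_fetch_add_4 routine, the fetched value has the
   operation re-applied to recover the "after" result:

     ret = __atomic_fetch_add_4 (p, val, model);   // returns old value
     ret = ret + val;                              // after-value wanted

   For NAND the correction is ret = ~(ret & val), matching the NOT
   special case in the code above.  */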
5609
5610
5611 #ifndef HAVE_atomic_clear
5612 # define HAVE_atomic_clear 0
5613 # define gen_atomic_clear(x,y) (gcc_unreachable (), NULL_RTX)
5614 #endif
5615
5616 /* Expand an atomic clear operation.
5617 void __atomic_clear (BOOL *obj, enum memmodel)
5618 EXP is the call expression. */
5619
5620 static rtx
5621 expand_builtin_atomic_clear (tree exp)
5622 {
5623 machine_mode mode;
5624 rtx mem, ret;
5625 enum memmodel model;
5626
5627 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5628 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5629 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5630
5631 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
5632 {
5633 warning (OPT_Winvalid_memory_model,
5634 "invalid memory model for %<__atomic_store%>");
5635 model = MEMMODEL_SEQ_CST;
5636 }
5637
5638 if (HAVE_atomic_clear)
5639 {
5640 emit_insn (gen_atomic_clear (mem, model));
5641 return const0_rtx;
5642 }
5643
5644 /* Try issuing an __atomic_store, and allow fallback to
5645 __sync_lock_release. The only way this can
5646 fail is if the bool type is larger than a word size. Unlikely, but
5647 handle it anyway for completeness. Assume a single threaded model since
5648 there is no atomic support in this case, and no barriers are required. */
5649 ret = expand_atomic_store (mem, const0_rtx, model, true);
5650 if (!ret)
5651 emit_move_insn (mem, const0_rtx);
5652 return const0_rtx;
5653 }
5654
5655 /* Expand an atomic test_and_set operation.
5656 bool __atomic_test_and_set (BOOL *obj, enum memmodel)
5657 EXP is the call expression. */
5658
5659 static rtx
5660 expand_builtin_atomic_test_and_set (tree exp, rtx target)
5661 {
5662 rtx mem;
5663 enum memmodel model;
5664 machine_mode mode;
5665
5666 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5667 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5668 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5669
5670 return expand_atomic_test_and_set (target, mem, model);
5671 }
5672
5673
5674 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
5675 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
5676
5677 static tree
5678 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
5679 {
5680 int size;
5681 machine_mode mode;
5682 unsigned int mode_align, type_align;
5683
5684 if (TREE_CODE (arg0) != INTEGER_CST)
5685 return NULL_TREE;
5686
5687 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
5688 mode = mode_for_size (size, MODE_INT, 0);
5689 mode_align = GET_MODE_ALIGNMENT (mode);
5690
5691 if (TREE_CODE (arg1) == INTEGER_CST && INTVAL (expand_normal (arg1)) == 0)
5692 type_align = mode_align;
5693 else
5694 {
5695 tree ttype = TREE_TYPE (arg1);
5696
5697 /* This function is usually invoked and folded immediately by the front
5698 end before anything else has a chance to look at it. The pointer
5699 parameter at this point is usually cast to a void *, so check for that
5700 and look past the cast. */
5701 if (CONVERT_EXPR_P (arg1) && POINTER_TYPE_P (ttype)
5702 && VOID_TYPE_P (TREE_TYPE (ttype)))
5703 arg1 = TREE_OPERAND (arg1, 0);
5704
5705 ttype = TREE_TYPE (arg1);
5706 gcc_assert (POINTER_TYPE_P (ttype));
5707
5708 /* Get the underlying type of the object. */
5709 ttype = TREE_TYPE (ttype);
5710 type_align = TYPE_ALIGN (ttype);
5711 }
5712
5713 /* If the object has smaller alignment, the lock free routines cannot
5714 be used. */
5715 if (type_align < mode_align)
5716 return boolean_false_node;
5717
5718 /* Check if a compare_and_swap pattern exists for the mode which represents
5719 the required size. The pattern is not allowed to fail, so the existence
5720 of the pattern indicates support is present. */
5721 if (can_compare_and_swap_p (mode, true))
5722 return boolean_true_node;
5723 else
5724 return boolean_false_node;
5725 }
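
/* Usage sketch (illustrative, not part of the original source):

     int a;
     ... __atomic_always_lock_free (sizeof a, &a) ...

   folds to 1 when a 32-bit compare-and-swap pattern exists and the
   alignment of A is at least that of SImode.  With a null second
   argument, e.g. __atomic_always_lock_free (4, 0), the answer is
   based purely on the natural alignment of the mode.  */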
5726
5727 /* Return true if the parameters to call EXP represent an object which will
5728 always generate lock free instructions. The first argument represents the
5729 size of the object, and the second parameter is a pointer to the object
5730 itself. If NULL is passed for the object, then the result is based on
5731 typical alignment for an object of the specified size. Otherwise return
5732 false. */
5733
5734 static rtx
5735 expand_builtin_atomic_always_lock_free (tree exp)
5736 {
5737 tree size;
5738 tree arg0 = CALL_EXPR_ARG (exp, 0);
5739 tree arg1 = CALL_EXPR_ARG (exp, 1);
5740
5741 if (TREE_CODE (arg0) != INTEGER_CST)
5742 {
5743 error ("non-constant argument 1 to __atomic_always_lock_free");
5744 return const0_rtx;
5745 }
5746
5747 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
5748 if (size == boolean_true_node)
5749 return const1_rtx;
5750 return const0_rtx;
5751 }
5752
5753 /* Return one or zero if it can be determined that object ARG1 of size ARG0
5754 is lock free on this architecture. */
5755
5756 static tree
5757 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
5758 {
5759 if (!flag_inline_atomics)
5760 return NULL_TREE;
5761
5762 /* If it isn't always lock free, don't generate a result. */
5763 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
5764 return boolean_true_node;
5765
5766 return NULL_TREE;
5767 }
5768
5769 /* Return true if the parameters to call EXP represent an object which will
5770 always generate lock free instructions. The first argument represents the
5771 size of the object, and the second parameter is a pointer to the object
5772 itself. If NULL is passed for the object, then the result is based on
5773 typical alignment for an object of the specified size. Otherwise return
5774 NULL. */
5775
5776 static rtx
5777 expand_builtin_atomic_is_lock_free (tree exp)
5778 {
5779 tree size;
5780 tree arg0 = CALL_EXPR_ARG (exp, 0);
5781 tree arg1 = CALL_EXPR_ARG (exp, 1);
5782
5783 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5784 {
5785 error ("non-integer argument 1 to __atomic_is_lock_free");
5786 return NULL_RTX;
5787 }
5788
5789 if (!flag_inline_atomics)
5790 return NULL_RTX;
5791
5792 /* If the value is known at compile time, return the RTX for it. */
5793 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
5794 if (size == boolean_true_node)
5795 return const1_rtx;
5796
5797 return NULL_RTX;
5798 }
5799
5800 /* Expand the __atomic_thread_fence intrinsic:
5801 void __atomic_thread_fence (enum memmodel)
5802 EXP is the CALL_EXPR. */
5803
5804 static void
5805 expand_builtin_atomic_thread_fence (tree exp)
5806 {
5807 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5808 expand_mem_thread_fence (model);
5809 }
5810
5811 /* Expand the __atomic_signal_fence intrinsic:
5812 void __atomic_signal_fence (enum memmodel)
5813 EXP is the CALL_EXPR. */
5814
5815 static void
5816 expand_builtin_atomic_signal_fence (tree exp)
5817 {
5818 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5819 expand_mem_signal_fence (model);
5820 }
5821
5822 /* Expand the __sync_synchronize intrinsic. */
5823
5824 static void
5825 expand_builtin_sync_synchronize (void)
5826 {
5827 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
5828 }
5829
5830 static rtx
5831 expand_builtin_thread_pointer (tree exp, rtx target)
5832 {
5833 enum insn_code icode;
5834 if (!validate_arglist (exp, VOID_TYPE))
5835 return const0_rtx;
5836 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
5837 if (icode != CODE_FOR_nothing)
5838 {
5839 struct expand_operand op;
5840 /* If the target is not suitable then create a new target. */
5841 if (target == NULL_RTX
5842 || !REG_P (target)
5843 || GET_MODE (target) != Pmode)
5844 target = gen_reg_rtx (Pmode);
5845 create_output_operand (&op, target, Pmode);
5846 expand_insn (icode, 1, &op);
5847 return target;
5848 }
5849 error ("__builtin_thread_pointer is not supported on this target");
5850 return const0_rtx;
5851 }
5852
5853 static void
5854 expand_builtin_set_thread_pointer (tree exp)
5855 {
5856 enum insn_code icode;
5857 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5858 return;
5859 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
5860 if (icode != CODE_FOR_nothing)
5861 {
5862 struct expand_operand op;
5863 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
5864 Pmode, EXPAND_NORMAL);
5865 create_input_operand (&op, val, Pmode);
5866 expand_insn (icode, 1, &op);
5867 return;
5868 }
5869 error ("__builtin_set_thread_pointer is not supported on this target");
5870 }
5871
5872 \f
5873 /* Emit code to restore the current value of the stack. */
5874
5875 static void
5876 expand_stack_restore (tree var)
5877 {
5878 rtx_insn *prev;
5879 rtx sa = expand_normal (var);
5880
5881 sa = convert_memory_address (Pmode, sa);
5882
5883 prev = get_last_insn ();
5884 emit_stack_restore (SAVE_BLOCK, sa);
5885
5886 record_new_stack_level ();
5887
5888 fixup_args_size_notes (prev, get_last_insn (), 0);
5889 }
5890
5891 /* Emit code to save the current value of the stack. */
5892
5893 static rtx
5894 expand_stack_save (void)
5895 {
5896 rtx ret = NULL_RTX;
5897
5898 emit_stack_save (SAVE_BLOCK, &ret);
5899 return ret;
5900 }
5901
5902
5903 /* Expand OpenACC acc_on_device.
5904
5905 This has to happen late (that is, not in early folding; expand_builtin_*,
5906 rather than fold_builtin_*), as we have to act differently for host and
5907 acceleration device (ACCEL_COMPILER conditional). */
5908
5909 static rtx
5910 expand_builtin_acc_on_device (tree exp ATTRIBUTE_UNUSED,
5911 rtx target ATTRIBUTE_UNUSED)
5912 {
5913 #ifdef ACCEL_COMPILER
5914 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5915 return NULL_RTX;
5916
5917 tree arg = CALL_EXPR_ARG (exp, 0);
5918
5919 /* Return (arg == v1 || arg == v2) ? 1 : 0. */
5920 machine_mode v_mode = TYPE_MODE (TREE_TYPE (arg));
5921 rtx v = expand_normal (arg), v1, v2;
5922 v1 = GEN_INT (GOMP_DEVICE_NOT_HOST);
5923 v2 = GEN_INT (ACCEL_COMPILER_acc_device);
5924 machine_mode target_mode = TYPE_MODE (integer_type_node);
5925 if (!target || !register_operand (target, target_mode))
5926 target = gen_reg_rtx (target_mode);
5927 emit_move_insn (target, const1_rtx);
5928 rtx_code_label *done_label = gen_label_rtx ();
5929 do_compare_rtx_and_jump (v, v1, EQ, false, v_mode, NULL_RTX,
5930 NULL, done_label, PROB_EVEN);
5931 do_compare_rtx_and_jump (v, v2, EQ, false, v_mode, NULL_RTX,
5932 NULL, done_label, PROB_EVEN);
5933 emit_move_insn (target, const0_rtx);
5934 emit_label (done_label);
5935
5936 return target;
5937 #else
5938 return NULL;
5939 #endif
5940 }
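
/* In effect (illustrative sketch, not part of the original source),
   the ACCEL_COMPILER expansion above computes:

     int acc_on_device (int dev)
     {
       return dev == GOMP_DEVICE_NOT_HOST
              || dev == ACCEL_COMPILER_acc_device;
     }

   using two compare-and-jump sequences over a target preloaded
   with 1.  */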
5941
5942
5943 /* Expand an expression EXP that calls a built-in function,
5944 with result going to TARGET if that's convenient
5945 (and in mode MODE if that's convenient).
5946 SUBTARGET may be used as the target for computing one of EXP's operands.
5947 IGNORE is nonzero if the value is to be ignored. */
5948
5949 rtx
5950 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
5951 int ignore)
5952 {
5953 tree fndecl = get_callee_fndecl (exp);
5954 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5955 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5956 int flags;
5957
5958 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5959 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5960
5961 /* When ASan is enabled, we don't want to expand some memory/string
5962 builtins and rely on libsanitizer's hooks. This allows us to avoid
5963 redundant checks and be sure that possible overflow will be detected
5964 by ASan. */
5965
5966 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
5967 return expand_call (exp, target, ignore);
5968
5969 /* When not optimizing, generate calls to library functions for a certain
5970 set of builtins. */
5971 if (!optimize
5972 && !called_as_built_in (fndecl)
5973 && fcode != BUILT_IN_FORK
5974 && fcode != BUILT_IN_EXECL
5975 && fcode != BUILT_IN_EXECV
5976 && fcode != BUILT_IN_EXECLP
5977 && fcode != BUILT_IN_EXECLE
5978 && fcode != BUILT_IN_EXECVP
5979 && fcode != BUILT_IN_EXECVE
5980 && fcode != BUILT_IN_ALLOCA
5981 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
5982 && fcode != BUILT_IN_FREE
5983 && fcode != BUILT_IN_CHKP_SET_PTR_BOUNDS
5984 && fcode != BUILT_IN_CHKP_INIT_PTR_BOUNDS
5985 && fcode != BUILT_IN_CHKP_NULL_PTR_BOUNDS
5986 && fcode != BUILT_IN_CHKP_COPY_PTR_BOUNDS
5987 && fcode != BUILT_IN_CHKP_NARROW_PTR_BOUNDS
5988 && fcode != BUILT_IN_CHKP_STORE_PTR_BOUNDS
5989 && fcode != BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
5990 && fcode != BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
5991 && fcode != BUILT_IN_CHKP_CHECK_PTR_BOUNDS
5992 && fcode != BUILT_IN_CHKP_GET_PTR_LBOUND
5993 && fcode != BUILT_IN_CHKP_GET_PTR_UBOUND
5994 && fcode != BUILT_IN_CHKP_BNDRET)
5995 return expand_call (exp, target, ignore);
5996
5997 /* The built-in function expanders test for target == const0_rtx
5998 to determine whether the function's result will be ignored. */
5999 if (ignore)
6000 target = const0_rtx;
6001
6002 /* If the result of a pure or const built-in function is ignored, and
6003 none of its arguments are volatile, we can avoid expanding the
6004 built-in call and just evaluate the arguments for side-effects. */
6005 if (target == const0_rtx
6006 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
6007 && !(flags & ECF_LOOPING_CONST_OR_PURE))
6008 {
6009 bool volatilep = false;
6010 tree arg;
6011 call_expr_arg_iterator iter;
6012
6013 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6014 if (TREE_THIS_VOLATILE (arg))
6015 {
6016 volatilep = true;
6017 break;
6018 }
6019
6020 if (! volatilep)
6021 {
6022 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6023 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
6024 return const0_rtx;
6025 }
6026 }
6027
6028 /* expand_builtin_with_bounds is supposed to be used for
6029 instrumented builtin calls. */
6030 gcc_assert (!CALL_WITH_BOUNDS_P (exp));
6031
6032 switch (fcode)
6033 {
6034 CASE_FLT_FN (BUILT_IN_FABS):
6035 case BUILT_IN_FABSD32:
6036 case BUILT_IN_FABSD64:
6037 case BUILT_IN_FABSD128:
6038 target = expand_builtin_fabs (exp, target, subtarget);
6039 if (target)
6040 return target;
6041 break;
6042
6043 CASE_FLT_FN (BUILT_IN_COPYSIGN):
6044 target = expand_builtin_copysign (exp, target, subtarget);
6045 if (target)
6046 return target;
6047 break;
6048
6049 /* Just do a normal library call if we were unable to fold
6050 the values. */
6051 CASE_FLT_FN (BUILT_IN_CABS):
6052 break;
6053
6054 CASE_FLT_FN (BUILT_IN_EXP):
6055 CASE_FLT_FN (BUILT_IN_EXP10):
6056 CASE_FLT_FN (BUILT_IN_POW10):
6057 CASE_FLT_FN (BUILT_IN_EXP2):
6058 CASE_FLT_FN (BUILT_IN_EXPM1):
6059 CASE_FLT_FN (BUILT_IN_LOGB):
6060 CASE_FLT_FN (BUILT_IN_LOG):
6061 CASE_FLT_FN (BUILT_IN_LOG10):
6062 CASE_FLT_FN (BUILT_IN_LOG2):
6063 CASE_FLT_FN (BUILT_IN_LOG1P):
6064 CASE_FLT_FN (BUILT_IN_TAN):
6065 CASE_FLT_FN (BUILT_IN_ASIN):
6066 CASE_FLT_FN (BUILT_IN_ACOS):
6067 CASE_FLT_FN (BUILT_IN_ATAN):
6068 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
6069 /* Treat these like sqrt only if unsafe math optimizations are allowed,
6070 because of possible accuracy problems. */
6071 if (! flag_unsafe_math_optimizations)
6072 break;
6073 CASE_FLT_FN (BUILT_IN_SQRT):
6074 CASE_FLT_FN (BUILT_IN_FLOOR):
6075 CASE_FLT_FN (BUILT_IN_CEIL):
6076 CASE_FLT_FN (BUILT_IN_TRUNC):
6077 CASE_FLT_FN (BUILT_IN_ROUND):
6078 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6079 CASE_FLT_FN (BUILT_IN_RINT):
6080 target = expand_builtin_mathfn (exp, target, subtarget);
6081 if (target)
6082 return target;
6083 break;
6084
6085 CASE_FLT_FN (BUILT_IN_FMA):
6086 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
6087 if (target)
6088 return target;
6089 break;
6090
6091 CASE_FLT_FN (BUILT_IN_ILOGB):
6092 if (! flag_unsafe_math_optimizations)
6093 break;
6094 CASE_FLT_FN (BUILT_IN_ISINF):
6095 CASE_FLT_FN (BUILT_IN_FINITE):
6096 case BUILT_IN_ISFINITE:
6097 case BUILT_IN_ISNORMAL:
6098 target = expand_builtin_interclass_mathfn (exp, target);
6099 if (target)
6100 return target;
6101 break;
6102
6103 CASE_FLT_FN (BUILT_IN_ICEIL):
6104 CASE_FLT_FN (BUILT_IN_LCEIL):
6105 CASE_FLT_FN (BUILT_IN_LLCEIL):
6106 CASE_FLT_FN (BUILT_IN_LFLOOR):
6107 CASE_FLT_FN (BUILT_IN_IFLOOR):
6108 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6109 target = expand_builtin_int_roundingfn (exp, target);
6110 if (target)
6111 return target;
6112 break;
6113
6114 CASE_FLT_FN (BUILT_IN_IRINT):
6115 CASE_FLT_FN (BUILT_IN_LRINT):
6116 CASE_FLT_FN (BUILT_IN_LLRINT):
6117 CASE_FLT_FN (BUILT_IN_IROUND):
6118 CASE_FLT_FN (BUILT_IN_LROUND):
6119 CASE_FLT_FN (BUILT_IN_LLROUND):
6120 target = expand_builtin_int_roundingfn_2 (exp, target);
6121 if (target)
6122 return target;
6123 break;
6124
6125 CASE_FLT_FN (BUILT_IN_POWI):
6126 target = expand_builtin_powi (exp, target);
6127 if (target)
6128 return target;
6129 break;
6130
6131 CASE_FLT_FN (BUILT_IN_ATAN2):
6132 CASE_FLT_FN (BUILT_IN_LDEXP):
6133 CASE_FLT_FN (BUILT_IN_SCALB):
6134 CASE_FLT_FN (BUILT_IN_SCALBN):
6135 CASE_FLT_FN (BUILT_IN_SCALBLN):
6136 if (! flag_unsafe_math_optimizations)
6137 break;
6138
6139 CASE_FLT_FN (BUILT_IN_FMOD):
6140 CASE_FLT_FN (BUILT_IN_REMAINDER):
6141 CASE_FLT_FN (BUILT_IN_DREM):
6142 CASE_FLT_FN (BUILT_IN_POW):
6143 target = expand_builtin_mathfn_2 (exp, target, subtarget);
6144 if (target)
6145 return target;
6146 break;
6147
6148 CASE_FLT_FN (BUILT_IN_CEXPI):
6149 target = expand_builtin_cexpi (exp, target);
6150 gcc_assert (target);
6151 return target;
6152
6153 CASE_FLT_FN (BUILT_IN_SIN):
6154 CASE_FLT_FN (BUILT_IN_COS):
6155 if (! flag_unsafe_math_optimizations)
6156 break;
6157 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6158 if (target)
6159 return target;
6160 break;
6161
6162 CASE_FLT_FN (BUILT_IN_SINCOS):
6163 if (! flag_unsafe_math_optimizations)
6164 break;
6165 target = expand_builtin_sincos (exp);
6166 if (target)
6167 return target;
6168 break;
6169
6170 case BUILT_IN_APPLY_ARGS:
6171 return expand_builtin_apply_args ();
6172
6173 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6174 FUNCTION with a copy of the parameters described by
6175 ARGUMENTS, and ARGSIZE. It returns a block of memory
6176 allocated on the stack into which is stored all the registers
6177 that might possibly be used for returning the result of a
6178 function. ARGUMENTS is the value returned by
6179 __builtin_apply_args. ARGSIZE is the number of bytes of
6180 arguments that must be copied. ??? How should this value be
6181 computed? We'll also need a safe worst case value for varargs
6182 functions. */
6183 case BUILT_IN_APPLY:
6184 if (!validate_arglist (exp, POINTER_TYPE,
6185 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6186 && !validate_arglist (exp, REFERENCE_TYPE,
6187 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6188 return const0_rtx;
6189 else
6190 {
6191 rtx ops[3];
6192
6193 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6194 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6195 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6196
6197 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6198 }
6199
6200 /* __builtin_return (RESULT) causes the function to return the
6201 value described by RESULT. RESULT is address of the block of
6202 memory returned by __builtin_apply. */
6203 case BUILT_IN_RETURN:
6204 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6205 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6206 return const0_rtx;
6207
6208 case BUILT_IN_SAVEREGS:
6209 return expand_builtin_saveregs ();
6210
6211 case BUILT_IN_VA_ARG_PACK:
6212 /* All valid uses of __builtin_va_arg_pack () are removed during
6213 inlining. */
6214 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6215 return const0_rtx;
6216
6217 case BUILT_IN_VA_ARG_PACK_LEN:
6218 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6219 inlining. */
6220 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6221 return const0_rtx;
6222
6223 /* Return the address of the first anonymous stack arg. */
6224 case BUILT_IN_NEXT_ARG:
6225 if (fold_builtin_next_arg (exp, false))
6226 return const0_rtx;
6227 return expand_builtin_next_arg ();
6228
6229 case BUILT_IN_CLEAR_CACHE:
6230 target = expand_builtin___clear_cache (exp);
6231 if (target)
6232 return target;
6233 break;
6234
6235 case BUILT_IN_CLASSIFY_TYPE:
6236 return expand_builtin_classify_type (exp);
6237
6238 case BUILT_IN_CONSTANT_P:
6239 return const0_rtx;
6240
6241 case BUILT_IN_FRAME_ADDRESS:
6242 case BUILT_IN_RETURN_ADDRESS:
6243 return expand_builtin_frame_address (fndecl, exp);
6244
6245 /* Returns the address of the area where the structure is returned.
6246 0 otherwise. */
6247 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6248 if (call_expr_nargs (exp) != 0
6249 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6250 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6251 return const0_rtx;
6252 else
6253 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6254
6255 case BUILT_IN_ALLOCA:
6256 case BUILT_IN_ALLOCA_WITH_ALIGN:
6257 /* If the allocation stems from the declaration of a variable-sized
6258 object, it cannot accumulate. */
6259 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
6260 if (target)
6261 return target;
6262 break;
6263
6264 case BUILT_IN_STACK_SAVE:
6265 return expand_stack_save ();
6266
6267 case BUILT_IN_STACK_RESTORE:
6268 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6269 return const0_rtx;
6270
6271 case BUILT_IN_BSWAP16:
6272 case BUILT_IN_BSWAP32:
6273 case BUILT_IN_BSWAP64:
6274 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
6275 if (target)
6276 return target;
6277 break;
6278
6279 CASE_INT_FN (BUILT_IN_FFS):
6280 target = expand_builtin_unop (target_mode, exp, target,
6281 subtarget, ffs_optab);
6282 if (target)
6283 return target;
6284 break;
6285
6286 CASE_INT_FN (BUILT_IN_CLZ):
6287 target = expand_builtin_unop (target_mode, exp, target,
6288 subtarget, clz_optab);
6289 if (target)
6290 return target;
6291 break;
6292
6293 CASE_INT_FN (BUILT_IN_CTZ):
6294 target = expand_builtin_unop (target_mode, exp, target,
6295 subtarget, ctz_optab);
6296 if (target)
6297 return target;
6298 break;
6299
6300 CASE_INT_FN (BUILT_IN_CLRSB):
6301 target = expand_builtin_unop (target_mode, exp, target,
6302 subtarget, clrsb_optab);
6303 if (target)
6304 return target;
6305 break;
6306
6307 CASE_INT_FN (BUILT_IN_POPCOUNT):
6308 target = expand_builtin_unop (target_mode, exp, target,
6309 subtarget, popcount_optab);
6310 if (target)
6311 return target;
6312 break;
6313
6314 CASE_INT_FN (BUILT_IN_PARITY):
6315 target = expand_builtin_unop (target_mode, exp, target,
6316 subtarget, parity_optab);
6317 if (target)
6318 return target;
6319 break;
6320
6321 case BUILT_IN_STRLEN:
6322 target = expand_builtin_strlen (exp, target, target_mode);
6323 if (target)
6324 return target;
6325 break;
6326
6327 case BUILT_IN_STRCPY:
6328 target = expand_builtin_strcpy (exp, target);
6329 if (target)
6330 return target;
6331 break;
6332
6333 case BUILT_IN_STRNCPY:
6334 target = expand_builtin_strncpy (exp, target);
6335 if (target)
6336 return target;
6337 break;
6338
6339 case BUILT_IN_STPCPY:
6340 target = expand_builtin_stpcpy (exp, target, mode);
6341 if (target)
6342 return target;
6343 break;
6344
6345 case BUILT_IN_MEMCPY:
6346 target = expand_builtin_memcpy (exp, target);
6347 if (target)
6348 return target;
6349 break;
6350
6351 case BUILT_IN_MEMPCPY:
6352 target = expand_builtin_mempcpy (exp, target, mode);
6353 if (target)
6354 return target;
6355 break;
6356
6357 case BUILT_IN_MEMSET:
6358 target = expand_builtin_memset (exp, target, mode);
6359 if (target)
6360 return target;
6361 break;
6362
6363 case BUILT_IN_BZERO:
6364 target = expand_builtin_bzero (exp);
6365 if (target)
6366 return target;
6367 break;
6368
6369 case BUILT_IN_STRCMP:
6370 target = expand_builtin_strcmp (exp, target);
6371 if (target)
6372 return target;
6373 break;
6374
6375 case BUILT_IN_STRNCMP:
6376 target = expand_builtin_strncmp (exp, target, mode);
6377 if (target)
6378 return target;
6379 break;
6380
6381 case BUILT_IN_BCMP:
6382 case BUILT_IN_MEMCMP:
6383 target = expand_builtin_memcmp (exp, target, mode);
6384 if (target)
6385 return target;
6386 break;
6387
6388 case BUILT_IN_SETJMP:
6389 /* This should have been lowered to the builtins below. */
6390 gcc_unreachable ();
6391
6392 case BUILT_IN_SETJMP_SETUP:
6393 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6394 and the receiver label. */
6395 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6396 {
6397 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6398 VOIDmode, EXPAND_NORMAL);
6399 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6400 rtx label_r = label_rtx (label);
6401
6402 /* This is copied from the handling of non-local gotos. */
6403 expand_builtin_setjmp_setup (buf_addr, label_r);
6404 nonlocal_goto_handler_labels
6405 = gen_rtx_INSN_LIST (VOIDmode, label_r,
6406 nonlocal_goto_handler_labels);
6407 /* ??? Do not let expand_label treat us as such since we would
6408 not want to be both on the list of non-local labels and on
6409 the list of forced labels. */
6410 FORCED_LABEL (label) = 0;
6411 return const0_rtx;
6412 }
6413 break;
6414
6415 case BUILT_IN_SETJMP_RECEIVER:
6416 /* __builtin_setjmp_receiver is passed the receiver label. */
6417 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6418 {
6419 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6420 rtx label_r = label_rtx (label);
6421
6422 expand_builtin_setjmp_receiver (label_r);
6423 return const0_rtx;
6424 }
6425 break;
6426
6427 /* __builtin_longjmp is passed a pointer to an array of five words.
6428 It's similar to the C library longjmp function but works with
6429 __builtin_setjmp above. */
6430 case BUILT_IN_LONGJMP:
6431 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6432 {
6433 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6434 VOIDmode, EXPAND_NORMAL);
6435 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6436
6437 if (value != const1_rtx)
6438 {
6439 error ("%<__builtin_longjmp%> second argument must be 1");
6440 return const0_rtx;
6441 }
6442
6443 expand_builtin_longjmp (buf_addr, value);
6444 return const0_rtx;
6445 }
6446 break;
6447
6448 case BUILT_IN_NONLOCAL_GOTO:
6449 target = expand_builtin_nonlocal_goto (exp);
6450 if (target)
6451 return target;
6452 break;
6453
6454 /* This updates the setjmp buffer that is its argument with the value
6455 of the current stack pointer. */
6456 case BUILT_IN_UPDATE_SETJMP_BUF:
6457 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6458 {
6459 rtx buf_addr
6460 = expand_normal (CALL_EXPR_ARG (exp, 0));
6461
6462 expand_builtin_update_setjmp_buf (buf_addr);
6463 return const0_rtx;
6464 }
6465 break;
6466
6467 case BUILT_IN_TRAP:
6468 expand_builtin_trap ();
6469 return const0_rtx;
6470
6471 case BUILT_IN_UNREACHABLE:
6472 expand_builtin_unreachable ();
6473 return const0_rtx;
6474
6475 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6476 case BUILT_IN_SIGNBITD32:
6477 case BUILT_IN_SIGNBITD64:
6478 case BUILT_IN_SIGNBITD128:
6479 target = expand_builtin_signbit (exp, target);
6480 if (target)
6481 return target;
6482 break;
6483
6484 /* Various hooks for the DWARF 2 __throw routine. */
6485 case BUILT_IN_UNWIND_INIT:
6486 expand_builtin_unwind_init ();
6487 return const0_rtx;
6488 case BUILT_IN_DWARF_CFA:
6489 return virtual_cfa_rtx;
6490 #ifdef DWARF2_UNWIND_INFO
6491 case BUILT_IN_DWARF_SP_COLUMN:
6492 return expand_builtin_dwarf_sp_column ();
6493 case BUILT_IN_INIT_DWARF_REG_SIZES:
6494 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6495 return const0_rtx;
6496 #endif
6497 case BUILT_IN_FROB_RETURN_ADDR:
6498 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6499 case BUILT_IN_EXTRACT_RETURN_ADDR:
6500 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6501 case BUILT_IN_EH_RETURN:
6502 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6503 CALL_EXPR_ARG (exp, 1));
6504 return const0_rtx;
6505 case BUILT_IN_EH_RETURN_DATA_REGNO:
6506 return expand_builtin_eh_return_data_regno (exp);
6507 case BUILT_IN_EXTEND_POINTER:
6508 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6509 case BUILT_IN_EH_POINTER:
6510 return expand_builtin_eh_pointer (exp);
6511 case BUILT_IN_EH_FILTER:
6512 return expand_builtin_eh_filter (exp);
6513 case BUILT_IN_EH_COPY_VALUES:
6514 return expand_builtin_eh_copy_values (exp);
6515
6516 case BUILT_IN_VA_START:
6517 return expand_builtin_va_start (exp);
6518 case BUILT_IN_VA_END:
6519 return expand_builtin_va_end (exp);
6520 case BUILT_IN_VA_COPY:
6521 return expand_builtin_va_copy (exp);
6522 case BUILT_IN_EXPECT:
6523 return expand_builtin_expect (exp, target);
6524 case BUILT_IN_ASSUME_ALIGNED:
6525 return expand_builtin_assume_aligned (exp, target);
6526 case BUILT_IN_PREFETCH:
6527 expand_builtin_prefetch (exp);
6528 return const0_rtx;
6529
6530 case BUILT_IN_INIT_TRAMPOLINE:
6531 return expand_builtin_init_trampoline (exp, true);
6532 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
6533 return expand_builtin_init_trampoline (exp, false);
6534 case BUILT_IN_ADJUST_TRAMPOLINE:
6535 return expand_builtin_adjust_trampoline (exp);
6536
6537 case BUILT_IN_FORK:
6538 case BUILT_IN_EXECL:
6539 case BUILT_IN_EXECV:
6540 case BUILT_IN_EXECLP:
6541 case BUILT_IN_EXECLE:
6542 case BUILT_IN_EXECVP:
6543 case BUILT_IN_EXECVE:
6544 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6545 if (target)
6546 return target;
6547 break;
6548
6549 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
6550 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
6551 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
6552 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
6553 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
6554 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
6555 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
6556 if (target)
6557 return target;
6558 break;
6559
6560 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
6561 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
6562 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
6563 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
6564 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
6565 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
6566 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
6567 if (target)
6568 return target;
6569 break;
6570
6571 case BUILT_IN_SYNC_FETCH_AND_OR_1:
6572 case BUILT_IN_SYNC_FETCH_AND_OR_2:
6573 case BUILT_IN_SYNC_FETCH_AND_OR_4:
6574 case BUILT_IN_SYNC_FETCH_AND_OR_8:
6575 case BUILT_IN_SYNC_FETCH_AND_OR_16:
6576 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
6577 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
6578 if (target)
6579 return target;
6580 break;
6581
6582 case BUILT_IN_SYNC_FETCH_AND_AND_1:
6583 case BUILT_IN_SYNC_FETCH_AND_AND_2:
6584 case BUILT_IN_SYNC_FETCH_AND_AND_4:
6585 case BUILT_IN_SYNC_FETCH_AND_AND_8:
6586 case BUILT_IN_SYNC_FETCH_AND_AND_16:
6587 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
6588 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
6589 if (target)
6590 return target;
6591 break;
6592
6593 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
6594 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
6595 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
6596 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
6597 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
6598 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
6599 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
6600 if (target)
6601 return target;
6602 break;
6603
6604 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6605 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6606 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6607 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6608 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6609 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
6610 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
6611 if (target)
6612 return target;
6613 break;
6614
6615 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
6616 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
6617 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
6618 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
6619 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
6620 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
6621 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
6622 if (target)
6623 return target;
6624 break;
6625
6626 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
6627 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
6628 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
6629 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
6630 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
6631 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
6632 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
6633 if (target)
6634 return target;
6635 break;
6636
6637 case BUILT_IN_SYNC_OR_AND_FETCH_1:
6638 case BUILT_IN_SYNC_OR_AND_FETCH_2:
6639 case BUILT_IN_SYNC_OR_AND_FETCH_4:
6640 case BUILT_IN_SYNC_OR_AND_FETCH_8:
6641 case BUILT_IN_SYNC_OR_AND_FETCH_16:
6642 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
6643 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
6644 if (target)
6645 return target;
6646 break;
6647
6648 case BUILT_IN_SYNC_AND_AND_FETCH_1:
6649 case BUILT_IN_SYNC_AND_AND_FETCH_2:
6650 case BUILT_IN_SYNC_AND_AND_FETCH_4:
6651 case BUILT_IN_SYNC_AND_AND_FETCH_8:
6652 case BUILT_IN_SYNC_AND_AND_FETCH_16:
6653 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
6654 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
6655 if (target)
6656 return target;
6657 break;
6658
6659 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
6660 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
6661 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
6662 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
6663 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
6664 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
6665 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
6666 if (target)
6667 return target;
6668 break;
6669
6670 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6671 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6672 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6673 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6674 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6675 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
6676 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
6677 if (target)
6678 return target;
6679 break;
6680
6681 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
6682 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
6683 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
6684 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
6685 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
6686 if (mode == VOIDmode)
6687 mode = TYPE_MODE (boolean_type_node);
6688 if (!target || !register_operand (target, mode))
6689 target = gen_reg_rtx (mode);
6690
6691 mode = get_builtin_sync_mode
6692 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
6693 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6694 if (target)
6695 return target;
6696 break;
6697
6698 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
6699 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
6700 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
6701 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
6702 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
6703 mode = get_builtin_sync_mode
6704 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
6705 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6706 if (target)
6707 return target;
6708 break;
6709
6710 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
6711 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
6712 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
6713 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
6714 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
6715 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
6716 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
6717 if (target)
6718 return target;
6719 break;
6720
6721 case BUILT_IN_SYNC_LOCK_RELEASE_1:
6722 case BUILT_IN_SYNC_LOCK_RELEASE_2:
6723 case BUILT_IN_SYNC_LOCK_RELEASE_4:
6724 case BUILT_IN_SYNC_LOCK_RELEASE_8:
6725 case BUILT_IN_SYNC_LOCK_RELEASE_16:
6726 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
6727 expand_builtin_sync_lock_release (mode, exp);
6728 return const0_rtx;
6729
6730 case BUILT_IN_SYNC_SYNCHRONIZE:
6731 expand_builtin_sync_synchronize ();
6732 return const0_rtx;
6733
6734 case BUILT_IN_ATOMIC_EXCHANGE_1:
6735 case BUILT_IN_ATOMIC_EXCHANGE_2:
6736 case BUILT_IN_ATOMIC_EXCHANGE_4:
6737 case BUILT_IN_ATOMIC_EXCHANGE_8:
6738 case BUILT_IN_ATOMIC_EXCHANGE_16:
6739 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
6740 target = expand_builtin_atomic_exchange (mode, exp, target);
6741 if (target)
6742 return target;
6743 break;
6744
6745 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
6746 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
6747 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
6748 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
6749 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
6750 {
6751 unsigned int nargs, z;
6752 vec<tree, va_gc> *vec;
6753
6754 mode =
6755 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
6756 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
6757 if (target)
6758 return target;
6759
6760 /* If this is turned into an external library call, the weak parameter
6761 must be dropped to match the expected parameter list. */
6762 nargs = call_expr_nargs (exp);
6763 vec_alloc (vec, nargs - 1);
6764 for (z = 0; z < 3; z++)
6765 vec->quick_push (CALL_EXPR_ARG (exp, z));
6766 /* Skip the boolean weak parameter. */
6767 for (z = 4; z < 6; z++)
6768 vec->quick_push (CALL_EXPR_ARG (exp, z));
6769 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
6770 break;
6771 }
6772
6773 case BUILT_IN_ATOMIC_LOAD_1:
6774 case BUILT_IN_ATOMIC_LOAD_2:
6775 case BUILT_IN_ATOMIC_LOAD_4:
6776 case BUILT_IN_ATOMIC_LOAD_8:
6777 case BUILT_IN_ATOMIC_LOAD_16:
6778 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
6779 target = expand_builtin_atomic_load (mode, exp, target);
6780 if (target)
6781 return target;
6782 break;
6783
6784 case BUILT_IN_ATOMIC_STORE_1:
6785 case BUILT_IN_ATOMIC_STORE_2:
6786 case BUILT_IN_ATOMIC_STORE_4:
6787 case BUILT_IN_ATOMIC_STORE_8:
6788 case BUILT_IN_ATOMIC_STORE_16:
6789 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
6790 target = expand_builtin_atomic_store (mode, exp);
6791 if (target)
6792 return const0_rtx;
6793 break;
6794
6795 case BUILT_IN_ATOMIC_ADD_FETCH_1:
6796 case BUILT_IN_ATOMIC_ADD_FETCH_2:
6797 case BUILT_IN_ATOMIC_ADD_FETCH_4:
6798 case BUILT_IN_ATOMIC_ADD_FETCH_8:
6799 case BUILT_IN_ATOMIC_ADD_FETCH_16:
6800 {
6801 enum built_in_function lib;
6802 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
6803 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
6804 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
6805 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
6806 ignore, lib);
6807 if (target)
6808 return target;
6809 break;
6810 }
6811 case BUILT_IN_ATOMIC_SUB_FETCH_1:
6812 case BUILT_IN_ATOMIC_SUB_FETCH_2:
6813 case BUILT_IN_ATOMIC_SUB_FETCH_4:
6814 case BUILT_IN_ATOMIC_SUB_FETCH_8:
6815 case BUILT_IN_ATOMIC_SUB_FETCH_16:
6816 {
6817 enum built_in_function lib;
6818 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
6819 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
6820 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
6821 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
6822 ignore, lib);
6823 if (target)
6824 return target;
6825 break;
6826 }
6827 case BUILT_IN_ATOMIC_AND_FETCH_1:
6828 case BUILT_IN_ATOMIC_AND_FETCH_2:
6829 case BUILT_IN_ATOMIC_AND_FETCH_4:
6830 case BUILT_IN_ATOMIC_AND_FETCH_8:
6831 case BUILT_IN_ATOMIC_AND_FETCH_16:
6832 {
6833 enum built_in_function lib;
6834 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
6835 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
6836 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
6837 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
6838 ignore, lib);
6839 if (target)
6840 return target;
6841 break;
6842 }
6843 case BUILT_IN_ATOMIC_NAND_FETCH_1:
6844 case BUILT_IN_ATOMIC_NAND_FETCH_2:
6845 case BUILT_IN_ATOMIC_NAND_FETCH_4:
6846 case BUILT_IN_ATOMIC_NAND_FETCH_8:
6847 case BUILT_IN_ATOMIC_NAND_FETCH_16:
6848 {
6849 enum built_in_function lib;
6850 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
6851 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
6852 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
6853 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
6854 ignore, lib);
6855 if (target)
6856 return target;
6857 break;
6858 }
6859 case BUILT_IN_ATOMIC_XOR_FETCH_1:
6860 case BUILT_IN_ATOMIC_XOR_FETCH_2:
6861 case BUILT_IN_ATOMIC_XOR_FETCH_4:
6862 case BUILT_IN_ATOMIC_XOR_FETCH_8:
6863 case BUILT_IN_ATOMIC_XOR_FETCH_16:
6864 {
6865 enum built_in_function lib;
6866 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
6867 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
6868 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
6869 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
6870 ignore, lib);
6871 if (target)
6872 return target;
6873 break;
6874 }
6875 case BUILT_IN_ATOMIC_OR_FETCH_1:
6876 case BUILT_IN_ATOMIC_OR_FETCH_2:
6877 case BUILT_IN_ATOMIC_OR_FETCH_4:
6878 case BUILT_IN_ATOMIC_OR_FETCH_8:
6879 case BUILT_IN_ATOMIC_OR_FETCH_16:
6880 {
6881 enum built_in_function lib;
6882 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
6883 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
6884 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
6885 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
6886 ignore, lib);
6887 if (target)
6888 return target;
6889 break;
6890 }
6891 case BUILT_IN_ATOMIC_FETCH_ADD_1:
6892 case BUILT_IN_ATOMIC_FETCH_ADD_2:
6893 case BUILT_IN_ATOMIC_FETCH_ADD_4:
6894 case BUILT_IN_ATOMIC_FETCH_ADD_8:
6895 case BUILT_IN_ATOMIC_FETCH_ADD_16:
6896 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
6897 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
6898 ignore, BUILT_IN_NONE);
6899 if (target)
6900 return target;
6901 break;
6902
6903 case BUILT_IN_ATOMIC_FETCH_SUB_1:
6904 case BUILT_IN_ATOMIC_FETCH_SUB_2:
6905 case BUILT_IN_ATOMIC_FETCH_SUB_4:
6906 case BUILT_IN_ATOMIC_FETCH_SUB_8:
6907 case BUILT_IN_ATOMIC_FETCH_SUB_16:
6908 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
6909 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
6910 ignore, BUILT_IN_NONE);
6911 if (target)
6912 return target;
6913 break;
6914
6915 case BUILT_IN_ATOMIC_FETCH_AND_1:
6916 case BUILT_IN_ATOMIC_FETCH_AND_2:
6917 case BUILT_IN_ATOMIC_FETCH_AND_4:
6918 case BUILT_IN_ATOMIC_FETCH_AND_8:
6919 case BUILT_IN_ATOMIC_FETCH_AND_16:
6920 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
6921 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
6922 ignore, BUILT_IN_NONE);
6923 if (target)
6924 return target;
6925 break;
6926
6927 case BUILT_IN_ATOMIC_FETCH_NAND_1:
6928 case BUILT_IN_ATOMIC_FETCH_NAND_2:
6929 case BUILT_IN_ATOMIC_FETCH_NAND_4:
6930 case BUILT_IN_ATOMIC_FETCH_NAND_8:
6931 case BUILT_IN_ATOMIC_FETCH_NAND_16:
6932 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
6933 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
6934 ignore, BUILT_IN_NONE);
6935 if (target)
6936 return target;
6937 break;
6938
6939 case BUILT_IN_ATOMIC_FETCH_XOR_1:
6940 case BUILT_IN_ATOMIC_FETCH_XOR_2:
6941 case BUILT_IN_ATOMIC_FETCH_XOR_4:
6942 case BUILT_IN_ATOMIC_FETCH_XOR_8:
6943 case BUILT_IN_ATOMIC_FETCH_XOR_16:
6944 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
6945 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
6946 ignore, BUILT_IN_NONE);
6947 if (target)
6948 return target;
6949 break;
6950
6951 case BUILT_IN_ATOMIC_FETCH_OR_1:
6952 case BUILT_IN_ATOMIC_FETCH_OR_2:
6953 case BUILT_IN_ATOMIC_FETCH_OR_4:
6954 case BUILT_IN_ATOMIC_FETCH_OR_8:
6955 case BUILT_IN_ATOMIC_FETCH_OR_16:
6956 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
6957 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
6958 ignore, BUILT_IN_NONE);
6959 if (target)
6960 return target;
6961 break;
6962
6963 case BUILT_IN_ATOMIC_TEST_AND_SET:
6964 return expand_builtin_atomic_test_and_set (exp, target);
6965
6966 case BUILT_IN_ATOMIC_CLEAR:
6967 return expand_builtin_atomic_clear (exp);
6968
6969 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
6970 return expand_builtin_atomic_always_lock_free (exp);
6971
6972 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
6973 target = expand_builtin_atomic_is_lock_free (exp);
6974 if (target)
6975 return target;
6976 break;
6977
6978 case BUILT_IN_ATOMIC_THREAD_FENCE:
6979 expand_builtin_atomic_thread_fence (exp);
6980 return const0_rtx;
6981
6982 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
6983 expand_builtin_atomic_signal_fence (exp);
6984 return const0_rtx;
6985
6986 case BUILT_IN_OBJECT_SIZE:
6987 return expand_builtin_object_size (exp);
6988
6989 case BUILT_IN_MEMCPY_CHK:
6990 case BUILT_IN_MEMPCPY_CHK:
6991 case BUILT_IN_MEMMOVE_CHK:
6992 case BUILT_IN_MEMSET_CHK:
6993 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6994 if (target)
6995 return target;
6996 break;
6997
6998 case BUILT_IN_STRCPY_CHK:
6999 case BUILT_IN_STPCPY_CHK:
7000 case BUILT_IN_STRNCPY_CHK:
7001 case BUILT_IN_STPNCPY_CHK:
7002 case BUILT_IN_STRCAT_CHK:
7003 case BUILT_IN_STRNCAT_CHK:
7004 case BUILT_IN_SNPRINTF_CHK:
7005 case BUILT_IN_VSNPRINTF_CHK:
7006 maybe_emit_chk_warning (exp, fcode);
7007 break;
7008
7009 case BUILT_IN_SPRINTF_CHK:
7010 case BUILT_IN_VSPRINTF_CHK:
7011 maybe_emit_sprintf_chk_warning (exp, fcode);
7012 break;
7013
7014 case BUILT_IN_FREE:
7015 if (warn_free_nonheap_object)
7016 maybe_emit_free_warning (exp);
7017 break;
7018
7019 case BUILT_IN_THREAD_POINTER:
7020 return expand_builtin_thread_pointer (exp, target);
7021
7022 case BUILT_IN_SET_THREAD_POINTER:
7023 expand_builtin_set_thread_pointer (exp);
7024 return const0_rtx;
7025
7026 case BUILT_IN_CILK_DETACH:
7027 expand_builtin_cilk_detach (exp);
7028 return const0_rtx;
7029
7030 case BUILT_IN_CILK_POP_FRAME:
7031 expand_builtin_cilk_pop_frame (exp);
7032 return const0_rtx;
7033
7034 case BUILT_IN_CHKP_INIT_PTR_BOUNDS:
7035 case BUILT_IN_CHKP_NULL_PTR_BOUNDS:
7036 case BUILT_IN_CHKP_COPY_PTR_BOUNDS:
7037 case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS:
7038 case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS:
7039 case BUILT_IN_CHKP_CHECK_PTR_BOUNDS:
7040 case BUILT_IN_CHKP_SET_PTR_BOUNDS:
7041 case BUILT_IN_CHKP_NARROW_PTR_BOUNDS:
7042 case BUILT_IN_CHKP_STORE_PTR_BOUNDS:
7043 case BUILT_IN_CHKP_GET_PTR_LBOUND:
7044 case BUILT_IN_CHKP_GET_PTR_UBOUND:
7045 /* We allow user CHKP builtins if Pointer Bounds
7046 Checker is off. */
7047 if (!chkp_function_instrumented_p (current_function_decl))
7048 {
7049 if (fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS
7050 || fcode == BUILT_IN_CHKP_NARROW_PTR_BOUNDS
7051 || fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
7052 || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
7053 || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
7054 return expand_normal (CALL_EXPR_ARG (exp, 0));
7055 else if (fcode == BUILT_IN_CHKP_GET_PTR_LBOUND)
7056 return expand_normal (size_zero_node);
7057 else if (fcode == BUILT_IN_CHKP_GET_PTR_UBOUND)
7058 return expand_normal (size_int (-1));
7059 else
7060 return const0_rtx;
7061 }
7062 /* FALLTHROUGH */
7063
7064 case BUILT_IN_CHKP_BNDMK:
7065 case BUILT_IN_CHKP_BNDSTX:
7066 case BUILT_IN_CHKP_BNDCL:
7067 case BUILT_IN_CHKP_BNDCU:
7068 case BUILT_IN_CHKP_BNDLDX:
7069 case BUILT_IN_CHKP_BNDRET:
7070 case BUILT_IN_CHKP_INTERSECT:
7071 case BUILT_IN_CHKP_NARROW:
7072 case BUILT_IN_CHKP_EXTRACT_LOWER:
7073 case BUILT_IN_CHKP_EXTRACT_UPPER:
7074 /* A software implementation of the Pointer Bounds Checker is not yet
7075 implemented; target support is required. */
7076 error ("Your target platform does not support -fcheck-pointer-bounds");
7077 break;
7078
7079 case BUILT_IN_ACC_ON_DEVICE:
7080 target = expand_builtin_acc_on_device (exp, target);
7081 if (target)
7082 return target;
7083 break;
7084
7085 default: /* Just do a library call if the builtin is unknown. */
7086 break;
7087 }
7088
7089 /* The switch statement above can drop through to cause the function
7090 to be called normally. */
7091 return expand_call (exp, target, ignore);
7092 }
7093
7094 /* Similar to expand_builtin but is used for instrumented calls. */
7095
7096 rtx
7097 expand_builtin_with_bounds (tree exp, rtx target,
7098 rtx subtarget ATTRIBUTE_UNUSED,
7099 machine_mode mode, int ignore)
7100 {
7101 tree fndecl = get_callee_fndecl (exp);
7102 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7103
7104 gcc_assert (CALL_WITH_BOUNDS_P (exp));
7105
7106 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7107 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7108
7109 gcc_assert (fcode > BEGIN_CHKP_BUILTINS
7110 && fcode < END_CHKP_BUILTINS);
7111
7112 switch (fcode)
7113 {
7114 case BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP:
7115 target = expand_builtin_memcpy_with_bounds (exp, target);
7116 if (target)
7117 return target;
7118 break;
7119
7120 case BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP:
7121 target = expand_builtin_mempcpy_with_bounds (exp, target, mode);
7122 if (target)
7123 return target;
7124 break;
7125
7126 case BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP:
7127 target = expand_builtin_memset_with_bounds (exp, target, mode);
7128 if (target)
7129 return target;
7130 break;
7131
7132 default:
7133 break;
7134 }
7135
7136 /* The switch statement above can drop through to cause the function
7137 to be called normally. */
7138 return expand_call (exp, target, ignore);
7139 }
7140
7141 /* Determine whether a tree node represents a call to a built-in
7142 function. If the tree T is a call to a built-in function with
7143 the right number of arguments of the appropriate types, return
7144 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7145 Otherwise the return value is END_BUILTINS. */
7146
7147 enum built_in_function
7148 builtin_mathfn_code (const_tree t)
7149 {
7150 const_tree fndecl, arg, parmlist;
7151 const_tree argtype, parmtype;
7152 const_call_expr_arg_iterator iter;
7153
7154 if (TREE_CODE (t) != CALL_EXPR
7155 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
7156 return END_BUILTINS;
7157
7158 fndecl = get_callee_fndecl (t);
7159 if (fndecl == NULL_TREE
7160 || TREE_CODE (fndecl) != FUNCTION_DECL
7161 || ! DECL_BUILT_IN (fndecl)
7162 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7163 return END_BUILTINS;
7164
7165 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
7166 init_const_call_expr_arg_iterator (t, &iter);
7167 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
7168 {
7169 /* If a function doesn't take a variable number of arguments,
7170 the last element in the list will have type `void'. */
7171 parmtype = TREE_VALUE (parmlist);
7172 if (VOID_TYPE_P (parmtype))
7173 {
7174 if (more_const_call_expr_args_p (&iter))
7175 return END_BUILTINS;
7176 return DECL_FUNCTION_CODE (fndecl);
7177 }
7178
7179 if (! more_const_call_expr_args_p (&iter))
7180 return END_BUILTINS;
7181
7182 arg = next_const_call_expr_arg (&iter);
7183 argtype = TREE_TYPE (arg);
7184
7185 if (SCALAR_FLOAT_TYPE_P (parmtype))
7186 {
7187 if (! SCALAR_FLOAT_TYPE_P (argtype))
7188 return END_BUILTINS;
7189 }
7190 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7191 {
7192 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7193 return END_BUILTINS;
7194 }
7195 else if (POINTER_TYPE_P (parmtype))
7196 {
7197 if (! POINTER_TYPE_P (argtype))
7198 return END_BUILTINS;
7199 }
7200 else if (INTEGRAL_TYPE_P (parmtype))
7201 {
7202 if (! INTEGRAL_TYPE_P (argtype))
7203 return END_BUILTINS;
7204 }
7205 else
7206 return END_BUILTINS;
7207 }
7208
7209 /* Variable-length argument list. */
7210 return DECL_FUNCTION_CODE (fndecl);
7211 }
7212
7213 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7214 evaluate to a constant. */
7215
7216 static tree
7217 fold_builtin_constant_p (tree arg)
7218 {
7219 /* We return 1 for a numeric type that's known to be a constant
7220 value at compile-time or for an aggregate type that's a
7221 literal constant. */
7222 STRIP_NOPS (arg);
7223
7224 /* If we know this is a constant, return the constant one. */
7225 if (CONSTANT_CLASS_P (arg)
7226 || (TREE_CODE (arg) == CONSTRUCTOR
7227 && TREE_CONSTANT (arg)))
7228 return integer_one_node;
7229 if (TREE_CODE (arg) == ADDR_EXPR)
7230 {
7231 tree op = TREE_OPERAND (arg, 0);
7232 if (TREE_CODE (op) == STRING_CST
7233 || (TREE_CODE (op) == ARRAY_REF
7234 && integer_zerop (TREE_OPERAND (op, 1))
7235 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7236 return integer_one_node;
7237 }
7238
7239 /* If this expression has side effects, show we don't know it to be a
7240 constant. Likewise if it's a pointer or aggregate type, since in
7241 those cases we only want literals, which are only optimized
7242 when generating RTL, not later.
7243 And finally, if we are compiling an initializer, not code, we
7244 need to return a definite result now; there's not going to be any
7245 more optimization done. */
7246 if (TREE_SIDE_EFFECTS (arg)
7247 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7248 || POINTER_TYPE_P (TREE_TYPE (arg))
7249 || cfun == 0
7250 || folding_initializer
7251 || force_folding_builtin_constant_p)
7252 return integer_zero_node;
7253
7254 return NULL_TREE;
7255 }
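
/* Illustrative user-level behaviour of the folding above:
     __builtin_constant_p (42)     folds to 1 immediately;
     __builtin_constant_p ("abc")  folds to 1 (address of a string literal);
     __builtin_constant_p (x++)    folds to 0 (side effects);
     __builtin_constant_p (x)      is left alone here so that later
   optimization passes may still prove X constant.  */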
7256
7257 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7258 return it as a truthvalue. */
7259
7260 static tree
7261 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
7262 tree predictor)
7263 {
7264 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7265
7266 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
7267 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7268 ret_type = TREE_TYPE (TREE_TYPE (fn));
7269 pred_type = TREE_VALUE (arg_types);
7270 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7271
7272 pred = fold_convert_loc (loc, pred_type, pred);
7273 expected = fold_convert_loc (loc, expected_type, expected);
7274 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
7275 predictor);
7276
7277 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7278 build_int_cst (ret_type, 0));
7279 }
7280
7281 /* Fold a call to builtin_expect with arguments ARG0, ARG1 and ARG2.
7282 Return NULL_TREE if no simplification is possible. */
7283
7284 tree
7285 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
7286 {
7287 tree inner, fndecl, inner_arg0;
7288 enum tree_code code;
7289
7290 /* Distribute the expected value over short-circuiting operators.
7291 See through the cast from truthvalue_type_node to long. */
7292 inner_arg0 = arg0;
7293 while (CONVERT_EXPR_P (inner_arg0)
7294 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
7295 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
7296 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
7297
7298 /* If this is a builtin_expect within a builtin_expect, keep the
7299 inner one. See through a comparison against a constant; it
7300 might have been added to create a truthvalue. */
7301 inner = inner_arg0;
7302
7303 if (COMPARISON_CLASS_P (inner)
7304 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7305 inner = TREE_OPERAND (inner, 0);
7306
7307 if (TREE_CODE (inner) == CALL_EXPR
7308 && (fndecl = get_callee_fndecl (inner))
7309 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7310 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7311 return arg0;
7312
7313 inner = inner_arg0;
7314 code = TREE_CODE (inner);
7315 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7316 {
7317 tree op0 = TREE_OPERAND (inner, 0);
7318 tree op1 = TREE_OPERAND (inner, 1);
7319
7320 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
7321 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
7322 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7323
7324 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
7325 }
7326
7327 /* If the argument isn't invariant then there's nothing else we can do. */
7328 if (!TREE_CONSTANT (inner_arg0))
7329 return NULL_TREE;
7330
7331 /* If we expect that a comparison against the argument will fold to
7332 a constant return the constant. In practice, this means a true
7333 constant or the address of a non-weak symbol. */
7334 inner = inner_arg0;
7335 STRIP_NOPS (inner);
7336 if (TREE_CODE (inner) == ADDR_EXPR)
7337 {
7338 do
7339 {
7340 inner = TREE_OPERAND (inner, 0);
7341 }
7342 while (TREE_CODE (inner) == COMPONENT_REF
7343 || TREE_CODE (inner) == ARRAY_REF);
7344 if ((TREE_CODE (inner) == VAR_DECL
7345 || TREE_CODE (inner) == FUNCTION_DECL)
7346 && DECL_WEAK (inner))
7347 return NULL_TREE;
7348 }
7349
7350 /* Otherwise, ARG0 already has the proper type for the return value. */
7351 return arg0;
7352 }
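
/* A sketch of the distribution above, for C user code:
     __builtin_expect (a && b, 1)
   is folded into the equivalent of
     (__builtin_expect (a, 1) != 0) && (__builtin_expect (b, 1) != 0)
   so each short-circuit arm carries the prediction, while a nested
     __builtin_expect (__builtin_expect (x, 1), 1)
   keeps only the inner call.  */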
7353
7354 /* Fold a call to __builtin_classify_type with argument ARG. */
7355
7356 static tree
7357 fold_builtin_classify_type (tree arg)
7358 {
7359 if (arg == 0)
7360 return build_int_cst (integer_type_node, no_type_class);
7361
7362 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
7363 }
7364
7365 /* Fold a call to __builtin_strlen with argument ARG. */
7366
7367 static tree
7368 fold_builtin_strlen (location_t loc, tree type, tree arg)
7369 {
7370 if (!validate_arg (arg, POINTER_TYPE))
7371 return NULL_TREE;
7372 else
7373 {
7374 tree len = c_strlen (arg, 0);
7375
7376 if (len)
7377 return fold_convert_loc (loc, type, len);
7378
7379 return NULL_TREE;
7380 }
7381 }
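
/* For example, a constant argument folds at compile time:
     __builtin_strlen ("hello")  becomes  5
   converted to the return type of the call.  */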
7382
7383 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7384
7385 static tree
7386 fold_builtin_inf (location_t loc, tree type, int warn)
7387 {
7388 REAL_VALUE_TYPE real;
7389
7390 /* __builtin_inff is intended to be usable to define INFINITY on all
7391 targets. If an infinity is not available, INFINITY expands "to a
7392 positive constant of type float that overflows at translation
7393 time", footnote "In this case, using INFINITY will violate the
7394 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7395 Thus we pedwarn to ensure this constraint violation is
7396 diagnosed. */
7397 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7398 pedwarn (loc, 0, "target format does not support infinity");
7399
7400 real_inf (&real);
7401 return build_real (type, real);
7402 }
7403
7404 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7405
7406 static tree
7407 fold_builtin_nan (tree arg, tree type, int quiet)
7408 {
7409 REAL_VALUE_TYPE real;
7410 const char *str;
7411
7412 if (!validate_arg (arg, POINTER_TYPE))
7413 return NULL_TREE;
7414 str = c_getstr (arg);
7415 if (!str)
7416 return NULL_TREE;
7417
7418 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7419 return NULL_TREE;
7420
7421 return build_real (type, real);
7422 }
7423
7424 /* Return true if the floating point expression T has an integer value.
7425 We also allow +Inf, -Inf and NaN to be considered integer values. */
7426
7427 static bool
7428 integer_valued_real_p (tree t)
7429 {
7430 switch (TREE_CODE (t))
7431 {
7432 case FLOAT_EXPR:
7433 return true;
7434
7435 case ABS_EXPR:
7436 case SAVE_EXPR:
7437 return integer_valued_real_p (TREE_OPERAND (t, 0));
7438
7439 case COMPOUND_EXPR:
7440 case MODIFY_EXPR:
7441 case BIND_EXPR:
7442 return integer_valued_real_p (TREE_OPERAND (t, 1));
7443
7444 case PLUS_EXPR:
7445 case MINUS_EXPR:
7446 case MULT_EXPR:
7447 case MIN_EXPR:
7448 case MAX_EXPR:
7449 return integer_valued_real_p (TREE_OPERAND (t, 0))
7450 && integer_valued_real_p (TREE_OPERAND (t, 1));
7451
7452 case COND_EXPR:
7453 return integer_valued_real_p (TREE_OPERAND (t, 1))
7454 && integer_valued_real_p (TREE_OPERAND (t, 2));
7455
7456 case REAL_CST:
7457 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
7458
7459 CASE_CONVERT:
7460 {
7461 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7462 if (TREE_CODE (type) == INTEGER_TYPE)
7463 return true;
7464 if (TREE_CODE (type) == REAL_TYPE)
7465 return integer_valued_real_p (TREE_OPERAND (t, 0));
7466 break;
7467 }
7468
7469 case CALL_EXPR:
7470 switch (builtin_mathfn_code (t))
7471 {
7472 CASE_FLT_FN (BUILT_IN_CEIL):
7473 CASE_FLT_FN (BUILT_IN_FLOOR):
7474 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7475 CASE_FLT_FN (BUILT_IN_RINT):
7476 CASE_FLT_FN (BUILT_IN_ROUND):
7477 CASE_FLT_FN (BUILT_IN_TRUNC):
7478 return true;
7479
7480 CASE_FLT_FN (BUILT_IN_FMIN):
7481 CASE_FLT_FN (BUILT_IN_FMAX):
7482 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7483 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
7484
7485 default:
7486 break;
7487 }
7488 break;
7489
7490 default:
7491 break;
7492 }
7493 return false;
7494 }
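
/* A few forms the predicate above accepts, for illustration:
     (double) i              an integer-to-float conversion (FLOAT_EXPR);
     trunc (x), floor (x)    rounding builtins always yield integers;
     (double) i * trunc (x)  integrality is closed under +, -, *, min, max.  */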
7495
7496 /* FNDECL is assumed to be a builtin where truncation can be propagated
7497 across (for instance floor((double)f) == (double)floorf (f)).
7498 Do the transformation for a call with argument ARG. */
7499
7500 static tree
7501 fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
7502 {
7503 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7504
7505 if (!validate_arg (arg, REAL_TYPE))
7506 return NULL_TREE;
7507
7508 /* Integer rounding functions are idempotent. */
7509 if (fcode == builtin_mathfn_code (arg))
7510 return arg;
7511
7512 /* If argument is already integer valued, and we don't need to worry
7513 about setting errno, there's no need to perform rounding. */
7514 if (! flag_errno_math && integer_valued_real_p (arg))
7515 return arg;
7516
7517 if (optimize)
7518 {
7519 tree arg0 = strip_float_extensions (arg);
7520 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7521 tree newtype = TREE_TYPE (arg0);
7522 tree decl;
7523
7524 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7525 && (decl = mathfn_built_in (newtype, fcode)))
7526 return fold_convert_loc (loc, ftype,
7527 build_call_expr_loc (loc, decl, 1,
7528 fold_convert_loc (loc,
7529 newtype,
7530 arg0)));
7531 }
7532 return NULL_TREE;
7533 }
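
/* A sketch, assuming a float argument F and optimization enabled:
     floor ((double) f)  becomes  (double) floorf (f)
   and, by idempotence,
     floor (floor (x))   becomes  floor (x).  */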
7534
7535 /* FNDECL is assumed to be a builtin which can narrow the FP type of
7536 the argument, for instance lround((double)f) -> lroundf (f).
7537 Do the transformation for a call with argument ARG. */
7538
7539 static tree
7540 fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
7541 {
7542 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7543
7544 if (!validate_arg (arg, REAL_TYPE))
7545 return NULL_TREE;
7546
7547 /* If argument is already integer valued, and we don't need to worry
7548 about setting errno, there's no need to perform rounding. */
7549 if (! flag_errno_math && integer_valued_real_p (arg))
7550 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7551 TREE_TYPE (TREE_TYPE (fndecl)), arg);
7552
7553 if (optimize)
7554 {
7555 tree ftype = TREE_TYPE (arg);
7556 tree arg0 = strip_float_extensions (arg);
7557 tree newtype = TREE_TYPE (arg0);
7558 tree decl;
7559
7560 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7561 && (decl = mathfn_built_in (newtype, fcode)))
7562 return build_call_expr_loc (loc, decl, 1,
7563 fold_convert_loc (loc, newtype, arg0));
7564 }
7565
7566 /* Canonicalize iround (x) to lround (x) on ILP32 targets where
7567 sizeof (int) == sizeof (long). */
7568 if (TYPE_PRECISION (integer_type_node)
7569 == TYPE_PRECISION (long_integer_type_node))
7570 {
7571 tree newfn = NULL_TREE;
7572 switch (fcode)
7573 {
7574 CASE_FLT_FN (BUILT_IN_ICEIL):
7575 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7576 break;
7577
7578 CASE_FLT_FN (BUILT_IN_IFLOOR):
7579 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7580 break;
7581
7582 CASE_FLT_FN (BUILT_IN_IROUND):
7583 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7584 break;
7585
7586 CASE_FLT_FN (BUILT_IN_IRINT):
7587 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7588 break;
7589
7590 default:
7591 break;
7592 }
7593
7594 if (newfn)
7595 {
7596 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7597 return fold_convert_loc (loc,
7598 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7599 }
7600 }
7601
7602 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7603 sizeof (long long) == sizeof (long). */
7604 if (TYPE_PRECISION (long_long_integer_type_node)
7605 == TYPE_PRECISION (long_integer_type_node))
7606 {
7607 tree newfn = NULL_TREE;
7608 switch (fcode)
7609 {
7610 CASE_FLT_FN (BUILT_IN_LLCEIL):
7611 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7612 break;
7613
7614 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7615 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7616 break;
7617
7618 CASE_FLT_FN (BUILT_IN_LLROUND):
7619 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7620 break;
7621
7622 CASE_FLT_FN (BUILT_IN_LLRINT):
7623 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7624 break;
7625
7626 default:
7627 break;
7628 }
7629
7630 if (newfn)
7631 {
7632 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7633 return fold_convert_loc (loc,
7634 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7635 }
7636 }
7637
7638 return NULL_TREE;
7639 }
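
/* A sketch, assuming a float argument F and an LP64 target:
     lround ((double) f)  becomes  lroundf (f)
     llround (x)          becomes  lround (x)
   and on targets where int and long have the same precision,
     iround (x)           becomes  lround (x).  */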
7640
7641 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7642 return type. Return NULL_TREE if no simplification can be made. */
7643
7644 static tree
7645 fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
7646 {
7647 tree res;
7648
7649 if (!validate_arg (arg, COMPLEX_TYPE)
7650 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7651 return NULL_TREE;
7652
7653 /* Calculate the result when the argument is a constant. */
7654 if (TREE_CODE (arg) == COMPLEX_CST
7655 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7656 type, mpfr_hypot)))
7657 return res;
7658
7659 if (TREE_CODE (arg) == COMPLEX_EXPR)
7660 {
7661 tree real = TREE_OPERAND (arg, 0);
7662 tree imag = TREE_OPERAND (arg, 1);
7663
7664 /* If either part is zero, cabs is fabs of the other. */
7665 if (real_zerop (real))
7666 return fold_build1_loc (loc, ABS_EXPR, type, imag);
7667 if (real_zerop (imag))
7668 return fold_build1_loc (loc, ABS_EXPR, type, real);
7669
7670 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7671 if (flag_unsafe_math_optimizations
7672 && operand_equal_p (real, imag, OEP_PURE_SAME))
7673 {
7674 const REAL_VALUE_TYPE sqrt2_trunc
7675 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7676 STRIP_NOPS (real);
7677 return fold_build2_loc (loc, MULT_EXPR, type,
7678 fold_build1_loc (loc, ABS_EXPR, type, real),
7679 build_real (type, sqrt2_trunc));
7680 }
7681 }
7682
7683 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7684 if (TREE_CODE (arg) == NEGATE_EXPR
7685 || TREE_CODE (arg) == CONJ_EXPR)
7686 return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));
7687
7688 /* Don't do this when optimizing for size. */
7689 if (flag_unsafe_math_optimizations
7690 && optimize && optimize_function_for_speed_p (cfun))
7691 {
7692 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7693
7694 if (sqrtfn != NULL_TREE)
7695 {
7696 tree rpart, ipart, result;
7697
7698 arg = builtin_save_expr (arg);
7699
7700 rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
7701 ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);
7702
7703 rpart = builtin_save_expr (rpart);
7704 ipart = builtin_save_expr (ipart);
7705
7706 result = fold_build2_loc (loc, PLUS_EXPR, type,
7707 fold_build2_loc (loc, MULT_EXPR, type,
7708 rpart, rpart),
7709 fold_build2_loc (loc, MULT_EXPR, type,
7710 ipart, ipart));
7711
7712 return build_call_expr_loc (loc, sqrtfn, 1, result);
7713 }
7714 }
7715
7716 return NULL_TREE;
7717 }
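
/* A sketch of the simplifications above:
     cabs (x + 0.0i)  becomes  fabs (x)
     cabs (-z)        becomes  cabs (z)
   and, with -funsafe-math-optimizations when optimizing for speed,
     cabs (z)         becomes  sqrt (r*r + i*i)
   where R and I are the saved real and imaginary parts of Z.  */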
7718
7719 /* Build a complex (inf +- 0i) for the result of cproj. TYPE is the
7720 complex tree type of the result. If NEG is true, the imaginary
7721 zero is negative. */
7722
7723 static tree
7724 build_complex_cproj (tree type, bool neg)
7725 {
7726 REAL_VALUE_TYPE rinf, rzero = dconst0;
7727
7728 real_inf (&rinf);
7729 rzero.sign = neg;
7730 return build_complex (type, build_real (TREE_TYPE (type), rinf),
7731 build_real (TREE_TYPE (type), rzero));
7732 }
7733
7734 /* Fold call to builtin cproj, cprojf or cprojl with argument ARG. TYPE is the
7735 return type. Return NULL_TREE if no simplification can be made. */
7736
7737 static tree
7738 fold_builtin_cproj (location_t loc, tree arg, tree type)
7739 {
7740 if (!validate_arg (arg, COMPLEX_TYPE)
7741 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7742 return NULL_TREE;
7743
7744 /* If there are no infinities, return arg. */
7745 if (! HONOR_INFINITIES (type))
7746 return non_lvalue_loc (loc, arg);
7747
7748 /* Calculate the result when the argument is a constant. */
7749 if (TREE_CODE (arg) == COMPLEX_CST)
7750 {
7751 const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
7752 const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
7753
7754 if (real_isinf (real) || real_isinf (imag))
7755 return build_complex_cproj (type, imag->sign);
7756 else
7757 return arg;
7758 }
7759 else if (TREE_CODE (arg) == COMPLEX_EXPR)
7760 {
7761 tree real = TREE_OPERAND (arg, 0);
7762 tree imag = TREE_OPERAND (arg, 1);
7763
7764 STRIP_NOPS (real);
7765 STRIP_NOPS (imag);
7766
7767 /* If the real part is inf and the imag part is known to be
7768 nonnegative, return (inf + 0i). Remember side-effects are
7769 possible in the imag part. */
7770 if (TREE_CODE (real) == REAL_CST
7771 && real_isinf (TREE_REAL_CST_PTR (real))
7772 && tree_expr_nonnegative_p (imag))
7773 return omit_one_operand_loc (loc, type,
7774 build_complex_cproj (type, false),
7775 arg);
7776
7777 /* If the imag part is inf, return (inf+I*copysign(0,imag)).
7778 Remember side-effects are possible in the real part. */
7779 if (TREE_CODE (imag) == REAL_CST
7780 && real_isinf (TREE_REAL_CST_PTR (imag)))
7781 return
7782 omit_one_operand_loc (loc, type,
7783 build_complex_cproj (type, TREE_REAL_CST_PTR
7784 (imag)->sign), arg);
7785 }
7786
7787 return NULL_TREE;
7788 }
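
/* Constant examples of the C99 cproj semantics implemented above:
     cproj (2.0 + 3.0i)       is  2.0 + 3.0i  (finite arguments unchanged)
     cproj (INFINITY - 3.0i)  is  INFINITY - 0.0i  (imag keeps its sign)
   and targets without infinities simply return the argument.  */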
7789
7790 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7791 Return NULL_TREE if no simplification can be made. */
7792
7793 static tree
7794 fold_builtin_sqrt (location_t loc, tree arg, tree type)
7795 {
7796
7797 enum built_in_function fcode;
7798 tree res;
7799
7800 if (!validate_arg (arg, REAL_TYPE))
7801 return NULL_TREE;
7802
7803 /* Calculate the result when the argument is a constant. */
7804 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7805 return res;
7806
7807 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7808 fcode = builtin_mathfn_code (arg);
7809 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7810 {
7811 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7812 arg = fold_build2_loc (loc, MULT_EXPR, type,
7813 CALL_EXPR_ARG (arg, 0),
7814 build_real (type, dconsthalf));
7815 return build_call_expr_loc (loc, expfn, 1, arg);
7816 }
7817
7818 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7819 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7820 {
7821 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7822
7823 if (powfn)
7824 {
7825 tree arg0 = CALL_EXPR_ARG (arg, 0);
7826 tree tree_root;
7827 /* The inner root was either sqrt or cbrt. */
7828 /* This was a conditional expression but it triggered a bug
7829 in Sun C 5.5. */
7830 REAL_VALUE_TYPE dconstroot;
7831 if (BUILTIN_SQRT_P (fcode))
7832 dconstroot = dconsthalf;
7833 else
7834 dconstroot = dconst_third ();
7835
7836 /* Adjust for the outer root. */
7837 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7838 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7839 tree_root = build_real (type, dconstroot);
7840 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7841 }
7842 }
7843
7844 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7845 if (flag_unsafe_math_optimizations
7846 && (fcode == BUILT_IN_POW
7847 || fcode == BUILT_IN_POWF
7848 || fcode == BUILT_IN_POWL))
7849 {
7850 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7851 tree arg0 = CALL_EXPR_ARG (arg, 0);
7852 tree arg1 = CALL_EXPR_ARG (arg, 1);
7853 tree narg1;
7854 if (!tree_expr_nonnegative_p (arg0))
7855 arg0 = build1 (ABS_EXPR, type, arg0);
7856 narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7857 build_real (type, dconsthalf));
7858 return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
7859 }
7860
7861 return NULL_TREE;
7862 }
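
/* A sketch of the unsafe-math rewrites above:
     sqrt (exp (x))     becomes  exp (x * 0.5)
     sqrt (cbrt (x))    becomes  pow (x, 1.0/6.0)
     sqrt (pow (x, y))  becomes  pow (fabs (x), y * 0.5).  */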
7863
7864 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7865 Return NULL_TREE if no simplification can be made. */
7866
7867 static tree
7868 fold_builtin_cbrt (location_t loc, tree arg, tree type)
7869 {
7870 const enum built_in_function fcode = builtin_mathfn_code (arg);
7871 tree res;
7872
7873 if (!validate_arg (arg, REAL_TYPE))
7874 return NULL_TREE;
7875
7876 /* Calculate the result when the argument is a constant. */
7877 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7878 return res;
7879
7880 if (flag_unsafe_math_optimizations)
7881 {
7882 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7883 if (BUILTIN_EXPONENT_P (fcode))
7884 {
7885 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7886 const REAL_VALUE_TYPE third_trunc =
7887 real_value_truncate (TYPE_MODE (type), dconst_third ());
7888 arg = fold_build2_loc (loc, MULT_EXPR, type,
7889 CALL_EXPR_ARG (arg, 0),
7890 build_real (type, third_trunc));
7891 return build_call_expr_loc (loc, expfn, 1, arg);
7892 }
7893
7894 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7895 if (BUILTIN_SQRT_P (fcode))
7896 {
7897 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7898
7899 if (powfn)
7900 {
7901 tree arg0 = CALL_EXPR_ARG (arg, 0);
7902 tree tree_root;
7903 REAL_VALUE_TYPE dconstroot = dconst_third ();
7904
7905 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7906 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7907 tree_root = build_real (type, dconstroot);
7908 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7909 }
7910 }
7911
7912 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7913 if (BUILTIN_CBRT_P (fcode))
7914 {
7915 tree arg0 = CALL_EXPR_ARG (arg, 0);
7916 if (tree_expr_nonnegative_p (arg0))
7917 {
7918 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7919
7920 if (powfn)
7921 {
7922 tree tree_root;
7923 REAL_VALUE_TYPE dconstroot;
7924
7925 real_arithmetic (&dconstroot, MULT_EXPR,
7926 dconst_third_ptr (), dconst_third_ptr ());
7927 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7928 tree_root = build_real (type, dconstroot);
7929 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7930 }
7931 }
7932 }
7933
7934 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7935 if (fcode == BUILT_IN_POW
7936 || fcode == BUILT_IN_POWF
7937 || fcode == BUILT_IN_POWL)
7938 {
7939 tree arg00 = CALL_EXPR_ARG (arg, 0);
7940 tree arg01 = CALL_EXPR_ARG (arg, 1);
7941 if (tree_expr_nonnegative_p (arg00))
7942 {
7943 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7944 const REAL_VALUE_TYPE dconstroot
7945 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7946 tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
7947 build_real (type, dconstroot));
7948 return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
7949 }
7950 }
7951 }
7952 return NULL_TREE;
7953 }
7954
7955 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7956 TYPE is the type of the return value. Return NULL_TREE if no
7957 simplification can be made. */
7958
7959 static tree
7960 fold_builtin_cos (location_t loc,
7961 tree arg, tree type, tree fndecl)
7962 {
7963 tree res, narg;
7964
7965 if (!validate_arg (arg, REAL_TYPE))
7966 return NULL_TREE;
7967
7968 /* Calculate the result when the argument is a constant. */
7969 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7970 return res;
7971
7972 /* Optimize cos(-x) into cos (x). */
7973 if ((narg = fold_strip_sign_ops (arg)))
7974 return build_call_expr_loc (loc, fndecl, 1, narg);
7975
7976 return NULL_TREE;
7977 }
7978
7979 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7980 Return NULL_TREE if no simplification can be made. */
7981
7982 static tree
7983 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
7984 {
7985 if (validate_arg (arg, REAL_TYPE))
7986 {
7987 tree res, narg;
7988
7989 /* Calculate the result when the argument is a constant. */
7990 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7991 return res;
7992
7993 /* Optimize cosh(-x) into cosh (x). */
7994 if ((narg = fold_strip_sign_ops (arg)))
7995 return build_call_expr_loc (loc, fndecl, 1, narg);
7996 }
7997
7998 return NULL_TREE;
7999 }
8000
8001 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
8002 argument ARG. TYPE is the type of the return value. Return
8003 NULL_TREE if no simplification can be made. */
8004
8005 static tree
8006 fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
8007 bool hyper)
8008 {
8009 if (validate_arg (arg, COMPLEX_TYPE)
8010 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
8011 {
8012 tree tmp;
8013
8014 /* Calculate the result when the argument is a constant. */
8015 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
8016 return tmp;
8017
8018 /* Optimize fn(-x) into fn(x). */
8019 if ((tmp = fold_strip_sign_ops (arg)))
8020 return build_call_expr_loc (loc, fndecl, 1, tmp);
8021 }
8022
8023 return NULL_TREE;
8024 }
8025
8026 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
8027 Return NULL_TREE if no simplification can be made. */
8028
8029 static tree
8030 fold_builtin_tan (tree arg, tree type)
8031 {
8032 enum built_in_function fcode;
8033 tree res;
8034
8035 if (!validate_arg (arg, REAL_TYPE))
8036 return NULL_TREE;
8037
8038 /* Calculate the result when the argument is a constant. */
8039 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
8040 return res;
8041
8042 /* Optimize tan(atan(x)) = x. */
8043 fcode = builtin_mathfn_code (arg);
8044 if (flag_unsafe_math_optimizations
8045 && (fcode == BUILT_IN_ATAN
8046 || fcode == BUILT_IN_ATANF
8047 || fcode == BUILT_IN_ATANL))
8048 return CALL_EXPR_ARG (arg, 0);
8049
8050 return NULL_TREE;
8051 }
8052
8053 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
8054 NULL_TREE if no simplification can be made. */
8055
8056 static tree
8057 fold_builtin_sincos (location_t loc,
8058 tree arg0, tree arg1, tree arg2)
8059 {
8060 tree type;
8061 tree res, fn, call;
8062
8063 if (!validate_arg (arg0, REAL_TYPE)
8064 || !validate_arg (arg1, POINTER_TYPE)
8065 || !validate_arg (arg2, POINTER_TYPE))
8066 return NULL_TREE;
8067
8068 type = TREE_TYPE (arg0);
8069
8070 /* Calculate the result when the argument is a constant. */
8071 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
8072 return res;
8073
8074 /* Canonicalize sincos to cexpi. */
8075 if (!targetm.libc_has_function (function_c99_math_complex))
8076 return NULL_TREE;
8077 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
8078 if (!fn)
8079 return NULL_TREE;
8080
8081 call = build_call_expr_loc (loc, fn, 1, arg0);
8082 call = builtin_save_expr (call);
8083
8084 return build2 (COMPOUND_EXPR, void_type_node,
8085 build2 (MODIFY_EXPR, void_type_node,
8086 build_fold_indirect_ref_loc (loc, arg1),
8087 build1 (IMAGPART_EXPR, type, call)),
8088 build2 (MODIFY_EXPR, void_type_node,
8089 build_fold_indirect_ref_loc (loc, arg2),
8090 build1 (REALPART_EXPR, type, call)));
8091 }
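
/* A sketch of the canonicalization above, assuming a C99 complex
   runtime: with T evaluated once,
     sincos (x, psin, pcos)
   becomes
     t = cexpi (x); *psin = __imag__ t; *pcos = __real__ t;
   where cexpi (x) computes cos (x) + I * sin (x).  */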
8092
8093 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
8094 NULL_TREE if no simplification can be made. */
8095
8096 static tree
8097 fold_builtin_cexp (location_t loc, tree arg0, tree type)
8098 {
8099 tree rtype;
8100 tree realp, imagp, ifn;
8101 tree res;
8102
8103 if (!validate_arg (arg0, COMPLEX_TYPE)
8104 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
8105 return NULL_TREE;
8106
8107 /* Calculate the result when the argument is a constant. */
8108 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
8109 return res;
8110
8111 rtype = TREE_TYPE (TREE_TYPE (arg0));
8112
8113 /* If we can determine the real part of arg0 and it is constant zero,
8114 fold to cexpi. */
8115 if (!targetm.libc_has_function (function_c99_math_complex))
8116 return NULL_TREE;
8117 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
8118 if (!ifn)
8119 return NULL_TREE;
8120
8121 if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
8122 && real_zerop (realp))
8123 {
8124 tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
8125 return build_call_expr_loc (loc, ifn, 1, narg);
8126 }
8127
8128 /* If we can easily decompose the real and imaginary parts, split cexp
8129 into exp (r) * cexpi (i). */
8130 if (flag_unsafe_math_optimizations
8131 && realp)
8132 {
8133 tree rfn, rcall, icall;
8134
8135 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
8136 if (!rfn)
8137 return NULL_TREE;
8138
8139 imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
8140 if (!imagp)
8141 return NULL_TREE;
8142
8143 icall = build_call_expr_loc (loc, ifn, 1, imagp);
8144 icall = builtin_save_expr (icall);
8145 rcall = build_call_expr_loc (loc, rfn, 1, realp);
8146 rcall = builtin_save_expr (rcall);
8147 return fold_build2_loc (loc, COMPLEX_EXPR, type,
8148 fold_build2_loc (loc, MULT_EXPR, rtype,
8149 rcall,
8150 fold_build1_loc (loc, REALPART_EXPR,
8151 rtype, icall)),
8152 fold_build2_loc (loc, MULT_EXPR, rtype,
8153 rcall,
8154 fold_build1_loc (loc, IMAGPART_EXPR,
8155 rtype, icall)));
8156 }
8157
8158 return NULL_TREE;
8159 }
8160
8161 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
8162 Return NULL_TREE if no simplification can be made. */
8163
8164 static tree
8165 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
8166 {
8167 if (!validate_arg (arg, REAL_TYPE))
8168 return NULL_TREE;
8169
8170 /* Optimize trunc of constant value. */
8171 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8172 {
8173 REAL_VALUE_TYPE r, x;
8174 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8175
8176 x = TREE_REAL_CST (arg);
8177 real_trunc (&r, TYPE_MODE (type), &x);
8178 return build_real (type, r);
8179 }
8180
8181 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8182 }
8183
8184 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
8185 Return NULL_TREE if no simplification can be made. */
8186
8187 static tree
8188 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
8189 {
8190 if (!validate_arg (arg, REAL_TYPE))
8191 return NULL_TREE;
8192
8193 /* Optimize floor of constant value. */
8194 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8195 {
8196 REAL_VALUE_TYPE x;
8197
8198 x = TREE_REAL_CST (arg);
8199 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8200 {
8201 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8202 REAL_VALUE_TYPE r;
8203
8204 real_floor (&r, TYPE_MODE (type), &x);
8205 return build_real (type, r);
8206 }
8207 }
8208
8209 /* Fold floor (x) where x is nonnegative to trunc (x). */
8210 if (tree_expr_nonnegative_p (arg))
8211 {
8212 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
8213 if (truncfn)
8214 return build_call_expr_loc (loc, truncfn, 1, arg);
8215 }
8216
8217 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8218 }
8219
8220 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
8221 Return NULL_TREE if no simplification can be made. */
8222
8223 static tree
8224 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
8225 {
8226 if (!validate_arg (arg, REAL_TYPE))
8227 return NULL_TREE;
8228
8229 /* Optimize ceil of constant value. */
8230 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8231 {
8232 REAL_VALUE_TYPE x;
8233
8234 x = TREE_REAL_CST (arg);
8235 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8236 {
8237 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8238 REAL_VALUE_TYPE r;
8239
8240 real_ceil (&r, TYPE_MODE (type), &x);
8241 return build_real (type, r);
8242 }
8243 }
8244
8245 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8246 }
8247
8248 /* Fold function call to builtin round, roundf or roundl with argument ARG.
8249 Return NULL_TREE if no simplification can be made. */
8250
8251 static tree
8252 fold_builtin_round (location_t loc, tree fndecl, tree arg)
8253 {
8254 if (!validate_arg (arg, REAL_TYPE))
8255 return NULL_TREE;
8256
8257 /* Optimize round of constant value. */
8258 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8259 {
8260 REAL_VALUE_TYPE x;
8261
8262 x = TREE_REAL_CST (arg);
8263 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8264 {
8265 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8266 REAL_VALUE_TYPE r;
8267
8268 real_round (&r, TYPE_MODE (type), &x);
8269 return build_real (type, r);
8270 }
8271 }
8272
8273 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8274 }
8275
8276 /* Fold function call to builtin lround, lroundf or lroundl (or the
8277 corresponding long long versions) and other rounding functions. ARG
8278 is the argument to the call. Return NULL_TREE if no simplification
8279 can be made. */
8280
8281 static tree
8282 fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
8283 {
8284 if (!validate_arg (arg, REAL_TYPE))
8285 return NULL_TREE;
8286
8287 /* Optimize lround of constant value. */
8288 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8289 {
8290 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
8291
8292 if (real_isfinite (&x))
8293 {
8294 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
8295 tree ftype = TREE_TYPE (arg);
8296 REAL_VALUE_TYPE r;
8297 bool fail = false;
8298
8299 switch (DECL_FUNCTION_CODE (fndecl))
8300 {
8301 CASE_FLT_FN (BUILT_IN_IFLOOR):
8302 CASE_FLT_FN (BUILT_IN_LFLOOR):
8303 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8304 real_floor (&r, TYPE_MODE (ftype), &x);
8305 break;
8306
8307 CASE_FLT_FN (BUILT_IN_ICEIL):
8308 CASE_FLT_FN (BUILT_IN_LCEIL):
8309 CASE_FLT_FN (BUILT_IN_LLCEIL):
8310 real_ceil (&r, TYPE_MODE (ftype), &x);
8311 break;
8312
8313 CASE_FLT_FN (BUILT_IN_IROUND):
8314 CASE_FLT_FN (BUILT_IN_LROUND):
8315 CASE_FLT_FN (BUILT_IN_LLROUND):
8316 real_round (&r, TYPE_MODE (ftype), &x);
8317 break;
8318
8319 default:
8320 gcc_unreachable ();
8321 }
8322
8323 wide_int val = real_to_integer (&r, &fail, TYPE_PRECISION (itype));
8324 if (!fail)
8325 return wide_int_to_tree (itype, val);
8326 }
8327 }
8328
8329 switch (DECL_FUNCTION_CODE (fndecl))
8330 {
8331 CASE_FLT_FN (BUILT_IN_LFLOOR):
8332 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8333 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
8334 if (tree_expr_nonnegative_p (arg))
8335 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
8336 TREE_TYPE (TREE_TYPE (fndecl)), arg);
8337 break;
8338 default:;
8339 }
8340
8341 return fold_fixed_mathfn (loc, fndecl, arg);
8342 }
8343
8344 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
8345 and their long and long long variants (e.g. ffsl and ffsll). ARG is
8346 the argument to the call. Return NULL_TREE if no simplification can
8347 be made. */
8348
8349 static tree
8350 fold_builtin_bitop (tree fndecl, tree arg)
8351 {
8352 if (!validate_arg (arg, INTEGER_TYPE))
8353 return NULL_TREE;
8354
8355 /* Optimize for constant argument. */
8356 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8357 {
8358 tree type = TREE_TYPE (arg);
8359 int result;
8360
8361 switch (DECL_FUNCTION_CODE (fndecl))
8362 {
8363 CASE_INT_FN (BUILT_IN_FFS):
8364 result = wi::ffs (arg);
8365 break;
8366
8367 CASE_INT_FN (BUILT_IN_CLZ):
8368 if (wi::ne_p (arg, 0))
8369 result = wi::clz (arg);
8370 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8371 result = TYPE_PRECISION (type);
8372 break;
8373
8374 CASE_INT_FN (BUILT_IN_CTZ):
8375 if (wi::ne_p (arg, 0))
8376 result = wi::ctz (arg);
8377 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8378 result = TYPE_PRECISION (type);
8379 break;
8380
8381 CASE_INT_FN (BUILT_IN_CLRSB):
8382 result = wi::clrsb (arg);
8383 break;
8384
8385 CASE_INT_FN (BUILT_IN_POPCOUNT):
8386 result = wi::popcount (arg);
8387 break;
8388
8389 CASE_INT_FN (BUILT_IN_PARITY):
8390 result = wi::parity (arg);
8391 break;
8392
8393 default:
8394 gcc_unreachable ();
8395 }
8396
8397 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
8398 }
8399
8400 return NULL_TREE;
8401 }
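
/* Constant-argument examples of the folds above, assuming 32-bit int:
     __builtin_ffs (8)          becomes  4
     __builtin_clz (1)          becomes  31
     __builtin_ctz (8)          becomes  3
     __builtin_popcount (0xff)  becomes  8
     __builtin_parity (7)       becomes  1.  */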
8402
8403 /* Fold function call to builtin_bswap and the short, long and long long
8404 variants. Return NULL_TREE if no simplification can be made. */
8405 static tree
8406 fold_builtin_bswap (tree fndecl, tree arg)
8407 {
8408 if (! validate_arg (arg, INTEGER_TYPE))
8409 return NULL_TREE;
8410
8411 /* Optimize constant value. */
8412 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8413 {
8414 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8415
8416 switch (DECL_FUNCTION_CODE (fndecl))
8417 {
8418 case BUILT_IN_BSWAP16:
8419 case BUILT_IN_BSWAP32:
8420 case BUILT_IN_BSWAP64:
8421 {
8422 signop sgn = TYPE_SIGN (type);
8423 tree result =
8424 wide_int_to_tree (type,
8425 wide_int::from (arg, TYPE_PRECISION (type),
8426 sgn).bswap ());
8427 return result;
8428 }
8429 default:
8430 gcc_unreachable ();
8431 }
8432 }
8433
8434 return NULL_TREE;
8435 }
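
/* Constant-argument examples of the byte swaps above:
     __builtin_bswap16 (0xaabb)      becomes  0xbbaa
     __builtin_bswap32 (0x12345678)  becomes  0x78563412.  */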
8436
8437 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8438 NULL_TREE if no simplification can be made. */
8439
8440 static tree
8441 fold_builtin_hypot (location_t loc, tree fndecl,
8442 tree arg0, tree arg1, tree type)
8443 {
8444 tree res, narg0, narg1;
8445
8446 if (!validate_arg (arg0, REAL_TYPE)
8447 || !validate_arg (arg1, REAL_TYPE))
8448 return NULL_TREE;
8449
8450 /* Calculate the result when the argument is a constant. */
8451 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8452 return res;
8453
8454 /* If either argument to hypot has a negate or abs, strip that off.
8455 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8456 narg0 = fold_strip_sign_ops (arg0);
8457 narg1 = fold_strip_sign_ops (arg1);
8458 if (narg0 || narg1)
8459 {
8460 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
8461 narg1 ? narg1 : arg1);
8462 }
8463
8464 /* If either argument is zero, hypot is fabs of the other. */
8465 if (real_zerop (arg0))
8466 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
8467 else if (real_zerop (arg1))
8468 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
8469
8470 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8471 if (flag_unsafe_math_optimizations
8472 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8473 {
8474 const REAL_VALUE_TYPE sqrt2_trunc
8475 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
8476 return fold_build2_loc (loc, MULT_EXPR, type,
8477 fold_build1_loc (loc, ABS_EXPR, type, arg0),
8478 build_real (type, sqrt2_trunc));
8479 }
8480
8481 return NULL_TREE;
8482 }
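
/* A sketch of the simplifications above:
     hypot (x, 0.0)        becomes  fabs (x)
     hypot (-x, fabs (y))  becomes  hypot (x, y)
   and with -funsafe-math-optimizations
     hypot (x, x)          becomes  fabs (x) * sqrt (2).  */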
8483
8484
8485 /* Fold a builtin function call to pow, powf, or powl. Return
8486 NULL_TREE if no simplification can be made. */
8487 static tree
8488 fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
8489 {
8490 tree res;
8491
8492 if (!validate_arg (arg0, REAL_TYPE)
8493 || !validate_arg (arg1, REAL_TYPE))
8494 return NULL_TREE;
8495
8496 /* Calculate the result when the argument is a constant. */
8497 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8498 return res;
8499
8500 /* Optimize pow(1.0,y) = 1.0. */
8501 if (real_onep (arg0))
8502 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8503
8504 if (TREE_CODE (arg1) == REAL_CST
8505 && !TREE_OVERFLOW (arg1))
8506 {
8507 REAL_VALUE_TYPE cint;
8508 REAL_VALUE_TYPE c;
8509 HOST_WIDE_INT n;
8510
8511 c = TREE_REAL_CST (arg1);
8512
8513 /* Optimize pow(x,0.0) = 1.0. */
8514 if (REAL_VALUES_EQUAL (c, dconst0))
8515 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8516 arg0);
8517
8518 /* Optimize pow(x,1.0) = x. */
8519 if (REAL_VALUES_EQUAL (c, dconst1))
8520 return arg0;
8521
8522 /* Optimize pow(x,-1.0) = 1.0/x. */
8523 if (REAL_VALUES_EQUAL (c, dconstm1))
8524 return fold_build2_loc (loc, RDIV_EXPR, type,
8525 build_real (type, dconst1), arg0);
8526
8527 /* Optimize pow(x,0.5) = sqrt(x). */
8528 if (flag_unsafe_math_optimizations
8529 && REAL_VALUES_EQUAL (c, dconsthalf))
8530 {
8531 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8532
8533 if (sqrtfn != NULL_TREE)
8534 return build_call_expr_loc (loc, sqrtfn, 1, arg0);
8535 }
8536
8537 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8538 if (flag_unsafe_math_optimizations)
8539 {
8540 const REAL_VALUE_TYPE dconstroot
8541 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8542
8543 if (REAL_VALUES_EQUAL (c, dconstroot))
8544 {
8545 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8546 if (cbrtfn != NULL_TREE)
8547 return build_call_expr_loc (loc, cbrtfn, 1, arg0);
8548 }
8549 }
8550
8551 /* Check for an integer exponent. */
8552 n = real_to_integer (&c);
8553 real_from_integer (&cint, VOIDmode, n, SIGNED);
8554 if (real_identical (&c, &cint))
8555 {
8556 /* Attempt to evaluate pow at compile-time, unless this should
8557 raise an exception. */
8558 if (TREE_CODE (arg0) == REAL_CST
8559 && !TREE_OVERFLOW (arg0)
8560 && (n > 0
8561 || (!flag_trapping_math && !flag_errno_math)
8562 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
8563 {
8564 REAL_VALUE_TYPE x;
8565 bool inexact;
8566
8567 x = TREE_REAL_CST (arg0);
8568 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8569 if (flag_unsafe_math_optimizations || !inexact)
8570 return build_real (type, x);
8571 }
8572
8573 /* Strip sign ops from even integer powers. */
8574 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8575 {
8576 tree narg0 = fold_strip_sign_ops (arg0);
8577 if (narg0)
8578 return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
8579 }
8580 }
8581 }
8582
8583 if (flag_unsafe_math_optimizations)
8584 {
8585 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8586
8587 /* Optimize pow(expN(x),y) = expN(x*y). */
8588 if (BUILTIN_EXPONENT_P (fcode))
8589 {
8590 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8591 tree arg = CALL_EXPR_ARG (arg0, 0);
8592 arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
8593 return build_call_expr_loc (loc, expfn, 1, arg);
8594 }
8595
8596 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8597 if (BUILTIN_SQRT_P (fcode))
8598 {
8599 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8600 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8601 build_real (type, dconsthalf));
8602 return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
8603 }
8604
8605 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8606 if (BUILTIN_CBRT_P (fcode))
8607 {
8608 tree arg = CALL_EXPR_ARG (arg0, 0);
8609 if (tree_expr_nonnegative_p (arg))
8610 {
8611 const REAL_VALUE_TYPE dconstroot
8612 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8613 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8614 build_real (type, dconstroot));
8615 return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
8616 }
8617 }
8618
8619 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
8620 if (fcode == BUILT_IN_POW
8621 || fcode == BUILT_IN_POWF
8622 || fcode == BUILT_IN_POWL)
8623 {
8624 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8625 if (tree_expr_nonnegative_p (arg00))
8626 {
8627 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8628 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
8629 return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
8630 }
8631 }
8632 }
8633
8634 return NULL_TREE;
8635 }
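
/* A sketch of the folds above:
     pow (1.0, y)   becomes  1.0
     pow (x, 0.0)   becomes  1.0
     pow (x, -1.0)  becomes  1.0 / x
   and with -funsafe-math-optimizations
     pow (x, 0.5)      becomes  sqrt (x)
     pow (exp (x), y)  becomes  exp (x * y).  */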
8636
8637 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8638 Return NULL_TREE if no simplification can be made. */
8639 static tree
8640 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
8641 tree arg0, tree arg1, tree type)
8642 {
8643 if (!validate_arg (arg0, REAL_TYPE)
8644 || !validate_arg (arg1, INTEGER_TYPE))
8645 return NULL_TREE;
8646
8647 /* Optimize pow(1.0,y) = 1.0. */
8648 if (real_onep (arg0))
8649 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8650
8651 if (tree_fits_shwi_p (arg1))
8652 {
8653 HOST_WIDE_INT c = tree_to_shwi (arg1);
8654
8655 /* Evaluate powi at compile-time. */
8656 if (TREE_CODE (arg0) == REAL_CST
8657 && !TREE_OVERFLOW (arg0))
8658 {
8659 REAL_VALUE_TYPE x;
8660 x = TREE_REAL_CST (arg0);
8661 real_powi (&x, TYPE_MODE (type), &x, c);
8662 return build_real (type, x);
8663 }
8664
8665 /* Optimize powi(x,0) = 1.0. */
8666 if (c == 0)
8667 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8668 arg0);
8669
8670 /* Optimize powi(x,1) = x. */
8671 if (c == 1)
8672 return arg0;
8673
8674 /* Optimize powi(x,-1) = 1.0/x. */
8675 if (c == -1)
8676 return fold_build2_loc (loc, RDIV_EXPR, type,
8677 build_real (type, dconst1), arg0);
8678 }
8679
8680 return NULL_TREE;
8681 }
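
/* Example: the folds above reduce

     __builtin_powi (x, 0)   ->  1.0  (x is still evaluated for
                                       side effects)
     __builtin_powi (x, 1)   ->  x
     __builtin_powi (x, -1)  ->  1.0 / x

   and a constant call such as __builtin_powi (2.0, 10) is evaluated
   at compile time to 1024.0.  */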
8682
8683 /* A subroutine of fold_builtin to fold the various exponent
8684 functions. Return NULL_TREE if no simplification can be made.
8685 FUNC is the corresponding MPFR exponent function. */
8686
8687 static tree
8688 fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
8689 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8690 {
8691 if (validate_arg (arg, REAL_TYPE))
8692 {
8693 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8694 tree res;
8695
8696 /* Calculate the result when the argument is a constant. */
8697 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8698 return res;
8699
8700 /* Optimize expN(logN(x)) = x. */
8701 if (flag_unsafe_math_optimizations)
8702 {
8703 const enum built_in_function fcode = builtin_mathfn_code (arg);
8704
8705 if ((func == mpfr_exp
8706 && (fcode == BUILT_IN_LOG
8707 || fcode == BUILT_IN_LOGF
8708 || fcode == BUILT_IN_LOGL))
8709 || (func == mpfr_exp2
8710 && (fcode == BUILT_IN_LOG2
8711 || fcode == BUILT_IN_LOG2F
8712 || fcode == BUILT_IN_LOG2L))
8713 || (func == mpfr_exp10
8714 && (fcode == BUILT_IN_LOG10
8715 || fcode == BUILT_IN_LOG10F
8716 || fcode == BUILT_IN_LOG10L)))
8717 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8718 }
8719 }
8720
8721 return NULL_TREE;
8722 }
8723
8724 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8725 arguments to the call, and TYPE is its return type.
8726 Return NULL_TREE if no simplification can be made. */
8727
8728 static tree
8729 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
8730 {
8731 if (!validate_arg (arg1, POINTER_TYPE)
8732 || !validate_arg (arg2, INTEGER_TYPE)
8733 || !validate_arg (len, INTEGER_TYPE))
8734 return NULL_TREE;
8735 else
8736 {
8737 const char *p1;
8738
8739 if (TREE_CODE (arg2) != INTEGER_CST
8740 || !tree_fits_uhwi_p (len))
8741 return NULL_TREE;
8742
8743 p1 = c_getstr (arg1);
8744 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
8745 {
8746 char c;
8747 const char *r;
8748 tree tem;
8749
8750 if (target_char_cast (arg2, &c))
8751 return NULL_TREE;
8752
8753 r = (const char *) memchr (p1, c, tree_to_uhwi (len));
8754
8755 if (r == NULL)
8756 return build_int_cst (TREE_TYPE (arg1), 0);
8757
8758 tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
8759 return fold_convert_loc (loc, type, tem);
8760 }
8761 return NULL_TREE;
8762 }
8763 }
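
/* Example: given a constant string and an in-bounds constant length,

     __builtin_memchr ("hello", 'l', 6)

   folds at compile time to the address "hello" + 2, or to a null
   pointer if the character does not occur within the given length.  */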
8764
8765 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8766 Return NULL_TREE if no simplification can be made. */
8767
8768 static tree
8769 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8770 {
8771 const char *p1, *p2;
8772
8773 if (!validate_arg (arg1, POINTER_TYPE)
8774 || !validate_arg (arg2, POINTER_TYPE)
8775 || !validate_arg (len, INTEGER_TYPE))
8776 return NULL_TREE;
8777
8778 /* If the LEN parameter is zero, return zero. */
8779 if (integer_zerop (len))
8780 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8781 arg1, arg2);
8782
8783 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8784 if (operand_equal_p (arg1, arg2, 0))
8785 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8786
8787 p1 = c_getstr (arg1);
8788 p2 = c_getstr (arg2);
8789
8790 /* If all arguments are constant, and the value of len is not greater
8791 than the lengths of arg1 and arg2, evaluate at compile-time. */
8792 if (tree_fits_uhwi_p (len) && p1 && p2
8793 && compare_tree_int (len, strlen (p1) + 1) <= 0
8794 && compare_tree_int (len, strlen (p2) + 1) <= 0)
8795 {
8796 const int r = memcmp (p1, p2, tree_to_uhwi (len));
8797
8798 if (r > 0)
8799 return integer_one_node;
8800 else if (r < 0)
8801 return integer_minus_one_node;
8802 else
8803 return integer_zero_node;
8804 }
8805
8806 /* If the len parameter is one, return an expression corresponding to
8807 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8808 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8809 {
8810 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8811 tree cst_uchar_ptr_node
8812 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8813
8814 tree ind1
8815 = fold_convert_loc (loc, integer_type_node,
8816 build1 (INDIRECT_REF, cst_uchar_node,
8817 fold_convert_loc (loc,
8818 cst_uchar_ptr_node,
8819 arg1)));
8820 tree ind2
8821 = fold_convert_loc (loc, integer_type_node,
8822 build1 (INDIRECT_REF, cst_uchar_node,
8823 fold_convert_loc (loc,
8824 cst_uchar_ptr_node,
8825 arg2)));
8826 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8827 }
8828
8829 return NULL_TREE;
8830 }
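
/* Example: the folds above reduce

     __builtin_memcmp (p, q, 0)  ->  0  (p and q still evaluated for
                                         side effects)
     __builtin_memcmp (p, q, 1)  ->  *(const unsigned char *) p
                                     - *(const unsigned char *) q

   and evaluate fully constant calls at compile time, e.g.
   __builtin_memcmp ("ab", "ac", 2) becomes -1.  */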
8831
8832 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
8833 Return NULL_TREE if no simplification can be made. */
8834
8835 static tree
8836 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
8837 {
8838 const char *p1, *p2;
8839
8840 if (!validate_arg (arg1, POINTER_TYPE)
8841 || !validate_arg (arg2, POINTER_TYPE))
8842 return NULL_TREE;
8843
8844 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8845 if (operand_equal_p (arg1, arg2, 0))
8846 return integer_zero_node;
8847
8848 p1 = c_getstr (arg1);
8849 p2 = c_getstr (arg2);
8850
8851 if (p1 && p2)
8852 {
8853 const int i = strcmp (p1, p2);
8854 if (i < 0)
8855 return integer_minus_one_node;
8856 else if (i > 0)
8857 return integer_one_node;
8858 else
8859 return integer_zero_node;
8860 }
8861
8862 /* If the second arg is "", return *(const unsigned char*)arg1. */
8863 if (p2 && *p2 == '\0')
8864 {
8865 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8866 tree cst_uchar_ptr_node
8867 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8868
8869 return fold_convert_loc (loc, integer_type_node,
8870 build1 (INDIRECT_REF, cst_uchar_node,
8871 fold_convert_loc (loc,
8872 cst_uchar_ptr_node,
8873 arg1)));
8874 }
8875
8876 /* If the first arg is "", return -*(const unsigned char*)arg2. */
8877 if (p1 && *p1 == '\0')
8878 {
8879 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8880 tree cst_uchar_ptr_node
8881 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8882
8883 tree temp
8884 = fold_convert_loc (loc, integer_type_node,
8885 build1 (INDIRECT_REF, cst_uchar_node,
8886 fold_convert_loc (loc,
8887 cst_uchar_ptr_node,
8888 arg2)));
8889 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8890 }
8891
8892 return NULL_TREE;
8893 }
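
/* Example: __builtin_strcmp ("abc", "abd") is evaluated above at
   compile time to -1, and a comparison against an empty string such
   as __builtin_strcmp (s, "") reduces to *(const unsigned char *) s.  */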
8894
8895 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
8896 Return NULL_TREE if no simplification can be made. */
8897
8898 static tree
8899 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
8900 {
8901 const char *p1, *p2;
8902
8903 if (!validate_arg (arg1, POINTER_TYPE)
8904 || !validate_arg (arg2, POINTER_TYPE)
8905 || !validate_arg (len, INTEGER_TYPE))
8906 return NULL_TREE;
8907
8908 /* If the LEN parameter is zero, return zero. */
8909 if (integer_zerop (len))
8910 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8911 arg1, arg2);
8912
8913 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8914 if (operand_equal_p (arg1, arg2, 0))
8915 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8916
8917 p1 = c_getstr (arg1);
8918 p2 = c_getstr (arg2);
8919
8920 if (tree_fits_uhwi_p (len) && p1 && p2)
8921 {
8922 const int i = strncmp (p1, p2, tree_to_uhwi (len));
8923 if (i > 0)
8924 return integer_one_node;
8925 else if (i < 0)
8926 return integer_minus_one_node;
8927 else
8928 return integer_zero_node;
8929 }
8930
8931 /* If the second arg is "", and the length is greater than zero,
8932 return *(const unsigned char*)arg1. */
8933 if (p2 && *p2 == '\0'
8934 && TREE_CODE (len) == INTEGER_CST
8935 && tree_int_cst_sgn (len) == 1)
8936 {
8937 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8938 tree cst_uchar_ptr_node
8939 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8940
8941 return fold_convert_loc (loc, integer_type_node,
8942 build1 (INDIRECT_REF, cst_uchar_node,
8943 fold_convert_loc (loc,
8944 cst_uchar_ptr_node,
8945 arg1)));
8946 }
8947
8948 /* If the first arg is "", and the length is greater than zero,
8949 return -*(const unsigned char*)arg2. */
8950 if (p1 && *p1 == '\0'
8951 && TREE_CODE (len) == INTEGER_CST
8952 && tree_int_cst_sgn (len) == 1)
8953 {
8954 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8955 tree cst_uchar_ptr_node
8956 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8957
8958 tree temp = fold_convert_loc (loc, integer_type_node,
8959 build1 (INDIRECT_REF, cst_uchar_node,
8960 fold_convert_loc (loc,
8961 cst_uchar_ptr_node,
8962 arg2)));
8963 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8964 }
8965
8966 /* If the len parameter is one, return an expression corresponding to
8967 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8968 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8969 {
8970 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8971 tree cst_uchar_ptr_node
8972 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8973
8974 tree ind1 = fold_convert_loc (loc, integer_type_node,
8975 build1 (INDIRECT_REF, cst_uchar_node,
8976 fold_convert_loc (loc,
8977 cst_uchar_ptr_node,
8978 arg1)));
8979 tree ind2 = fold_convert_loc (loc, integer_type_node,
8980 build1 (INDIRECT_REF, cst_uchar_node,
8981 fold_convert_loc (loc,
8982 cst_uchar_ptr_node,
8983 arg2)));
8984 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8985 }
8986
8987 return NULL_TREE;
8988 }
8989
8990 /* Fold function call to builtin signbit, signbitf or signbitl with argument
8991 ARG. Return NULL_TREE if no simplification can be made. */
8992
8993 static tree
8994 fold_builtin_signbit (location_t loc, tree arg, tree type)
8995 {
8996 if (!validate_arg (arg, REAL_TYPE))
8997 return NULL_TREE;
8998
8999 /* If ARG is a compile-time constant, determine the result. */
9000 if (TREE_CODE (arg) == REAL_CST
9001 && !TREE_OVERFLOW (arg))
9002 {
9003 REAL_VALUE_TYPE c;
9004
9005 c = TREE_REAL_CST (arg);
9006 return (REAL_VALUE_NEGATIVE (c)
9007 ? build_one_cst (type)
9008 : build_zero_cst (type));
9009 }
9010
9011 /* If ARG is non-negative, the result is always zero. */
9012 if (tree_expr_nonnegative_p (arg))
9013 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9014
9015 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9016 if (!HONOR_SIGNED_ZEROS (arg))
9017 return fold_convert (type,
9018 fold_build2_loc (loc, LT_EXPR, boolean_type_node, arg,
9019 build_real (TREE_TYPE (arg), dconst0)));
9020
9021 return NULL_TREE;
9022 }
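
/* Example: __builtin_signbit (-2.5) folds to 1 and
   __builtin_signbit (2.5) to 0 at compile time; for a non-constant
   argument whose format has no signed zeros, the call is rewritten
   into the equivalent of "arg < 0.0".  */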
9023
9024 /* Fold function call to builtin copysign, copysignf or copysignl with
9025 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9026 be made. */
9027
9028 static tree
9029 fold_builtin_copysign (location_t loc, tree fndecl,
9030 tree arg1, tree arg2, tree type)
9031 {
9032 tree tem;
9033
9034 if (!validate_arg (arg1, REAL_TYPE)
9035 || !validate_arg (arg2, REAL_TYPE))
9036 return NULL_TREE;
9037
9038 /* copysign(X,X) is X. */
9039 if (operand_equal_p (arg1, arg2, 0))
9040 return fold_convert_loc (loc, type, arg1);
9041
9042 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9043 if (TREE_CODE (arg1) == REAL_CST
9044 && TREE_CODE (arg2) == REAL_CST
9045 && !TREE_OVERFLOW (arg1)
9046 && !TREE_OVERFLOW (arg2))
9047 {
9048 REAL_VALUE_TYPE c1, c2;
9049
9050 c1 = TREE_REAL_CST (arg1);
9051 c2 = TREE_REAL_CST (arg2);
9052 /* c1.sign := c2.sign. */
9053 real_copysign (&c1, &c2);
9054 return build_real (type, c1);
9055 }
9056
9057 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9058 Remember to evaluate Y for side-effects. */
9059 if (tree_expr_nonnegative_p (arg2))
9060 return omit_one_operand_loc (loc, type,
9061 fold_build1_loc (loc, ABS_EXPR, type, arg1),
9062 arg2);
9063
9064 /* Strip sign changing operations for the first argument. */
9065 tem = fold_strip_sign_ops (arg1);
9066 if (tem)
9067 return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
9068
9069 return NULL_TREE;
9070 }
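
/* Example: a constant call such as __builtin_copysign (3.0, -1.0)
   folds above to -3.0, and __builtin_copysign (x, 2.0) reduces to
   fabs (x) because the second argument is known non-negative (it is
   still evaluated for side effects).  */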
9071
9072 /* Fold a call to builtin isascii with argument ARG. */
9073
9074 static tree
9075 fold_builtin_isascii (location_t loc, tree arg)
9076 {
9077 if (!validate_arg (arg, INTEGER_TYPE))
9078 return NULL_TREE;
9079 else
9080 {
9081 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9082 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9083 build_int_cst (integer_type_node,
9084 ~ (unsigned HOST_WIDE_INT) 0x7f));
9085 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
9086 arg, integer_zero_node);
9087 }
9088 }
9089
9090 /* Fold a call to builtin toascii with argument ARG. */
9091
9092 static tree
9093 fold_builtin_toascii (location_t loc, tree arg)
9094 {
9095 if (!validate_arg (arg, INTEGER_TYPE))
9096 return NULL_TREE;
9097
9098 /* Transform toascii(c) -> (c & 0x7f). */
9099 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
9100 build_int_cst (integer_type_node, 0x7f));
9101 }
9102
9103 /* Fold a call to builtin isdigit with argument ARG. */
9104
9105 static tree
9106 fold_builtin_isdigit (location_t loc, tree arg)
9107 {
9108 if (!validate_arg (arg, INTEGER_TYPE))
9109 return NULL_TREE;
9110 else
9111 {
9112 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9113 /* According to the C standard, isdigit is unaffected by locale.
9114 However, it definitely is affected by the target character set. */
9115 unsigned HOST_WIDE_INT target_digit0
9116 = lang_hooks.to_target_charset ('0');
9117
9118 if (target_digit0 == 0)
9119 return NULL_TREE;
9120
9121 arg = fold_convert_loc (loc, unsigned_type_node, arg);
9122 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
9123 build_int_cst (unsigned_type_node, target_digit0));
9124 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
9125 build_int_cst (unsigned_type_node, 9));
9126 }
9127 }
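
/* Example: on a target whose character set keeps the digits
   contiguous starting at '0', __builtin_isdigit (c) is rewritten
   above into the equivalent of

     (unsigned) c - '0' <= 9

   which needs no locale support at run time.  */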
9128
9129 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9130
9131 static tree
9132 fold_builtin_fabs (location_t loc, tree arg, tree type)
9133 {
9134 if (!validate_arg (arg, REAL_TYPE))
9135 return NULL_TREE;
9136
9137 arg = fold_convert_loc (loc, type, arg);
9138 if (TREE_CODE (arg) == REAL_CST)
9139 return fold_abs_const (arg, type);
9140 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9141 }
9142
9143 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9144
9145 static tree
9146 fold_builtin_abs (location_t loc, tree arg, tree type)
9147 {
9148 if (!validate_arg (arg, INTEGER_TYPE))
9149 return NULL_TREE;
9150
9151 arg = fold_convert_loc (loc, type, arg);
9152 if (TREE_CODE (arg) == INTEGER_CST)
9153 return fold_abs_const (arg, type);
9154 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9155 }
9156
9157 /* Fold a fma operation with arguments ARG[012]. */
9158
9159 tree
9160 fold_fma (location_t loc ATTRIBUTE_UNUSED,
9161 tree type, tree arg0, tree arg1, tree arg2)
9162 {
9163 if (TREE_CODE (arg0) == REAL_CST
9164 && TREE_CODE (arg1) == REAL_CST
9165 && TREE_CODE (arg2) == REAL_CST)
9166 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
9167
9168 return NULL_TREE;
9169 }
9170
9171 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
9172
9173 static tree
9174 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
9175 {
9176 if (validate_arg (arg0, REAL_TYPE)
9177 && validate_arg (arg1, REAL_TYPE)
9178 && validate_arg (arg2, REAL_TYPE))
9179 {
9180 tree tem = fold_fma (loc, type, arg0, arg1, arg2);
9181 if (tem)
9182 return tem;
9183
9184 /* ??? Only expand to FMA_EXPR if it's directly supported. */
9185 if (optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
9186 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
9187 }
9188 return NULL_TREE;
9189 }
9190
9191 /* Fold a call to builtin fmin or fmax. */
9192
9193 static tree
9194 fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
9195 tree type, bool max)
9196 {
9197 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9198 {
9199 /* Calculate the result when the argument is a constant. */
9200 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9201
9202 if (res)
9203 return res;
9204
9205 /* If either argument is NaN, return the other one. Avoid the
9206 transformation if we get (and honor) a signalling NaN. Using
9207 omit_one_operand() ensures we create a non-lvalue. */
9208 if (TREE_CODE (arg0) == REAL_CST
9209 && real_isnan (&TREE_REAL_CST (arg0))
9210 && (! HONOR_SNANS (arg0)
9211 || ! TREE_REAL_CST (arg0).signalling))
9212 return omit_one_operand_loc (loc, type, arg1, arg0);
9213 if (TREE_CODE (arg1) == REAL_CST
9214 && real_isnan (&TREE_REAL_CST (arg1))
9215 && (! HONOR_SNANS (arg1)
9216 || ! TREE_REAL_CST (arg1).signalling))
9217 return omit_one_operand_loc (loc, type, arg0, arg1);
9218
9219 /* Transform fmin/fmax(x,x) -> x. */
9220 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9221 return omit_one_operand_loc (loc, type, arg0, arg1);
9222
9223 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9224 functions to return the numeric arg if the other one is NaN.
9225 These tree codes don't honor that, so only transform if
9226 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9227 handled, so we don't have to worry about it either. */
9228 if (flag_finite_math_only)
9229 return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
9230 fold_convert_loc (loc, type, arg0),
9231 fold_convert_loc (loc, type, arg1));
9232 }
9233 return NULL_TREE;
9234 }
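
/* Example: the folds above reduce fmin/fmax (x, x) to x, drop a quiet
   NaN constant operand as in __builtin_fmax (x, __builtin_nan ("")) -> x,
   and under -ffinite-math-only rewrite the calls into plain
   MIN_EXPR/MAX_EXPR trees.  */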
9235
9236 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9237
9238 static tree
9239 fold_builtin_carg (location_t loc, tree arg, tree type)
9240 {
9241 if (validate_arg (arg, COMPLEX_TYPE)
9242 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9243 {
9244 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9245
9246 if (atan2_fn)
9247 {
9248 tree new_arg = builtin_save_expr (arg);
9249 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9250 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9251 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9252 }
9253 }
9254
9255 return NULL_TREE;
9256 }
9257
9258 /* Fold a call to builtin logb/ilogb. */
9259
9260 static tree
9261 fold_builtin_logb (location_t loc, tree arg, tree rettype)
9262 {
9263 if (! validate_arg (arg, REAL_TYPE))
9264 return NULL_TREE;
9265
9266 STRIP_NOPS (arg);
9267
9268 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9269 {
9270 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9271
9272 switch (value->cl)
9273 {
9274 case rvc_nan:
9275 case rvc_inf:
9276 /* If arg is Inf or NaN and we're logb, return it. */
9277 if (TREE_CODE (rettype) == REAL_TYPE)
9278 {
9279 /* For logb(-Inf) we have to return +Inf. */
9280 if (real_isinf (value) && real_isneg (value))
9281 {
9282 REAL_VALUE_TYPE tem;
9283 real_inf (&tem);
9284 return build_real (rettype, tem);
9285 }
9286 return fold_convert_loc (loc, rettype, arg);
9287 }
9288 /* Fall through... */
9289 case rvc_zero:
9290 /* Zero may set errno and/or raise an exception for logb; also,
9291 for ilogb we don't know FP_ILOGB0. */
9292 return NULL_TREE;
9293 case rvc_normal:
9294 /* For normal numbers, proceed iff radix == 2. In GCC,
9295 normalized significands are in the range [0.5, 1.0). We
9296 want the exponent as if they were [1.0, 2.0) so get the
9297 exponent and subtract 1. */
9298 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9299 return fold_convert_loc (loc, rettype,
9300 build_int_cst (integer_type_node,
9301 REAL_EXP (value)-1));
9302 break;
9303 }
9304 }
9305
9306 return NULL_TREE;
9307 }
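
/* Example: for radix-2 constants the fold above yields the result
   directly: __builtin_logb (8.0) becomes 3.0 (since 8.0 = 1.0 * 2**3),
   __builtin_logb (-Inf) becomes +Inf, and NaN arguments are returned
   unchanged.  */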
9308
9309 /* Fold a call to builtin significand, if radix == 2. */
9310
9311 static tree
9312 fold_builtin_significand (location_t loc, tree arg, tree rettype)
9313 {
9314 if (! validate_arg (arg, REAL_TYPE))
9315 return NULL_TREE;
9316
9317 STRIP_NOPS (arg);
9318
9319 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9320 {
9321 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9322
9323 switch (value->cl)
9324 {
9325 case rvc_zero:
9326 case rvc_nan:
9327 case rvc_inf:
9328 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9329 return fold_convert_loc (loc, rettype, arg);
9330 case rvc_normal:
9331 /* For normal numbers, proceed iff radix == 2. */
9332 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9333 {
9334 REAL_VALUE_TYPE result = *value;
9335 /* In GCC, normalized significands are in the range [0.5,
9336 1.0). We want them to be [1.0, 2.0) so set the
9337 exponent to 1. */
9338 SET_REAL_EXP (&result, 1);
9339 return build_real (rettype, result);
9340 }
9341 break;
9342 }
9343 }
9344
9345 return NULL_TREE;
9346 }
9347
9348 /* Fold a call to builtin frexp; we can assume the base is 2. */
9349
9350 static tree
9351 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
9352 {
9353 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9354 return NULL_TREE;
9355
9356 STRIP_NOPS (arg0);
9357
9358 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9359 return NULL_TREE;
9360
9361 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9362
9363 /* Proceed if a valid pointer type was passed in. */
9364 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9365 {
9366 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9367 tree frac, exp;
9368
9369 switch (value->cl)
9370 {
9371 case rvc_zero:
9372 /* For +-0, return (*exp = 0, +-0). */
9373 exp = integer_zero_node;
9374 frac = arg0;
9375 break;
9376 case rvc_nan:
9377 case rvc_inf:
9378 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9379 return omit_one_operand_loc (loc, rettype, arg0, arg1);
9380 case rvc_normal:
9381 {
9382 /* Since the frexp function always expects base 2, and in
9383 GCC normalized significands are already in the range
9384 [0.5, 1.0), we have exactly what frexp wants. */
9385 REAL_VALUE_TYPE frac_rvt = *value;
9386 SET_REAL_EXP (&frac_rvt, 0);
9387 frac = build_real (rettype, frac_rvt);
9388 exp = build_int_cst (integer_type_node, REAL_EXP (value));
9389 }
9390 break;
9391 default:
9392 gcc_unreachable ();
9393 }
9394
9395 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
9396 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
9397 TREE_SIDE_EFFECTS (arg1) = 1;
9398 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
9399 }
9400
9401 return NULL_TREE;
9402 }
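
/* Example: for a constant argument the fold above builds the compound
   expression directly, e.g. __builtin_frexp (8.0, &e) becomes
   (e = 4, 0.5), since 8.0 is represented as 0.5 * 2**4.  */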
9403
9404 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9405 then we can assume the base is two. If it's false, then we have to
9406 check the mode of the TYPE parameter in certain cases. */
9407
9408 static tree
9409 fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
9410 tree type, bool ldexp)
9411 {
9412 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9413 {
9414 STRIP_NOPS (arg0);
9415 STRIP_NOPS (arg1);
9416
9417 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9418 if (real_zerop (arg0) || integer_zerop (arg1)
9419 || (TREE_CODE (arg0) == REAL_CST
9420 && !real_isfinite (&TREE_REAL_CST (arg0))))
9421 return omit_one_operand_loc (loc, type, arg0, arg1);
9422
9423 /* If both arguments are constant, then try to evaluate it. */
9424 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9425 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9426 && tree_fits_shwi_p (arg1))
9427 {
9428 /* Bound the maximum adjustment to twice the range of the
9429 mode's valid exponents. Use labs to ensure the range is
9430 positive as a sanity check. */
9431 const long max_exp_adj = 2 *
9432 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9433 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9434
9435 /* Get the user-requested adjustment. */
9436 const HOST_WIDE_INT req_exp_adj = tree_to_shwi (arg1);
9437
9438 /* The requested adjustment must be inside this range. This
9439 is a preliminary cap to avoid things like overflow; we
9440 may still fail to compute the result for other reasons. */
9441 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9442 {
9443 REAL_VALUE_TYPE initial_result;
9444
9445 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9446
9447 /* Ensure we didn't overflow. */
9448 if (! real_isinf (&initial_result))
9449 {
9450 const REAL_VALUE_TYPE trunc_result
9451 = real_value_truncate (TYPE_MODE (type), initial_result);
9452
9453 /* Only proceed if the target mode can hold the
9454 resulting value. */
9455 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9456 return build_real (type, trunc_result);
9457 }
9458 }
9459 }
9460 }
9461
9462 return NULL_TREE;
9463 }
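
/* Example: __builtin_ldexp (x, 0) reduces above to x, and a fully
   constant call such as __builtin_ldexp (1.5, 3) is evaluated at
   compile time to 12.0, provided the scaled value is exactly
   representable in the target mode.  */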
9464
9465 /* Fold a call to builtin modf. */
9466
9467 static tree
9468 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9469 {
9470 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9471 return NULL_TREE;
9472
9473 STRIP_NOPS (arg0);
9474
9475 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9476 return NULL_TREE;
9477
9478 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9479
9480 /* Proceed if a valid pointer type was passed in. */
9481 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9482 {
9483 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9484 REAL_VALUE_TYPE trunc, frac;
9485
9486 switch (value->cl)
9487 {
9488 case rvc_nan:
9489 case rvc_zero:
9490 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9491 trunc = frac = *value;
9492 break;
9493 case rvc_inf:
9494 /* For +-Inf, return (*arg1 = arg0, +-0). */
9495 frac = dconst0;
9496 frac.sign = value->sign;
9497 trunc = *value;
9498 break;
9499 case rvc_normal:
9500 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9501 real_trunc (&trunc, VOIDmode, value);
9502 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9503 /* If the original number was negative and already
9504 integral, then the fractional part is -0.0. */
9505 if (value->sign && frac.cl == rvc_zero)
9506 frac.sign = value->sign;
9507 break;
9508 }
9509
9510 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9511 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9512 build_real (rettype, trunc));
9513 TREE_SIDE_EFFECTS (arg1) = 1;
9514 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9515 build_real (rettype, frac));
9516 }
9517
9518 return NULL_TREE;
9519 }
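
/* Example: __builtin_modf (3.25, &i) with a constant argument folds
   above to the compound expression (i = 3.0, 0.25); a negative
   integral input such as -2.0 yields (i = -2.0, -0.0), preserving the
   sign of the fractional part.  */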
9520
9521 /* Given a location LOC, an interclass builtin function decl FNDECL
9522 and its single argument ARG, return a folded expression computing
9523 the same, or NULL_TREE if we either couldn't or didn't want to fold
9524 (the latter happens if there's an RTL instruction available). */
9525
9526 static tree
9527 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9528 {
9529 machine_mode mode;
9530
9531 if (!validate_arg (arg, REAL_TYPE))
9532 return NULL_TREE;
9533
9534 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9535 return NULL_TREE;
9536
9537 mode = TYPE_MODE (TREE_TYPE (arg));
9538
9539 /* If there is no optab, try generic code. */
9540 switch (DECL_FUNCTION_CODE (fndecl))
9541 {
9542 tree result;
9543
9544 CASE_FLT_FN (BUILT_IN_ISINF):
9545 {
9546 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9547 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9548 tree const type = TREE_TYPE (arg);
9549 REAL_VALUE_TYPE r;
9550 char buf[128];
9551
9552 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9553 real_from_string (&r, buf);
9554 result = build_call_expr (isgr_fn, 2,
9555 fold_build1_loc (loc, ABS_EXPR, type, arg),
9556 build_real (type, r));
9557 return result;
9558 }
9559 CASE_FLT_FN (BUILT_IN_FINITE):
9560 case BUILT_IN_ISFINITE:
9561 {
9562 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9563 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9564 tree const type = TREE_TYPE (arg);
9565 REAL_VALUE_TYPE r;
9566 char buf[128];
9567
9568 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9569 real_from_string (&r, buf);
9570 result = build_call_expr (isle_fn, 2,
9571 fold_build1_loc (loc, ABS_EXPR, type, arg),
9572 build_real (type, r));
9573 /*result = fold_build2_loc (loc, UNGT_EXPR,
9574 TREE_TYPE (TREE_TYPE (fndecl)),
9575 fold_build1_loc (loc, ABS_EXPR, type, arg),
9576 build_real (type, r));
9577 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9578 TREE_TYPE (TREE_TYPE (fndecl)),
9579 result);*/
9580 return result;
9581 }
9582 case BUILT_IN_ISNORMAL:
9583 {
9584 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9585 islessequal(fabs(x),DBL_MAX). */
9586 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9587 tree const isge_fn = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
9588 tree const type = TREE_TYPE (arg);
9589 REAL_VALUE_TYPE rmax, rmin;
9590 char buf[128];
9591
9592 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9593 real_from_string (&rmax, buf);
9594 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9595 real_from_string (&rmin, buf);
9596 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9597 result = build_call_expr (isle_fn, 2, arg,
9598 build_real (type, rmax));
9599 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
9600 build_call_expr (isge_fn, 2, arg,
9601 build_real (type, rmin)));
9602 return result;
9603 }
9604 default:
9605 break;
9606 }
9607
9608 return NULL_TREE;
9609 }
9610
9611 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
9612 ARG is the argument for the call. */
9613
9614 static tree
9615 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
9616 {
9617 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9618 REAL_VALUE_TYPE r;
9619
9620 if (!validate_arg (arg, REAL_TYPE))
9621 return NULL_TREE;
9622
9623 switch (builtin_index)
9624 {
9625 case BUILT_IN_ISINF:
9626 if (!HONOR_INFINITIES (arg))
9627 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9628
9629 if (TREE_CODE (arg) == REAL_CST)
9630 {
9631 r = TREE_REAL_CST (arg);
9632 if (real_isinf (&r))
9633 return real_compare (GT_EXPR, &r, &dconst0)
9634 ? integer_one_node : integer_minus_one_node;
9635 else
9636 return integer_zero_node;
9637 }
9638
9639 return NULL_TREE;
9640
9641 case BUILT_IN_ISINF_SIGN:
9642 {
9643 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9644 /* In a boolean context, GCC will fold the inner COND_EXPR to
9645 1. So e.g. "if (isinf_sign(x))" would be folded to just
9646 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9647 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
9648 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
9649 tree tmp = NULL_TREE;
9650
9651 arg = builtin_save_expr (arg);
9652
9653 if (signbit_fn && isinf_fn)
9654 {
9655 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9656 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
9657
9658 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9659 signbit_call, integer_zero_node);
9660 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9661 isinf_call, integer_zero_node);
9662
9663 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
9664 integer_minus_one_node, integer_one_node);
9665 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9666 isinf_call, tmp,
9667 integer_zero_node);
9668 }
9669
9670 return tmp;
9671 }
9672
9673 case BUILT_IN_ISFINITE:
9674 if (!HONOR_NANS (arg)
9675 && !HONOR_INFINITIES (arg))
9676 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9677
9678 if (TREE_CODE (arg) == REAL_CST)
9679 {
9680 r = TREE_REAL_CST (arg);
9681 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
9682 }
9683
9684 return NULL_TREE;
9685
9686 case BUILT_IN_ISNAN:
9687 if (!HONOR_NANS (arg))
9688 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9689
9690 if (TREE_CODE (arg) == REAL_CST)
9691 {
9692 r = TREE_REAL_CST (arg);
9693 return real_isnan (&r) ? integer_one_node : integer_zero_node;
9694 }
9695
9696 arg = builtin_save_expr (arg);
9697 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
9698
9699 default:
9700 gcc_unreachable ();
9701 }
9702 }
9703
9704 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9705 This builtin will generate code to return the appropriate floating
9706 point classification depending on the value of the floating point
9707 number passed in. The possible return values must be supplied as
9708 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9709 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
9710 one floating point argument which is "type generic". */
9711
9712 static tree
9713 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
9714 {
9715 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9716 arg, type, res, tmp;
9717 machine_mode mode;
9718 REAL_VALUE_TYPE r;
9719 char buf[128];
9720
9721 /* Verify the required arguments in the original call. */
9722 if (nargs != 6
9723 || !validate_arg (args[0], INTEGER_TYPE)
9724 || !validate_arg (args[1], INTEGER_TYPE)
9725 || !validate_arg (args[2], INTEGER_TYPE)
9726 || !validate_arg (args[3], INTEGER_TYPE)
9727 || !validate_arg (args[4], INTEGER_TYPE)
9728 || !validate_arg (args[5], REAL_TYPE))
9729 return NULL_TREE;
9730
9731 fp_nan = args[0];
9732 fp_infinite = args[1];
9733 fp_normal = args[2];
9734 fp_subnormal = args[3];
9735 fp_zero = args[4];
9736 arg = args[5];
9737 type = TREE_TYPE (arg);
9738 mode = TYPE_MODE (type);
9739 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9740
9741 /* fpclassify(x) ->
9742 isnan(x) ? FP_NAN :
9743 (fabs(x) == Inf ? FP_INFINITE :
9744 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9745 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9746
9747 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9748 build_real (type, dconst0));
9749 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9750 tmp, fp_zero, fp_subnormal);
9751
9752 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9753 real_from_string (&r, buf);
9754 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9755 arg, build_real (type, r));
9756 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
9757
9758 if (HONOR_INFINITIES (mode))
9759 {
9760 real_inf (&r);
9761 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9762 build_real (type, r));
9763 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9764 fp_infinite, res);
9765 }
9766
9767 if (HONOR_NANS (mode))
9768 {
9769 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9770 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
9771 }
9772
9773 return res;
9774 }
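
/* Example: a call such as

     __builtin_fpclassify (FP_NAN, FP_INFINITE, FP_NORMAL,
                           FP_SUBNORMAL, FP_ZERO, x)

   with a double X therefore expands into the conditional nest sketched
   above, comparing fabs (x) against 0x1p-1022 (the smallest normal
   double) and against infinity, with the NaN test emitted only when
   the mode honors NaNs.  */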
9775
9776 /* Fold a call to an unordered comparison function such as
9777 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9778 being called and ARG0 and ARG1 are the arguments for the call.
9779 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9780 the opposite of the desired result. UNORDERED_CODE is used
9781 for modes that can hold NaNs and ORDERED_CODE is used for
9782 the rest. */
9783
9784 static tree
9785 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9786 enum tree_code unordered_code,
9787 enum tree_code ordered_code)
9788 {
9789 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9790 enum tree_code code;
9791 tree type0, type1;
9792 enum tree_code code0, code1;
9793 tree cmp_type = NULL_TREE;
9794
9795 type0 = TREE_TYPE (arg0);
9796 type1 = TREE_TYPE (arg1);
9797
9798 code0 = TREE_CODE (type0);
9799 code1 = TREE_CODE (type1);
9800
9801 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9802 /* Choose the wider of two real types. */
9803 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9804 ? type0 : type1;
9805 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9806 cmp_type = type0;
9807 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9808 cmp_type = type1;
9809
9810 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9811 arg1 = fold_convert_loc (loc, cmp_type, arg1);
9812
9813 if (unordered_code == UNORDERED_EXPR)
9814 {
9815 if (!HONOR_NANS (arg0))
9816 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9817 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
9818 }
9819
9820 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
9821 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9822 fold_build2_loc (loc, code, type, arg0, arg1));
9823 }
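
/* Example: __builtin_isgreater (x, y) is rewritten above into the
   equivalent of !(x <= y) built with UNLE_EXPR, which raises no
   "invalid" exception on quiet NaN operands; when the operands' mode
   cannot hold NaNs, the ordinary LE_EXPR is used instead.  */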
9824
9825 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
9826 arithmetic if it can never overflow, or into internal functions that
9827 return both the result of the arithmetic and an overflow flag in
9828 a complex integer result, or some other check for overflow. */
9829
9830 static tree
9831 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
9832 tree arg0, tree arg1, tree arg2)
9833 {
9834 enum internal_fn ifn = IFN_LAST;
9835 tree type = TREE_TYPE (TREE_TYPE (arg2));
9836 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
9837 switch (fcode)
9838 {
9839 case BUILT_IN_ADD_OVERFLOW:
9840 case BUILT_IN_SADD_OVERFLOW:
9841 case BUILT_IN_SADDL_OVERFLOW:
9842 case BUILT_IN_SADDLL_OVERFLOW:
9843 case BUILT_IN_UADD_OVERFLOW:
9844 case BUILT_IN_UADDL_OVERFLOW:
9845 case BUILT_IN_UADDLL_OVERFLOW:
9846 ifn = IFN_ADD_OVERFLOW;
9847 break;
9848 case BUILT_IN_SUB_OVERFLOW:
9849 case BUILT_IN_SSUB_OVERFLOW:
9850 case BUILT_IN_SSUBL_OVERFLOW:
9851 case BUILT_IN_SSUBLL_OVERFLOW:
9852 case BUILT_IN_USUB_OVERFLOW:
9853 case BUILT_IN_USUBL_OVERFLOW:
9854 case BUILT_IN_USUBLL_OVERFLOW:
9855 ifn = IFN_SUB_OVERFLOW;
9856 break;
9857 case BUILT_IN_MUL_OVERFLOW:
9858 case BUILT_IN_SMUL_OVERFLOW:
9859 case BUILT_IN_SMULL_OVERFLOW:
9860 case BUILT_IN_SMULLL_OVERFLOW:
9861 case BUILT_IN_UMUL_OVERFLOW:
9862 case BUILT_IN_UMULL_OVERFLOW:
9863 case BUILT_IN_UMULLL_OVERFLOW:
9864 ifn = IFN_MUL_OVERFLOW;
9865 break;
9866 default:
9867 gcc_unreachable ();
9868 }
9869 tree ctype = build_complex_type (type);
9870 tree call = build_call_expr_internal_loc (loc, ifn, ctype,
9871 2, arg0, arg1);
9872 tree tgt = save_expr (call);
9873 tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
9874 tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
9875 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
9876 tree store
9877 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
9878 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
9879 }
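
/* Example sketch: with the code above, __builtin_add_overflow (a, b, &res)
   becomes the equivalent of

     ctmp = .ADD_OVERFLOW (a, b);    -- complex integer result
     res = REALPART (ctmp);          -- the wrapped sum
     (bool) IMAGPART (ctmp)          -- nonzero iff it overflowed

   where .ADD_OVERFLOW denotes the IFN_ADD_OVERFLOW internal call and
   ctmp a temporary named here only for illustration.  */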
9880
9881 /* Fold a call to built-in function FNDECL with 0 arguments.
9882 This function returns NULL_TREE if no simplification was possible. */
9883
9884 static tree
9885 fold_builtin_0 (location_t loc, tree fndecl)
9886 {
9887 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9888 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9889 switch (fcode)
9890 {
9891 CASE_FLT_FN (BUILT_IN_INF):
9892 case BUILT_IN_INFD32:
9893 case BUILT_IN_INFD64:
9894 case BUILT_IN_INFD128:
9895 return fold_builtin_inf (loc, type, true);
9896
9897 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9898 return fold_builtin_inf (loc, type, false);
9899
9900 case BUILT_IN_CLASSIFY_TYPE:
9901 return fold_builtin_classify_type (NULL_TREE);
9902
9903 default:
9904 break;
9905 }
9906 return NULL_TREE;
9907 }
9908
9909 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9910 This function returns NULL_TREE if no simplification was possible. */
9911
9912 static tree
9913 fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
9914 {
9915 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9916 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9917 switch (fcode)
9918 {
9919 case BUILT_IN_CONSTANT_P:
9920 {
9921 tree val = fold_builtin_constant_p (arg0);
9922
9923 /* Gimplification will pull the CALL_EXPR for the builtin out of
9924 an if condition. When not optimizing, we'll not CSE it back.
9925 To avoid link-error regressions, return false now. */
9926 if (!val && !optimize)
9927 val = integer_zero_node;
9928
9929 return val;
9930 }
9931
9932 case BUILT_IN_CLASSIFY_TYPE:
9933 return fold_builtin_classify_type (arg0);
9934
9935 case BUILT_IN_STRLEN:
9936 return fold_builtin_strlen (loc, type, arg0);
9937
9938 CASE_FLT_FN (BUILT_IN_FABS):
9939 case BUILT_IN_FABSD32:
9940 case BUILT_IN_FABSD64:
9941 case BUILT_IN_FABSD128:
9942 return fold_builtin_fabs (loc, arg0, type);
9943
9944 case BUILT_IN_ABS:
9945 case BUILT_IN_LABS:
9946 case BUILT_IN_LLABS:
9947 case BUILT_IN_IMAXABS:
9948 return fold_builtin_abs (loc, arg0, type);
9949
9950 CASE_FLT_FN (BUILT_IN_CONJ):
9951 if (validate_arg (arg0, COMPLEX_TYPE)
9952 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9953 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9954 break;
9955
9956 CASE_FLT_FN (BUILT_IN_CREAL):
9957 if (validate_arg (arg0, COMPLEX_TYPE)
9958 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9959 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
9960 break;
9961
9962 CASE_FLT_FN (BUILT_IN_CIMAG):
9963 if (validate_arg (arg0, COMPLEX_TYPE)
9964 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9965 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9966 break;
9967
9968 CASE_FLT_FN (BUILT_IN_CCOS):
9969 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ false);
9970
9971 CASE_FLT_FN (BUILT_IN_CCOSH):
9972 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ true);
9973
9974 CASE_FLT_FN (BUILT_IN_CPROJ):
9975 return fold_builtin_cproj (loc, arg0, type);
9976
9977 CASE_FLT_FN (BUILT_IN_CSIN):
9978 if (validate_arg (arg0, COMPLEX_TYPE)
9979 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9980 return do_mpc_arg1 (arg0, type, mpc_sin);
9981 break;
9982
9983 CASE_FLT_FN (BUILT_IN_CSINH):
9984 if (validate_arg (arg0, COMPLEX_TYPE)
9985 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9986 return do_mpc_arg1 (arg0, type, mpc_sinh);
9987 break;
9988
9989 CASE_FLT_FN (BUILT_IN_CTAN):
9990 if (validate_arg (arg0, COMPLEX_TYPE)
9991 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9992 return do_mpc_arg1 (arg0, type, mpc_tan);
9993 break;
9994
9995 CASE_FLT_FN (BUILT_IN_CTANH):
9996 if (validate_arg (arg0, COMPLEX_TYPE)
9997 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9998 return do_mpc_arg1 (arg0, type, mpc_tanh);
9999 break;
10000
10001 CASE_FLT_FN (BUILT_IN_CLOG):
10002 if (validate_arg (arg0, COMPLEX_TYPE)
10003 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10004 return do_mpc_arg1 (arg0, type, mpc_log);
10005 break;
10006
10007 CASE_FLT_FN (BUILT_IN_CSQRT):
10008 if (validate_arg (arg0, COMPLEX_TYPE)
10009 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10010 return do_mpc_arg1 (arg0, type, mpc_sqrt);
10011 break;
10012
10013 CASE_FLT_FN (BUILT_IN_CASIN):
10014 if (validate_arg (arg0, COMPLEX_TYPE)
10015 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10016 return do_mpc_arg1 (arg0, type, mpc_asin);
10017 break;
10018
10019 CASE_FLT_FN (BUILT_IN_CACOS):
10020 if (validate_arg (arg0, COMPLEX_TYPE)
10021 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10022 return do_mpc_arg1 (arg0, type, mpc_acos);
10023 break;
10024
10025 CASE_FLT_FN (BUILT_IN_CATAN):
10026 if (validate_arg (arg0, COMPLEX_TYPE)
10027 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10028 return do_mpc_arg1 (arg0, type, mpc_atan);
10029 break;
10030
10031 CASE_FLT_FN (BUILT_IN_CASINH):
10032 if (validate_arg (arg0, COMPLEX_TYPE)
10033 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10034 return do_mpc_arg1 (arg0, type, mpc_asinh);
10035 break;
10036
10037 CASE_FLT_FN (BUILT_IN_CACOSH):
10038 if (validate_arg (arg0, COMPLEX_TYPE)
10039 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10040 return do_mpc_arg1 (arg0, type, mpc_acosh);
10041 break;
10042
10043 CASE_FLT_FN (BUILT_IN_CATANH):
10044 if (validate_arg (arg0, COMPLEX_TYPE)
10045 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10046 return do_mpc_arg1 (arg0, type, mpc_atanh);
10047 break;
10048
10049 CASE_FLT_FN (BUILT_IN_CABS):
10050 return fold_builtin_cabs (loc, arg0, type, fndecl);
10051
10052 CASE_FLT_FN (BUILT_IN_CARG):
10053 return fold_builtin_carg (loc, arg0, type);
10054
10055 CASE_FLT_FN (BUILT_IN_SQRT):
10056 return fold_builtin_sqrt (loc, arg0, type);
10057
10058 CASE_FLT_FN (BUILT_IN_CBRT):
10059 return fold_builtin_cbrt (loc, arg0, type);
10060
10061 CASE_FLT_FN (BUILT_IN_ASIN):
10062 if (validate_arg (arg0, REAL_TYPE))
10063 return do_mpfr_arg1 (arg0, type, mpfr_asin,
10064 &dconstm1, &dconst1, true);
10065 break;
10066
10067 CASE_FLT_FN (BUILT_IN_ACOS):
10068 if (validate_arg (arg0, REAL_TYPE))
10069 return do_mpfr_arg1 (arg0, type, mpfr_acos,
10070 &dconstm1, &dconst1, true);
10071 break;
10072
10073 CASE_FLT_FN (BUILT_IN_ATAN):
10074 if (validate_arg (arg0, REAL_TYPE))
10075 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
10076 break;
10077
10078 CASE_FLT_FN (BUILT_IN_ASINH):
10079 if (validate_arg (arg0, REAL_TYPE))
10080 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
10081 break;
10082
10083 CASE_FLT_FN (BUILT_IN_ACOSH):
10084 if (validate_arg (arg0, REAL_TYPE))
10085 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
10086 &dconst1, NULL, true);
10087 break;
10088
10089 CASE_FLT_FN (BUILT_IN_ATANH):
10090 if (validate_arg (arg0, REAL_TYPE))
10091 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
10092 &dconstm1, &dconst1, false);
10093 break;
10094
10095 CASE_FLT_FN (BUILT_IN_SIN):
10096 if (validate_arg (arg0, REAL_TYPE))
10097 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
10098 break;
10099
10100 CASE_FLT_FN (BUILT_IN_COS):
10101 return fold_builtin_cos (loc, arg0, type, fndecl);
10102
10103 CASE_FLT_FN (BUILT_IN_TAN):
10104 return fold_builtin_tan (arg0, type);
10105
10106 CASE_FLT_FN (BUILT_IN_CEXP):
10107 return fold_builtin_cexp (loc, arg0, type);
10108
10109 CASE_FLT_FN (BUILT_IN_CEXPI):
10110 if (validate_arg (arg0, REAL_TYPE))
10111 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
10112 break;
10113
10114 CASE_FLT_FN (BUILT_IN_SINH):
10115 if (validate_arg (arg0, REAL_TYPE))
10116 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
10117 break;
10118
10119 CASE_FLT_FN (BUILT_IN_COSH):
10120 return fold_builtin_cosh (loc, arg0, type, fndecl);
10121
10122 CASE_FLT_FN (BUILT_IN_TANH):
10123 if (validate_arg (arg0, REAL_TYPE))
10124 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
10125 break;
10126
10127 CASE_FLT_FN (BUILT_IN_ERF):
10128 if (validate_arg (arg0, REAL_TYPE))
10129 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
10130 break;
10131
10132 CASE_FLT_FN (BUILT_IN_ERFC):
10133 if (validate_arg (arg0, REAL_TYPE))
10134 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
10135 break;
10136
10137 CASE_FLT_FN (BUILT_IN_TGAMMA):
10138 if (validate_arg (arg0, REAL_TYPE))
10139 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
10140 break;
10141
10142 CASE_FLT_FN (BUILT_IN_EXP):
10143 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
10144
10145 CASE_FLT_FN (BUILT_IN_EXP2):
10146 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
10147
10148 CASE_FLT_FN (BUILT_IN_EXP10):
10149 CASE_FLT_FN (BUILT_IN_POW10):
10150 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
10151
10152 CASE_FLT_FN (BUILT_IN_EXPM1):
10153 if (validate_arg (arg0, REAL_TYPE))
10154 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
10155 break;
10156
10157 CASE_FLT_FN (BUILT_IN_LOG):
10158 if (validate_arg (arg0, REAL_TYPE))
10159 return do_mpfr_arg1 (arg0, type, mpfr_log, &dconst0, NULL, false);
10160 break;
10161
10162 CASE_FLT_FN (BUILT_IN_LOG2):
10163 if (validate_arg (arg0, REAL_TYPE))
10164 return do_mpfr_arg1 (arg0, type, mpfr_log2, &dconst0, NULL, false);
10165 break;
10166
10167 CASE_FLT_FN (BUILT_IN_LOG10):
10168 if (validate_arg (arg0, REAL_TYPE))
10169 return do_mpfr_arg1 (arg0, type, mpfr_log10, &dconst0, NULL, false);
10170 break;
10171
10172 CASE_FLT_FN (BUILT_IN_LOG1P):
10173 if (validate_arg (arg0, REAL_TYPE))
10174 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
10175 &dconstm1, NULL, false);
10176 break;
10177
10178 CASE_FLT_FN (BUILT_IN_J0):
10179 if (validate_arg (arg0, REAL_TYPE))
10180 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10181 NULL, NULL, 0);
10182 break;
10183
10184 CASE_FLT_FN (BUILT_IN_J1):
10185 if (validate_arg (arg0, REAL_TYPE))
10186 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10187 NULL, NULL, 0);
10188 break;
10189
10190 CASE_FLT_FN (BUILT_IN_Y0):
10191 if (validate_arg (arg0, REAL_TYPE))
10192 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10193 &dconst0, NULL, false);
10194 break;
10195
10196 CASE_FLT_FN (BUILT_IN_Y1):
10197 if (validate_arg (arg0, REAL_TYPE))
10198 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10199 &dconst0, NULL, false);
10200 break;
10201
10202 CASE_FLT_FN (BUILT_IN_NAN):
10203 case BUILT_IN_NAND32:
10204 case BUILT_IN_NAND64:
10205 case BUILT_IN_NAND128:
10206 return fold_builtin_nan (arg0, type, true);
10207
10208 CASE_FLT_FN (BUILT_IN_NANS):
10209 return fold_builtin_nan (arg0, type, false);
10210
10211 CASE_FLT_FN (BUILT_IN_FLOOR):
10212 return fold_builtin_floor (loc, fndecl, arg0);
10213
10214 CASE_FLT_FN (BUILT_IN_CEIL):
10215 return fold_builtin_ceil (loc, fndecl, arg0);
10216
10217 CASE_FLT_FN (BUILT_IN_TRUNC):
10218 return fold_builtin_trunc (loc, fndecl, arg0);
10219
10220 CASE_FLT_FN (BUILT_IN_ROUND):
10221 return fold_builtin_round (loc, fndecl, arg0);
10222
10223 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10224 CASE_FLT_FN (BUILT_IN_RINT):
10225 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
10226
10227 CASE_FLT_FN (BUILT_IN_ICEIL):
10228 CASE_FLT_FN (BUILT_IN_LCEIL):
10229 CASE_FLT_FN (BUILT_IN_LLCEIL):
10230 CASE_FLT_FN (BUILT_IN_LFLOOR):
10231 CASE_FLT_FN (BUILT_IN_IFLOOR):
10232 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10233 CASE_FLT_FN (BUILT_IN_IROUND):
10234 CASE_FLT_FN (BUILT_IN_LROUND):
10235 CASE_FLT_FN (BUILT_IN_LLROUND):
10236 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
10237
10238 CASE_FLT_FN (BUILT_IN_IRINT):
10239 CASE_FLT_FN (BUILT_IN_LRINT):
10240 CASE_FLT_FN (BUILT_IN_LLRINT):
10241 return fold_fixed_mathfn (loc, fndecl, arg0);
10242
10243 case BUILT_IN_BSWAP16:
10244 case BUILT_IN_BSWAP32:
10245 case BUILT_IN_BSWAP64:
10246 return fold_builtin_bswap (fndecl, arg0);
10247
10248 CASE_INT_FN (BUILT_IN_FFS):
10249 CASE_INT_FN (BUILT_IN_CLZ):
10250 CASE_INT_FN (BUILT_IN_CTZ):
10251 CASE_INT_FN (BUILT_IN_CLRSB):
10252 CASE_INT_FN (BUILT_IN_POPCOUNT):
10253 CASE_INT_FN (BUILT_IN_PARITY):
10254 return fold_builtin_bitop (fndecl, arg0);
10255
10256 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10257 return fold_builtin_signbit (loc, arg0, type);
10258
10259 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10260 return fold_builtin_significand (loc, arg0, type);
10261
10262 CASE_FLT_FN (BUILT_IN_ILOGB):
10263 CASE_FLT_FN (BUILT_IN_LOGB):
10264 return fold_builtin_logb (loc, arg0, type);
10265
10266 case BUILT_IN_ISASCII:
10267 return fold_builtin_isascii (loc, arg0);
10268
10269 case BUILT_IN_TOASCII:
10270 return fold_builtin_toascii (loc, arg0);
10271
10272 case BUILT_IN_ISDIGIT:
10273 return fold_builtin_isdigit (loc, arg0);
10274
10275 CASE_FLT_FN (BUILT_IN_FINITE):
10276 case BUILT_IN_FINITED32:
10277 case BUILT_IN_FINITED64:
10278 case BUILT_IN_FINITED128:
10279 case BUILT_IN_ISFINITE:
10280 {
10281 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10282 if (ret)
10283 return ret;
10284 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10285 }
10286
10287 CASE_FLT_FN (BUILT_IN_ISINF):
10288 case BUILT_IN_ISINFD32:
10289 case BUILT_IN_ISINFD64:
10290 case BUILT_IN_ISINFD128:
10291 {
10292 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10293 if (ret)
10294 return ret;
10295 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10296 }
10297
10298 case BUILT_IN_ISNORMAL:
10299 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10300
10301 case BUILT_IN_ISINF_SIGN:
10302 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10303
10304 CASE_FLT_FN (BUILT_IN_ISNAN):
10305 case BUILT_IN_ISNAND32:
10306 case BUILT_IN_ISNAND64:
10307 case BUILT_IN_ISNAND128:
10308 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10309
10310 case BUILT_IN_FREE:
10311 if (integer_zerop (arg0))
10312 return build_empty_stmt (loc);
10313 break;
10314
10315 default:
10316 break;
10317 }
10318
10319 return NULL_TREE;
10320
10321 }
10322
10323 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10324 This function returns NULL_TREE if no simplification was possible. */
10325
10326 static tree
10327 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
10328 {
10329 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10330 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10331
10332 switch (fcode)
10333 {
10334 CASE_FLT_FN (BUILT_IN_JN):
10335 if (validate_arg (arg0, INTEGER_TYPE)
10336 && validate_arg (arg1, REAL_TYPE))
10337 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10338 break;
10339
10340 CASE_FLT_FN (BUILT_IN_YN):
10341 if (validate_arg (arg0, INTEGER_TYPE)
10342 && validate_arg (arg1, REAL_TYPE))
10343 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10344 &dconst0, false);
10345 break;
10346
10347 CASE_FLT_FN (BUILT_IN_DREM):
10348 CASE_FLT_FN (BUILT_IN_REMAINDER):
10349 if (validate_arg (arg0, REAL_TYPE)
10350 && validate_arg (arg1, REAL_TYPE))
10351 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10352 break;
10353
10354 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10355 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10356 if (validate_arg (arg0, REAL_TYPE)
10357 && validate_arg (arg1, POINTER_TYPE))
10358 return do_mpfr_lgamma_r (arg0, arg1, type);
10359 break;
10360
10361 CASE_FLT_FN (BUILT_IN_ATAN2):
10362 if (validate_arg (arg0, REAL_TYPE)
10363 && validate_arg (arg1, REAL_TYPE))
10364 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10365 break;
10366
10367 CASE_FLT_FN (BUILT_IN_FDIM):
10368 if (validate_arg (arg0, REAL_TYPE)
10369 && validate_arg (arg1, REAL_TYPE))
10370 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10371 break;
10372
10373 CASE_FLT_FN (BUILT_IN_HYPOT):
10374 return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
10375
10376 CASE_FLT_FN (BUILT_IN_CPOW):
10377 if (validate_arg (arg0, COMPLEX_TYPE)
10378 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10379 && validate_arg (arg1, COMPLEX_TYPE)
10380 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
10381 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
10382 break;
10383
10384 CASE_FLT_FN (BUILT_IN_LDEXP):
10385 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
10386 CASE_FLT_FN (BUILT_IN_SCALBN):
10387 CASE_FLT_FN (BUILT_IN_SCALBLN):
10388 return fold_builtin_load_exponent (loc, arg0, arg1,
10389 type, /*ldexp=*/false);
10390
10391 CASE_FLT_FN (BUILT_IN_FREXP):
10392 return fold_builtin_frexp (loc, arg0, arg1, type);
10393
10394 CASE_FLT_FN (BUILT_IN_MODF):
10395 return fold_builtin_modf (loc, arg0, arg1, type);
10396
10397 case BUILT_IN_STRSTR:
10398 return fold_builtin_strstr (loc, arg0, arg1, type);
10399
10400 case BUILT_IN_STRSPN:
10401 return fold_builtin_strspn (loc, arg0, arg1);
10402
10403 case BUILT_IN_STRCSPN:
10404 return fold_builtin_strcspn (loc, arg0, arg1);
10405
10406 case BUILT_IN_STRCHR:
10407 case BUILT_IN_INDEX:
10408 return fold_builtin_strchr (loc, arg0, arg1, type);
10409
10410 case BUILT_IN_STRRCHR:
10411 case BUILT_IN_RINDEX:
10412 return fold_builtin_strrchr (loc, arg0, arg1, type);
10413
10414 case BUILT_IN_STRCMP:
10415 return fold_builtin_strcmp (loc, arg0, arg1);
10416
10417 case BUILT_IN_STRPBRK:
10418 return fold_builtin_strpbrk (loc, arg0, arg1, type);
10419
10420 case BUILT_IN_EXPECT:
10421 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);
10422
10423 CASE_FLT_FN (BUILT_IN_POW):
10424 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
10425
10426 CASE_FLT_FN (BUILT_IN_POWI):
10427 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
10428
10429 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10430 return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
10431
10432 CASE_FLT_FN (BUILT_IN_FMIN):
10433 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
10434
10435 CASE_FLT_FN (BUILT_IN_FMAX):
10436 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
10437
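/* Each of the following folds builds the logical negation of the
   supplied comparison: e.g. __builtin_isgreater (x, y) becomes,
   roughly, !(x UNLE y) when NaNs are honored and !(x <= y)
   otherwise, so a NaN operand yields 0 without raising "invalid".  */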
10438 case BUILT_IN_ISGREATER:
10439 return fold_builtin_unordered_cmp (loc, fndecl,
10440 arg0, arg1, UNLE_EXPR, LE_EXPR);
10441 case BUILT_IN_ISGREATEREQUAL:
10442 return fold_builtin_unordered_cmp (loc, fndecl,
10443 arg0, arg1, UNLT_EXPR, LT_EXPR);
10444 case BUILT_IN_ISLESS:
10445 return fold_builtin_unordered_cmp (loc, fndecl,
10446 arg0, arg1, UNGE_EXPR, GE_EXPR);
10447 case BUILT_IN_ISLESSEQUAL:
10448 return fold_builtin_unordered_cmp (loc, fndecl,
10449 arg0, arg1, UNGT_EXPR, GT_EXPR);
10450 case BUILT_IN_ISLESSGREATER:
10451 return fold_builtin_unordered_cmp (loc, fndecl,
10452 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10453 case BUILT_IN_ISUNORDERED:
10454 return fold_builtin_unordered_cmp (loc, fndecl,
10455 arg0, arg1, UNORDERED_EXPR,
10456 NOP_EXPR);
10457
10458 /* We do the folding for va_start in the expander. */
10459 case BUILT_IN_VA_START:
10460 break;
10461
10462 case BUILT_IN_OBJECT_SIZE:
10463 return fold_builtin_object_size (arg0, arg1);
10464
10465 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
10466 return fold_builtin_atomic_always_lock_free (arg0, arg1);
10467
10468 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
10469 return fold_builtin_atomic_is_lock_free (arg0, arg1);
10470
10471 default:
10472 break;
10473 }
10474 return NULL_TREE;
10475 }
10476
10477 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10478 and ARG2.
10479 This function returns NULL_TREE if no simplification was possible. */
10480
10481 static tree
10482 fold_builtin_3 (location_t loc, tree fndecl,
10483 tree arg0, tree arg1, tree arg2)
10484 {
10485 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10486 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10487 switch (fcode)
10488 {
10489
10490 CASE_FLT_FN (BUILT_IN_SINCOS):
10491 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10492
10493 CASE_FLT_FN (BUILT_IN_FMA):
10494 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
10495
10496
10497 CASE_FLT_FN (BUILT_IN_REMQUO):
10498 if (validate_arg (arg0, REAL_TYPE)
10499 && validate_arg (arg1, REAL_TYPE)
10500 && validate_arg (arg2, POINTER_TYPE))
10501 return do_mpfr_remquo (arg0, arg1, arg2);
10502 break;
10503
10504 case BUILT_IN_STRNCMP:
10505 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10506
10507 case BUILT_IN_MEMCHR:
10508 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10509
10510 case BUILT_IN_BCMP:
10511 case BUILT_IN_MEMCMP:
10512 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
10513
10514 case BUILT_IN_EXPECT:
10515 return fold_builtin_expect (loc, arg0, arg1, arg2);
10516
10517 case BUILT_IN_ADD_OVERFLOW:
10518 case BUILT_IN_SUB_OVERFLOW:
10519 case BUILT_IN_MUL_OVERFLOW:
10520 case BUILT_IN_SADD_OVERFLOW:
10521 case BUILT_IN_SADDL_OVERFLOW:
10522 case BUILT_IN_SADDLL_OVERFLOW:
10523 case BUILT_IN_SSUB_OVERFLOW:
10524 case BUILT_IN_SSUBL_OVERFLOW:
10525 case BUILT_IN_SSUBLL_OVERFLOW:
10526 case BUILT_IN_SMUL_OVERFLOW:
10527 case BUILT_IN_SMULL_OVERFLOW:
10528 case BUILT_IN_SMULLL_OVERFLOW:
10529 case BUILT_IN_UADD_OVERFLOW:
10530 case BUILT_IN_UADDL_OVERFLOW:
10531 case BUILT_IN_UADDLL_OVERFLOW:
10532 case BUILT_IN_USUB_OVERFLOW:
10533 case BUILT_IN_USUBL_OVERFLOW:
10534 case BUILT_IN_USUBLL_OVERFLOW:
10535 case BUILT_IN_UMUL_OVERFLOW:
10536 case BUILT_IN_UMULL_OVERFLOW:
10537 case BUILT_IN_UMULLL_OVERFLOW:
10538 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
10539
10540 default:
10541 break;
10542 }
10543 return NULL_TREE;
10544 }
10545
10546 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10547 arguments. The final bool argument (formerly IGNORE) is unused.
10548 This function returns NULL_TREE if no simplification was
10549 possible. */
10550
10551 tree
10552 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
10553 {
10554 tree ret = NULL_TREE;
10555
10556 switch (nargs)
10557 {
10558 case 0:
10559 ret = fold_builtin_0 (loc, fndecl);
10560 break;
10561 case 1:
10562 ret = fold_builtin_1 (loc, fndecl, args[0]);
10563 break;
10564 case 2:
10565 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
10566 break;
10567 case 3:
10568 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
10569 break;
10570 default:
10571 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
10572 break;
10573 }
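  /* Attach the call's location to the folded tree and mark it with
     TREE_NO_WARNING so that removing the original call does not
     trigger "statement with no effect" style warnings later.  */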
10574 if (ret)
10575 {
10576 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10577 SET_EXPR_LOCATION (ret, loc);
10578 TREE_NO_WARNING (ret) = 1;
10579 return ret;
10580 }
10581 return NULL_TREE;
10582 }
10583
10584 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
10585 list ARGS along with N new arguments in NEWARGS. SKIP is the number
10586 of arguments in ARGS to be omitted. OLDNARGS is the number of
10587 elements in ARGS. */
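/* For example, with OLDNARGS == 3, SKIP == 2 and N == 1, a call
   f (a, b, c) is rewritten as fndecl (x, c), where x is the single
   argument taken from NEWARGS.  */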
10588
10589 static tree
10590 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
10591 int skip, tree fndecl, int n, va_list newargs)
10592 {
10593 int nargs = oldnargs - skip + n;
10594 tree *buffer;
10595
10596 if (n > 0)
10597 {
10598 int i, j;
10599
10600 buffer = XALLOCAVEC (tree, nargs);
10601 for (i = 0; i < n; i++)
10602 buffer[i] = va_arg (newargs, tree);
10603 for (j = skip; j < oldnargs; j++, i++)
10604 buffer[i] = args[j];
10605 }
10606 else
10607 buffer = args + skip;
10608
10609 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
10610 }
10611
10612 /* Return true if FNDECL shouldn't be folded right now.
10613 If a built-in function has an inline attribute always_inline
10614 wrapper, defer folding it until after always_inline functions
10615 have been inlined; otherwise e.g. -D_FORTIFY_SOURCE checking
10616 might not be performed. */
10617
10618 bool
10619 avoid_folding_inline_builtin (tree fndecl)
10620 {
10621 return (DECL_DECLARED_INLINE_P (fndecl)
10622 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10623 && cfun
10624 && !cfun->always_inline_functions_inlined
10625 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
10626 }
10627
10628 /* A wrapper function for builtin folding that prevents warnings for
10629 "statement without effect" and the like, caused by removing the
10630 call node earlier than the warning is generated. */
10631
10632 tree
10633 fold_call_expr (location_t loc, tree exp, bool ignore)
10634 {
10635 tree ret = NULL_TREE;
10636 tree fndecl = get_callee_fndecl (exp);
10637 if (fndecl
10638 && TREE_CODE (fndecl) == FUNCTION_DECL
10639 && DECL_BUILT_IN (fndecl)
10640 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10641 yet. Defer folding until we see all the arguments
10642 (after inlining). */
10643 && !CALL_EXPR_VA_ARG_PACK (exp))
10644 {
10645 int nargs = call_expr_nargs (exp);
10646
10647 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10648 instead last argument is __builtin_va_arg_pack (). Defer folding
10649 even in that case, until arguments are finalized. */
10650 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10651 {
10652 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10653 if (fndecl2
10654 && TREE_CODE (fndecl2) == FUNCTION_DECL
10655 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10656 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10657 return NULL_TREE;
10658 }
10659
10660 if (avoid_folding_inline_builtin (fndecl))
10661 return NULL_TREE;
10662
10663 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10664 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
10665 CALL_EXPR_ARGP (exp), ignore);
10666 else
10667 {
10668 tree *args = CALL_EXPR_ARGP (exp);
10669 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
10670 if (ret)
10671 return ret;
10672 }
10673 }
10674 return NULL_TREE;
10675 }
10676
10677 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
10678 N arguments are passed in the array ARGARRAY. Return a folded
10679 expression or NULL_TREE if no simplification was possible. */
10680
10681 tree
10682 fold_builtin_call_array (location_t loc, tree,
10683 tree fn,
10684 int n,
10685 tree *argarray)
10686 {
10687 if (TREE_CODE (fn) != ADDR_EXPR)
10688 return NULL_TREE;
10689
10690 tree fndecl = TREE_OPERAND (fn, 0);
10691 if (TREE_CODE (fndecl) == FUNCTION_DECL
10692 && DECL_BUILT_IN (fndecl))
10693 {
10694 /* If last argument is __builtin_va_arg_pack (), arguments to this
10695 function are not finalized yet. Defer folding until they are. */
10696 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
10697 {
10698 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
10699 if (fndecl2
10700 && TREE_CODE (fndecl2) == FUNCTION_DECL
10701 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10702 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10703 return NULL_TREE;
10704 }
10705 if (avoid_folding_inline_builtin (fndecl))
10706 return NULL_TREE;
10707 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10708 return targetm.fold_builtin (fndecl, n, argarray, false);
10709 else
10710 return fold_builtin_n (loc, fndecl, argarray, n, false);
10711 }
10712
10713 return NULL_TREE;
10714 }
10715
10716 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10717 along with N new arguments specified as the "..." parameters. SKIP
10718 is the number of arguments in EXP to be omitted. This function is used
10719 to do varargs-to-varargs transformations. */
10720
10721 static tree
10722 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
10723 {
10724 va_list ap;
10725 tree t;
10726
10727 va_start (ap, n);
10728 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
10729 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
10730 va_end (ap);
10731
10732 return t;
10733 }
10734
10735 /* Validate a single argument ARG against a tree code CODE representing
10736 a type. */
10737
10738 static bool
10739 validate_arg (const_tree arg, enum tree_code code)
10740 {
10741 if (!arg)
10742 return false;
10743 else if (code == POINTER_TYPE)
10744 return POINTER_TYPE_P (TREE_TYPE (arg));
10745 else if (code == INTEGER_TYPE)
10746 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10747 return code == TREE_CODE (TREE_TYPE (arg));
10748 }
10749
10750 /* This function validates the types of a function call argument list
10751 against a specified list of tree_codes. If the last specifier is a 0,
10752 that represents an ellipsis; otherwise the last specifier must be a
10753 VOID_TYPE.
10754
10755 This is the GIMPLE version of validate_arglist. Eventually we want to
10756 completely convert builtins.c to work from GIMPLEs and the tree based
10757 validate_arglist will then be removed. */
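/* For example, validate_gimple_arglist (call, REAL_TYPE, POINTER_TYPE,
   VOID_TYPE) accepts exactly a real argument followed by a pointer
   argument; ending the list with 0 instead of VOID_TYPE would allow
   any number of additional trailing arguments.  */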
10758
10759 bool
10760 validate_gimple_arglist (const gcall *call, ...)
10761 {
10762 enum tree_code code;
10763 bool res = false;
10764 va_list ap;
10765 const_tree arg;
10766 size_t i;
10767
10768 va_start (ap, call);
10769 i = 0;
10770
10771 do
10772 {
10773 code = (enum tree_code) va_arg (ap, int);
10774 switch (code)
10775 {
10776 case 0:
10777 /* This signifies an ellipsis; any further arguments are all OK. */
10778 res = true;
10779 goto end;
10780 case VOID_TYPE:
10781 /* This signifies an endlink; if no arguments remain, return
10782 true, otherwise return false. */
10783 res = (i == gimple_call_num_args (call));
10784 goto end;
10785 default:
10786 /* If no parameters remain or the parameter's code does not
10787 match the specified code, return false. Otherwise continue
10788 checking any remaining arguments. */
10789 arg = gimple_call_arg (call, i++);
10790 if (!validate_arg (arg, code))
10791 goto end;
10792 break;
10793 }
10794 }
10795 while (1);
10796
10797 /* We need gotos here since we can only have one va_end in a
10798 function. */
10799 end: ;
10800 va_end (ap);
10801
10802 return res;
10803 }
10804
10805 /* Default target-specific builtin expander that does nothing. */
10806
10807 rtx
10808 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10809 rtx target ATTRIBUTE_UNUSED,
10810 rtx subtarget ATTRIBUTE_UNUSED,
10811 machine_mode mode ATTRIBUTE_UNUSED,
10812 int ignore ATTRIBUTE_UNUSED)
10813 {
10814 return NULL_RTX;
10815 }
10816
10817 /* Returns true if EXP represents data that would potentially reside
10818 in a readonly section. */
10819
10820 bool
10821 readonly_data_expr (tree exp)
10822 {
10823 STRIP_NOPS (exp);
10824
10825 if (TREE_CODE (exp) != ADDR_EXPR)
10826 return false;
10827
10828 exp = get_base_address (TREE_OPERAND (exp, 0));
10829 if (!exp)
10830 return false;
10831
10832 /* Make sure we call decl_readonly_section only for trees it
10833 can handle (since it returns true for everything it doesn't
10834 understand). */
10835 if (TREE_CODE (exp) == STRING_CST
10836 || TREE_CODE (exp) == CONSTRUCTOR
10837 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
10838 return decl_readonly_section (exp, 0);
10839 else
10840 return false;
10841 }
10842
10843 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
10844 to the call, and TYPE is its return type.
10845
10846 Return NULL_TREE if no simplification was possible, otherwise return the
10847 simplified form of the call as a tree.
10848
10849 The simplified form may be a constant or other expression which
10850 computes the same value, but in a more efficient manner (including
10851 calls to other builtin functions).
10852
10853 The call may contain arguments which need to be evaluated, but
10854 which are not useful to determine the result of the call. In
10855 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10856 COMPOUND_EXPR will be an argument which must be evaluated.
10857 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10858 COMPOUND_EXPR in the chain will contain the tree for the simplified
10859 form of the builtin function call. */
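/* For example, with constant arguments strstr ("abcde", "cd") folds to
   "abcde" + 2, strstr (s1, "") folds to (char *) s1, and
   strstr (s1, "c") is transformed into strchr (s1, 'c').  */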
10860
10861 static tree
10862 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
10863 {
10864 if (!validate_arg (s1, POINTER_TYPE)
10865 || !validate_arg (s2, POINTER_TYPE))
10866 return NULL_TREE;
10867 else
10868 {
10869 tree fn;
10870 const char *p1, *p2;
10871
10872 p2 = c_getstr (s2);
10873 if (p2 == NULL)
10874 return NULL_TREE;
10875
10876 p1 = c_getstr (s1);
10877 if (p1 != NULL)
10878 {
10879 const char *r = strstr (p1, p2);
10880 tree tem;
10881
10882 if (r == NULL)
10883 return build_int_cst (TREE_TYPE (s1), 0);
10884
10885 /* Return an offset into the constant string argument. */
10886 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10887 return fold_convert_loc (loc, type, tem);
10888 }
10889
10890 /* The argument is const char *, and the result is char *, so we need
10891 a type conversion here to avoid a warning. */
10892 if (p2[0] == '\0')
10893 return fold_convert_loc (loc, type, s1);
10894
10895 if (p2[1] != '\0')
10896 return NULL_TREE;
10897
10898 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
10899 if (!fn)
10900 return NULL_TREE;
10901
10902 /* New argument list transforming strstr(s1, s2) to
10903 strchr(s1, s2[0]). */
10904 return build_call_expr_loc (loc, fn, 2, s1,
10905 build_int_cst (integer_type_node, p2[0]));
10906 }
10907 }
10908
10909 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
10910 the call, and TYPE is its return type.
10911
10912 Return NULL_TREE if no simplification was possible, otherwise return the
10913 simplified form of the call as a tree.
10914
10915 The simplified form may be a constant or other expression which
10916 computes the same value, but in a more efficient manner (including
10917 calls to other builtin functions).
10918
10919 The call may contain arguments which need to be evaluated, but
10920 which are not useful to determine the result of the call. In
10921 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10922 COMPOUND_EXPR will be an argument which must be evaluated.
10923 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10924 COMPOUND_EXPR in the chain will contain the tree for the simplified
10925 form of the builtin function call. */
10926
10927 static tree
10928 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
10929 {
10930 if (!validate_arg (s1, POINTER_TYPE)
10931 || !validate_arg (s2, INTEGER_TYPE))
10932 return NULL_TREE;
10933 else
10934 {
10935 const char *p1;
10936
10937 if (TREE_CODE (s2) != INTEGER_CST)
10938 return NULL_TREE;
10939
10940 p1 = c_getstr (s1);
10941 if (p1 != NULL)
10942 {
10943 char c;
10944 const char *r;
10945 tree tem;
10946
10947 if (target_char_cast (s2, &c))
10948 return NULL_TREE;
10949
10950 r = strchr (p1, c);
10951
10952 if (r == NULL)
10953 return build_int_cst (TREE_TYPE (s1), 0);
10954
10955 /* Return an offset into the constant string argument. */
10956 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10957 return fold_convert_loc (loc, type, tem);
10958 }
10959 return NULL_TREE;
10960 }
10961 }
10962
10963 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
10964 the call, and TYPE is its return type.
10965
10966 Return NULL_TREE if no simplification was possible, otherwise return the
10967 simplified form of the call as a tree.
10968
10969 The simplified form may be a constant or other expression which
10970 computes the same value, but in a more efficient manner (including
10971 calls to other builtin functions).
10972
10973 The call may contain arguments which need to be evaluated, but
10974 which are not useful to determine the result of the call. In
10975 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10976 COMPOUND_EXPR will be an argument which must be evaluated.
10977 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10978 COMPOUND_EXPR in the chain will contain the tree for the simplified
10979 form of the builtin function call. */
10980
10981 static tree
10982 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
10983 {
10984 if (!validate_arg (s1, POINTER_TYPE)
10985 || !validate_arg (s2, INTEGER_TYPE))
10986 return NULL_TREE;
10987 else
10988 {
10989 tree fn;
10990 const char *p1;
10991
10992 if (TREE_CODE (s2) != INTEGER_CST)
10993 return NULL_TREE;
10994
10995 p1 = c_getstr (s1);
10996 if (p1 != NULL)
10997 {
10998 char c;
10999 const char *r;
11000 tree tem;
11001
11002 if (target_char_cast (s2, &c))
11003 return NULL_TREE;
11004
11005 r = strrchr (p1, c);
11006
11007 if (r == NULL)
11008 return build_int_cst (TREE_TYPE (s1), 0);
11009
11010 /* Return an offset into the constant string argument. */
11011 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11012 return fold_convert_loc (loc, type, tem);
11013 }
11014
11015 if (! integer_zerop (s2))
11016 return NULL_TREE;
11017
11018 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11019 if (!fn)
11020 return NULL_TREE;
11021
11022 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11023 return build_call_expr_loc (loc, fn, 2, s1, s2);
11024 }
11025 }
11026
11027 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11028 to the call, and TYPE is its return type.
11029
11030 Return NULL_TREE if no simplification was possible, otherwise return the
11031 simplified form of the call as a tree.
11032
11033 The simplified form may be a constant or other expression which
11034 computes the same value, but in a more efficient manner (including
11035 calls to other builtin functions).
11036
11037 The call may contain arguments which need to be evaluated, but
11038 which are not useful to determine the result of the call. In
11039 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11040 COMPOUND_EXPR will be an argument which must be evaluated.
11041 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11042 COMPOUND_EXPR in the chain will contain the tree for the simplified
11043 form of the builtin function call. */
11044
11045 static tree
11046 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
11047 {
11048 if (!validate_arg (s1, POINTER_TYPE)
11049 || !validate_arg (s2, POINTER_TYPE))
11050 return NULL_TREE;
11051 else
11052 {
11053 tree fn;
11054 const char *p1, *p2;
11055
11056 p2 = c_getstr (s2);
11057 if (p2 == NULL)
11058 return NULL_TREE;
11059
11060 p1 = c_getstr (s1);
11061 if (p1 != NULL)
11062 {
11063 const char *r = strpbrk (p1, p2);
11064 tree tem;
11065
11066 if (r == NULL)
11067 return build_int_cst (TREE_TYPE (s1), 0);
11068
11069 /* Return an offset into the constant string argument. */
11070 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11071 return fold_convert_loc (loc, type, tem);
11072 }
11073
11074 if (p2[0] == '\0')
11075 /* strpbrk(x, "") == NULL.
11076 Evaluate and ignore s1 in case it had side-effects. */
11077 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
11078
11079 if (p2[1] != '\0')
11080 return NULL_TREE; /* Really call strpbrk. */
11081
11082 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11083 if (!fn)
11084 return NULL_TREE;
11085
11086 /* New argument list transforming strpbrk(s1, s2) to
11087 strchr(s1, s2[0]). */
11088 return build_call_expr_loc (loc, fn, 2, s1,
11089 build_int_cst (integer_type_node, p2[0]));
11090 }
11091 }
11092
11093 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11094 to the call.
11095
11096 Return NULL_TREE if no simplification was possible, otherwise return the
11097 simplified form of the call as a tree.
11098
11099 The simplified form may be a constant or other expression which
11100 computes the same value, but in a more efficient manner (including
11101 calls to other builtin functions).
11102
11103 The call may contain arguments which need to be evaluated, but
11104 which are not useful to determine the result of the call. In
11105 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11106 COMPOUND_EXPR will be an argument which must be evaluated.
11107 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11108 COMPOUND_EXPR in the chain will contain the tree for the simplified
11109 form of the builtin function call. */
11110
11111 static tree
11112 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11113 {
11114 if (!validate_arg (s1, POINTER_TYPE)
11115 || !validate_arg (s2, POINTER_TYPE))
11116 return NULL_TREE;
11117 else
11118 {
11119 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11120
11121 /* If both arguments are constants, evaluate at compile-time. */
11122 if (p1 && p2)
11123 {
11124 const size_t r = strspn (p1, p2);
11125 return build_int_cst (size_type_node, r);
11126 }
11127
11128 /* If either argument is "", the result is 0. */
11129 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11130 /* Evaluate and ignore both arguments in case either one has
11131 side-effects. */
11132 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11133 s1, s2);
11134 return NULL_TREE;
11135 }
11136 }
11137
11138 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11139 to the call.
11140
11141 Return NULL_TREE if no simplification was possible, otherwise return the
11142 simplified form of the call as a tree.
11143
11144 The simplified form may be a constant or other expression which
11145 computes the same value, but in a more efficient manner (including
11146 calls to other builtin functions).
11147
11148 The call may contain arguments which need to be evaluated, but
11149 which are not useful to determine the result of the call. In
11150 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11151 COMPOUND_EXPR will be an argument which must be evaluated.
11152 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11153 COMPOUND_EXPR in the chain will contain the tree for the simplified
11154 form of the builtin function call. */
11155
11156 static tree
11157 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
11158 {
11159 if (!validate_arg (s1, POINTER_TYPE)
11160 || !validate_arg (s2, POINTER_TYPE))
11161 return NULL_TREE;
11162 else
11163 {
11164 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11165
11166 /* If both arguments are constants, evaluate at compile-time. */
11167 if (p1 && p2)
11168 {
11169 const size_t r = strcspn (p1, p2);
11170 return build_int_cst (size_type_node, r);
11171 }
11172
11173 /* If the first argument is "", the result is 0. */
11174 if (p1 && *p1 == '\0')
11175 {
11176 /* Evaluate and ignore argument s2 in case it has
11177 side-effects. */
11178 return omit_one_operand_loc (loc, size_type_node,
11179 size_zero_node, s2);
11180 }
11181
11182 /* If the second argument is "", return __builtin_strlen(s1). */
11183 if (p2 && *p2 == '\0')
11184 {
11185 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
11186
11187 /* If the replacement _DECL isn't initialized, don't do the
11188 transformation. */
11189 if (!fn)
11190 return NULL_TREE;
11191
11192 return build_call_expr_loc (loc, fn, 1, s1);
11193 }
11194 return NULL_TREE;
11195 }
11196 }
11197
11198 /* Fold the next_arg or va_start call EXP. Returns true if an error
11199 was produced, false otherwise. This is done so that we don't output
11200 the error or warning two or three times. */
11201
11202 bool
11203 fold_builtin_next_arg (tree exp, bool va_start_p)
11204 {
11205 tree fntype = TREE_TYPE (current_function_decl);
11206 int nargs = call_expr_nargs (exp);
11207 tree arg;
11208 /* There is a good chance the current input_location points inside the
11209 definition of the va_start macro (perhaps on the token for the
11210 builtin) in a system header, so warnings will not be emitted.
11211 Use the location in real source code. */
11212 source_location current_location =
11213 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
11214 NULL);
11215
11216 if (!stdarg_p (fntype))
11217 {
11218 error ("%<va_start%> used in function with fixed args");
11219 return true;
11220 }
11221
11222 if (va_start_p)
11223 {
11224 if (nargs != 2)
11225 {
11226 error ("wrong number of arguments to function %<va_start%>");
11227 return true;
11228 }
11229 arg = CALL_EXPR_ARG (exp, 1);
11230 }
11231 /* We use __builtin_va_start (ap, 0) or __builtin_next_arg (0)
11232 when we checked the arguments and if needed issued a warning. */
11233 else
11234 {
11235 if (nargs == 0)
11236 {
11237 /* Evidently an out of date version of <stdarg.h>; can't validate
11238 va_start's second argument, but can still work as intended. */
11239 warning_at (current_location,
11240 OPT_Wvarargs,
11241 "%<__builtin_next_arg%> called without an argument");
11242 return true;
11243 }
11244 else if (nargs > 1)
11245 {
11246 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11247 return true;
11248 }
11249 arg = CALL_EXPR_ARG (exp, 0);
11250 }
11251
11252 if (TREE_CODE (arg) == SSA_NAME)
11253 arg = SSA_NAME_VAR (arg);
11254
11255 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11256 or __builtin_next_arg (0) the first time we see it, after checking
11257 the arguments and if needed issuing a warning. */
11258 if (!integer_zerop (arg))
11259 {
11260 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11261
11262 /* Strip off all nops for the sake of the comparison. This
11263 is not quite the same as STRIP_NOPS. It does more.
11264 We must also strip off INDIRECT_EXPR for C++ reference
11265 parameters. */
11266 while (CONVERT_EXPR_P (arg)
11267 || TREE_CODE (arg) == INDIRECT_REF)
11268 arg = TREE_OPERAND (arg, 0);
11269 if (arg != last_parm)
11270 {
11271 /* FIXME: Sometimes the tree optimizers hand us something other
11272 than the last argument even though the user used the last
11273 argument. We just warn here and treat the arg as if it
11274 were the last one, so wrong code may be generated
11275 because of it. */
11276 warning_at (current_location,
11277 OPT_Wvarargs,
11278 "second parameter of %<va_start%> not last named argument");
11279 }
11280
11281 /* Undefined by C99 7.15.1.4p4 (va_start):
11282 "If the parameter parmN is declared with the register storage
11283 class, with a function or array type, or with a type that is
11284 not compatible with the type that results after application of
11285 the default argument promotions, the behavior is undefined."
11286 */
11287 else if (DECL_REGISTER (arg))
11288 {
11289 warning_at (current_location,
11290 OPT_Wvarargs,
11291 "undefined behaviour when second parameter of "
11292 "%<va_start%> is declared with %<register%> storage");
11293 }
11294
11295 /* We want to verify the second parameter just once before the tree
11296 optimizers are run and then avoid keeping it in the tree,
11297 as otherwise we could warn even for correct code like:
11298 void foo (int i, ...)
11299 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11300 if (va_start_p)
11301 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11302 else
11303 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11304 }
11305 return false;
11306 }
11307
11308
11309 /* Expand a call EXP to __builtin_object_size. */
11310
11311 static rtx
11312 expand_builtin_object_size (tree exp)
11313 {
11314 tree ost;
11315 int object_size_type;
11316 tree fndecl = get_callee_fndecl (exp);
11317
11318 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11319 {
11320 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11321 exp, fndecl);
11322 expand_builtin_trap ();
11323 return const0_rtx;
11324 }
11325
11326 ost = CALL_EXPR_ARG (exp, 1);
11327 STRIP_NOPS (ost);
11328
11329 if (TREE_CODE (ost) != INTEGER_CST
11330 || tree_int_cst_sgn (ost) < 0
11331 || compare_tree_int (ost, 3) > 0)
11332 {
11333 error ("%Klast argument of %D is not integer constant between 0 and 3",
11334 exp, fndecl);
11335 expand_builtin_trap ();
11336 return const0_rtx;
11337 }
11338
11339 object_size_type = tree_to_shwi (ost);
11340
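  /* This point is reached only when the object size could not be
     determined at compile time; for an indeterminate size, modes 0
     and 1 yield (size_t) -1 while modes 2 and 3 yield 0.  */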
11341 return object_size_type < 2 ? constm1_rtx : const0_rtx;
11342 }
11343
11344 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11345 FCODE is the BUILT_IN_* to use.
11346 Return NULL_RTX if we failed; the caller should emit a normal call,
11347 otherwise try to get the result in TARGET, if convenient (and in
11348 mode MODE if that's convenient). */
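/* For example, __memcpy_chk (dst, src, n, os) with a constant N known
   not to exceed the object size OS is expanded as a plain
   memcpy (dst, src, n); a constant N larger than a known OS instead
   triggers the "will always overflow" warning below.  */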
11349
11350 static rtx
11351 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
11352 enum built_in_function fcode)
11353 {
11354 tree dest, src, len, size;
11355
11356 if (!validate_arglist (exp,
11357 POINTER_TYPE,
11358 fcode == BUILT_IN_MEMSET_CHK
11359 ? INTEGER_TYPE : POINTER_TYPE,
11360 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11361 return NULL_RTX;
11362
11363 dest = CALL_EXPR_ARG (exp, 0);
11364 src = CALL_EXPR_ARG (exp, 1);
11365 len = CALL_EXPR_ARG (exp, 2);
11366 size = CALL_EXPR_ARG (exp, 3);
11367
11368 if (! tree_fits_uhwi_p (size))
11369 return NULL_RTX;
11370
11371 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
11372 {
11373 tree fn;
11374
11375 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
11376 {
11377 warning_at (tree_nonartificial_location (exp),
11378 0, "%Kcall to %D will always overflow destination buffer",
11379 exp, get_callee_fndecl (exp));
11380 return NULL_RTX;
11381 }
11382
11383 fn = NULL_TREE;
11384 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11385 mem{cpy,pcpy,move,set} is available. */
11386 switch (fcode)
11387 {
11388 case BUILT_IN_MEMCPY_CHK:
11389 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
11390 break;
11391 case BUILT_IN_MEMPCPY_CHK:
11392 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
11393 break;
11394 case BUILT_IN_MEMMOVE_CHK:
11395 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
11396 break;
11397 case BUILT_IN_MEMSET_CHK:
11398 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
11399 break;
11400 default:
11401 break;
11402 }
11403
11404 if (! fn)
11405 return NULL_RTX;
11406
11407 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
11408 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11409 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11410 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11411 }
11412 else if (fcode == BUILT_IN_MEMSET_CHK)
11413 return NULL_RTX;
11414 else
11415 {
11416 unsigned int dest_align = get_pointer_alignment (dest);
11417
11418 /* If DEST is not a pointer type, call the normal function. */
11419 if (dest_align == 0)
11420 return NULL_RTX;
11421
11422 /* If SRC and DEST are the same (and not volatile), do nothing. */
11423 if (operand_equal_p (src, dest, 0))
11424 {
11425 tree expr;
11426
11427 if (fcode != BUILT_IN_MEMPCPY_CHK)
11428 {
11429 /* Evaluate and ignore LEN in case it has side-effects. */
11430 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11431 return expand_expr (dest, target, mode, EXPAND_NORMAL);
11432 }
11433
11434 expr = fold_build_pointer_plus (dest, len);
11435 return expand_expr (expr, target, mode, EXPAND_NORMAL);
11436 }
11437
11438 /* __memmove_chk special case. */
11439 if (fcode == BUILT_IN_MEMMOVE_CHK)
11440 {
11441 unsigned int src_align = get_pointer_alignment (src);
11442
11443 if (src_align == 0)
11444 return NULL_RTX;
11445
11446 /* If src is categorized for a readonly section we can use
11447 normal __memcpy_chk. */
11448 if (readonly_data_expr (src))
11449 {
11450 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
11451 if (!fn)
11452 return NULL_RTX;
11453 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
11454 dest, src, len, size);
11455 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11456 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11457 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11458 }
11459 }
11460 return NULL_RTX;
11461 }
11462 }
11463
11464 /* Emit warning if a buffer overflow is detected at compile time. */
11465
11466 static void
11467 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11468 {
11469 int is_strlen = 0;
11470 tree len, size;
11471 location_t loc = tree_nonartificial_location (exp);
11472
11473 switch (fcode)
11474 {
11475 case BUILT_IN_STRCPY_CHK:
11476 case BUILT_IN_STPCPY_CHK:
11477 /* For __strcat_chk the warning will be emitted only if overflowing
11478 by at least strlen (dest) + 1 bytes. */
11479 case BUILT_IN_STRCAT_CHK:
11480 len = CALL_EXPR_ARG (exp, 1);
11481 size = CALL_EXPR_ARG (exp, 2);
11482 is_strlen = 1;
11483 break;
11484 case BUILT_IN_STRNCAT_CHK:
11485 case BUILT_IN_STRNCPY_CHK:
11486 case BUILT_IN_STPNCPY_CHK:
11487 len = CALL_EXPR_ARG (exp, 2);
11488 size = CALL_EXPR_ARG (exp, 3);
11489 break;
11490 case BUILT_IN_SNPRINTF_CHK:
11491 case BUILT_IN_VSNPRINTF_CHK:
11492 len = CALL_EXPR_ARG (exp, 1);
11493 size = CALL_EXPR_ARG (exp, 3);
11494 break;
11495 default:
11496 gcc_unreachable ();
11497 }
11498
11499 if (!len || !size)
11500 return;
11501
11502 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
11503 return;
11504
11505 if (is_strlen)
11506 {
11507 len = c_strlen (len, 1);
11508 if (! len || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
11509 return;
11510 }
11511 else if (fcode == BUILT_IN_STRNCAT_CHK)
11512 {
11513 tree src = CALL_EXPR_ARG (exp, 1);
11514 if (! src || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
11515 return;
11516 src = c_strlen (src, 1);
11517 if (! src || ! tree_fits_uhwi_p (src))
11518 {
11519 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
11520 exp, get_callee_fndecl (exp));
11521 return;
11522 }
11523 else if (tree_int_cst_lt (src, size))
11524 return;
11525 }
11526 else if (! tree_fits_uhwi_p (len) || ! tree_int_cst_lt (size, len))
11527 return;
11528
11529 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
11530 exp, get_callee_fndecl (exp));
11531 }
11532
11533 /* Emit warning if a buffer overflow is detected at compile time
11534 in __sprintf_chk/__vsprintf_chk calls. */
11535
11536 static void
11537 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11538 {
11539 tree size, len, fmt;
11540 const char *fmt_str;
11541 int nargs = call_expr_nargs (exp);
11542
11543 /* Verify the required arguments in the original call. */
11544
11545 if (nargs < 4)
11546 return;
11547 size = CALL_EXPR_ARG (exp, 2);
11548 fmt = CALL_EXPR_ARG (exp, 3);
11549
11550 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
11551 return;
11552
11553 /* Check whether the format is a literal string constant. */
11554 fmt_str = c_getstr (fmt);
11555 if (fmt_str == NULL)
11556 return;
11557
11558 if (!init_target_chars ())
11559 return;
11560
11561 /* If the format doesn't contain % args or %%, we know its size. */
11562 if (strchr (fmt_str, target_percent) == 0)
11563 len = build_int_cstu (size_type_node, strlen (fmt_str));
11564 /* If the format is "%s" and first ... argument is a string literal,
11565 we know it too. */
11566 else if (fcode == BUILT_IN_SPRINTF_CHK
11567 && strcmp (fmt_str, target_percent_s) == 0)
11568 {
11569 tree arg;
11570
11571 if (nargs < 5)
11572 return;
11573 arg = CALL_EXPR_ARG (exp, 4);
11574 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11575 return;
11576
11577 len = c_strlen (arg, 1);
11578 if (!len || ! tree_fits_uhwi_p (len))
11579 return;
11580 }
11581 else
11582 return;
11583
11584 if (! tree_int_cst_lt (len, size))
11585 warning_at (tree_nonartificial_location (exp),
11586 0, "%Kcall to %D will always overflow destination buffer",
11587 exp, get_callee_fndecl (exp));
11588 }
11589
11590 /* Emit a warning if free is called with the address of a variable. */
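/* For example, "int i; ... free (&i);" gets a -Wfree-nonheap-object
   warning naming I, since the base address is a declared variable
   rather than heap storage.  */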
11591
11592 static void
11593 maybe_emit_free_warning (tree exp)
11594 {
11595 tree arg = CALL_EXPR_ARG (exp, 0);
11596
11597 STRIP_NOPS (arg);
11598 if (TREE_CODE (arg) != ADDR_EXPR)
11599 return;
11600
11601 arg = get_base_address (TREE_OPERAND (arg, 0));
11602 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
11603 return;
11604
11605 if (SSA_VAR_P (arg))
11606 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
11607 "%Kattempt to free a non-heap object %qD", exp, arg);
11608 else
11609 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
11610 "%Kattempt to free a non-heap object", exp);
11611 }
11612
11613 /* Fold a call to __builtin_object_size with arguments PTR and OST,
11614 if possible. */
11615
11616 static tree
11617 fold_builtin_object_size (tree ptr, tree ost)
11618 {
11619 unsigned HOST_WIDE_INT bytes;
11620 int object_size_type;
11621
11622 if (!validate_arg (ptr, POINTER_TYPE)
11623 || !validate_arg (ost, INTEGER_TYPE))
11624 return NULL_TREE;
11625
11626 STRIP_NOPS (ost);
11627
11628 if (TREE_CODE (ost) != INTEGER_CST
11629 || tree_int_cst_sgn (ost) < 0
11630 || compare_tree_int (ost, 3) > 0)
11631 return NULL_TREE;
11632
11633 object_size_type = tree_to_shwi (ost);
11634
11635 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11636 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11637 and (size_t) 0 for types 2 and 3. */
11638 if (TREE_SIDE_EFFECTS (ptr))
11639 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
11640
11641 if (TREE_CODE (ptr) == ADDR_EXPR)
11642 {
11643 bytes = compute_builtin_object_size (ptr, object_size_type);
11644 if (wi::fits_to_tree_p (bytes, size_type_node))
11645 return build_int_cstu (size_type_node, bytes);
11646 }
11647 else if (TREE_CODE (ptr) == SSA_NAME)
11648 {
11649 /* If object size is not known yet, delay folding until
11650 later. Maybe subsequent passes will help determine
11651 it. */
11652 bytes = compute_builtin_object_size (ptr, object_size_type);
11653 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
11654 && wi::fits_to_tree_p (bytes, size_type_node))
11655 return build_int_cstu (size_type_node, bytes);
11656 }
11657
11658 return NULL_TREE;
11659 }
11660
11661 /* Builtins with folding operations that operate on "..." arguments
11662 need special handling; we need to store the arguments in a convenient
11663 data structure before attempting any folding. Fortunately there are
11664 only a few builtins that fall into this category. FNDECL is the
11665 function, EXP is the CALL_EXPR for the call. */
11666
11667 static tree
11668 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
11669 {
11670 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11671 tree ret = NULL_TREE;
11672
11673 switch (fcode)
11674 {
11675 case BUILT_IN_FPCLASSIFY:
11676 ret = fold_builtin_fpclassify (loc, args, nargs);
11677 break;
11678
11679 default:
11680 break;
11681 }
11682 if (ret)
11683 {
11684 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11685 SET_EXPR_LOCATION (ret, loc);
11686 TREE_NO_WARNING (ret) = 1;
11687 return ret;
11688 }
11689 return NULL_TREE;
11690 }
11691
11692 /* Initialize format string characters in the target charset. */
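/* The host and target character sets may differ (e.g. when
   cross-compiling from an ASCII host to an EBCDIC target), so '%',
   's', etc. cannot be used as host literals when scanning target
   format strings.  */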
11693
11694 bool
11695 init_target_chars (void)
11696 {
11697 static bool init;
11698 if (!init)
11699 {
11700 target_newline = lang_hooks.to_target_charset ('\n');
11701 target_percent = lang_hooks.to_target_charset ('%');
11702 target_c = lang_hooks.to_target_charset ('c');
11703 target_s = lang_hooks.to_target_charset ('s');
11704 if (target_newline == 0 || target_percent == 0 || target_c == 0
11705 || target_s == 0)
11706 return false;
11707
11708 target_percent_c[0] = target_percent;
11709 target_percent_c[1] = target_c;
11710 target_percent_c[2] = '\0';
11711
11712 target_percent_s[0] = target_percent;
11713 target_percent_s[1] = target_s;
11714 target_percent_s[2] = '\0';
11715
11716 target_percent_s_newline[0] = target_percent;
11717 target_percent_s_newline[1] = target_s;
11718 target_percent_s_newline[2] = target_newline;
11719 target_percent_s_newline[3] = '\0';
11720
11721 init = true;
11722 }
11723 return true;
11724 }
11725
11726 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
11727 and no overflow/underflow occurred. INEXACT is true if M was not
11728 exactly calculated. TYPE is the tree type for the result. This
11729 function assumes that you cleared the MPFR flags and then
11730 calculated M to see if anything subsequently set a flag prior to
11731 entering this function. Return NULL_TREE if any checks fail. */
11732
11733 static tree
11734 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
11735 {
11736 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11737 overflow/underflow occurred. If -frounding-math, proceed iff the
11738 result of calling FUNC was exact. */
11739 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
11740 && (!flag_rounding_math || !inexact))
11741 {
11742 REAL_VALUE_TYPE rr;
11743
11744 real_from_mpfr (&rr, m, type, GMP_RNDN);
11745 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
11746 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
11747 but the mpfr_t is not, then we underflowed in the
11748 conversion. */
11749 if (real_isfinite (&rr)
11750 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
11751 {
11752 REAL_VALUE_TYPE rmode;
11753
11754 real_convert (&rmode, TYPE_MODE (type), &rr);
11755 /* Proceed iff the specified mode can hold the value. */
11756 if (real_identical (&rmode, &rr))
11757 return build_real (type, rmode);
11758 }
11759 }
11760 return NULL_TREE;
11761 }
11762
11763 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
11764 number and no overflow/underflow occurred. INEXACT is true if M
11765 was not exactly calculated. TYPE is the tree type for the result.
11766 This function assumes that you cleared the MPFR flags and then
11767 calculated M to see if anything subsequently set a flag prior to
11768 entering this function. Return NULL_TREE if any checks fail; if
11769 FORCE_CONVERT is true, the checks are bypassed. */
11770
11771 static tree
11772 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
11773 {
11774 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11775 overflow/underflow occurred. If -frounding-math, proceed iff the
11776 result of calling FUNC was exact. */
11777 if (force_convert
11778 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
11779 && !mpfr_overflow_p () && !mpfr_underflow_p ()
11780 && (!flag_rounding_math || !inexact)))
11781 {
11782 REAL_VALUE_TYPE re, im;
11783
11784 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
11785 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
11786 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
11787 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
11788 but the mpfr_t is not, then we underflowed in the
11789 conversion. */
11790 if (force_convert
11791 || (real_isfinite (&re) && real_isfinite (&im)
11792 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
11793 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
11794 {
11795 REAL_VALUE_TYPE re_mode, im_mode;
11796
11797 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
11798 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
11799 /* Proceed iff the specified mode can hold the value. */
11800 if (force_convert
11801 || (real_identical (&re_mode, &re)
11802 && real_identical (&im_mode, &im)))
11803 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
11804 build_real (TREE_TYPE (type), im_mode));
11805 }
11806 }
11807 return NULL_TREE;
11808 }
11809
11810 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
11811 FUNC on it and return the resulting value as a tree with type TYPE.
11812 If MIN and/or MAX are not NULL, then the supplied ARG must be
11813 within those bounds. If INCLUSIVE is true, then MIN/MAX are
11814 acceptable values, otherwise they are not. The mpfr precision is
11815 set to the precision of TYPE. We assume that function FUNC returns
11816 zero if the result could be calculated exactly within the requested
11817 precision. */
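/* As a sketch of typical use (assuming the corresponding callers in
   fold_builtin_1): an unbounded function such as sin passes
   MIN == MAX == NULL, while a domain-restricted one such as asin
   would pass [-1, 1] as inclusive bounds.  */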
11818
11819 static tree
11820 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
11821 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
11822 bool inclusive)
11823 {
11824 tree result = NULL_TREE;
11825
11826 STRIP_NOPS (arg);
11827
11828 /* To proceed, MPFR must exactly represent the target floating point
11829 format, which only happens when the target base equals two. */
11830 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11831 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
11832 {
11833 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
11834
11835 if (real_isfinite (ra)
11836 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
11837 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
11838 {
11839 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11840 const int prec = fmt->p;
11841 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
11842 int inexact;
11843 mpfr_t m;
11844
11845 mpfr_init2 (m, prec);
11846 mpfr_from_real (m, ra, GMP_RNDN);
11847 mpfr_clear_flags ();
11848 inexact = func (m, m, rnd);
11849 result = do_mpfr_ckconv (m, type, inexact);
11850 mpfr_clear (m);
11851 }
11852 }
11853
11854 return result;
11855 }
11856
11857 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
11858 FUNC on it and return the resulting value as a tree with type TYPE.
11859 The mpfr precision is set to the precision of TYPE. We assume that
11860 function FUNC returns zero if the result could be calculated
11861 exactly within the requested precision. */
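/* For example, the atan2 case in fold_builtin_2 above evaluates a
   constant call via do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2).  */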
11862
11863 static tree
11864 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
11865 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
11866 {
11867 tree result = NULL_TREE;
11868
11869 STRIP_NOPS (arg1);
11870 STRIP_NOPS (arg2);
11871
11872 /* To proceed, MPFR must exactly represent the target floating point
11873 format, which only happens when the target base equals two. */
11874 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11875 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
11876 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
11877 {
11878 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
11879 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
11880
11881 if (real_isfinite (ra1) && real_isfinite (ra2))
11882 {
11883 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11884 const int prec = fmt->p;
11885 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
11886 int inexact;
11887 mpfr_t m1, m2;
11888
11889 mpfr_inits2 (prec, m1, m2, NULL);
11890 mpfr_from_real (m1, ra1, GMP_RNDN);
11891 mpfr_from_real (m2, ra2, GMP_RNDN);
11892 mpfr_clear_flags ();
11893 inexact = func (m1, m1, m2, rnd);
11894 result = do_mpfr_ckconv (m1, type, inexact);
11895 mpfr_clears (m1, m2, NULL);
11896 }
11897 }
11898
11899 return result;
11900 }
11901
11902 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
11903 FUNC on it and return the resulting value as a tree with type TYPE.
11904 The mpfr precision is set to the precision of TYPE. We assume that
11905 function FUNC returns zero if the result could be calculated
11906 exactly within the requested precision. */
11907
11908 static tree
11909 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
11910 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
11911 {
11912 tree result = NULL_TREE;
11913
11914 STRIP_NOPS (arg1);
11915 STRIP_NOPS (arg2);
11916 STRIP_NOPS (arg3);
11917
11918 /* To proceed, MPFR must exactly represent the target floating point
11919 format, which only happens when the target base equals two. */
11920 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11921 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
11922 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
11923 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
11924 {
11925 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
11926 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
11927 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
11928
11929 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
11930 {
11931 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11932 const int prec = fmt->p;
11933 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
11934 int inexact;
11935 mpfr_t m1, m2, m3;
11936
11937 mpfr_inits2 (prec, m1, m2, m3, NULL);
11938 mpfr_from_real (m1, ra1, GMP_RNDN);
11939 mpfr_from_real (m2, ra2, GMP_RNDN);
11940 mpfr_from_real (m3, ra3, GMP_RNDN);
11941 mpfr_clear_flags ();
11942 inexact = func (m1, m1, m2, m3, rnd);
11943 result = do_mpfr_ckconv (m1, type, inexact);
11944 mpfr_clears (m1, m2, m3, NULL);
11945 }
11946 }
11947
11948 return result;
11949 }
11950
11951 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
11952 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
11953 If ARG_SINP and ARG_COSP are NULL then the result is returned
11954 as a complex value.
11955 The type is taken from the type of ARG and is used for setting the
11956 precision of the calculation and results. */
11957
11958 static tree
11959 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
11960 {
11961 tree const type = TREE_TYPE (arg);
11962 tree result = NULL_TREE;
11963
11964 STRIP_NOPS (arg);
11965
11966 /* To proceed, MPFR must exactly represent the target floating point
11967 format, which only happens when the target base equals two. */
11968 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11969 && TREE_CODE (arg) == REAL_CST
11970 && !TREE_OVERFLOW (arg))
11971 {
11972 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
11973
11974 if (real_isfinite (ra))
11975 {
11976 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11977 const int prec = fmt->p;
11978 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
11979 tree result_s, result_c;
11980 int inexact;
11981 mpfr_t m, ms, mc;
11982
11983 mpfr_inits2 (prec, m, ms, mc, NULL);
11984 mpfr_from_real (m, ra, GMP_RNDN);
11985 mpfr_clear_flags ();
11986 inexact = mpfr_sin_cos (ms, mc, m, rnd);
11987 result_s = do_mpfr_ckconv (ms, type, inexact);
11988 result_c = do_mpfr_ckconv (mc, type, inexact);
11989 mpfr_clears (m, ms, mc, NULL);
11990 if (result_s && result_c)
11991 {
11992 /* If we are to return the result as a complex value, do so. */
11993 if (!arg_sinp && !arg_cosp)
11994 return build_complex (build_complex_type (type),
11995 result_c, result_s);
11996
11997 /* Dereference the sin/cos pointer arguments. */
11998 arg_sinp = build_fold_indirect_ref (arg_sinp);
11999 arg_cosp = build_fold_indirect_ref (arg_cosp);
12000 /* Proceed iff valid pointer types were passed in. */
12001 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
12002 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
12003 {
12004 /* Set the values. */
12005 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
12006 result_s);
12007 TREE_SIDE_EFFECTS (result_s) = 1;
12008 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
12009 result_c);
12010 TREE_SIDE_EFFECTS (result_c) = 1;
12011 /* Combine the assignments into a compound expr. */
12012 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12013 result_s, result_c));
12014 }
12015 }
12016 }
12017 }
12018 return result;
12019 }
12020
12021 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
12022 two-argument mpfr order N Bessel function FUNC on them and return
12023 the resulting value as a tree with type TYPE. The mpfr precision
12024 is set to the precision of TYPE. We assume that function FUNC
12025 returns zero if the result could be calculated exactly within the
12026 requested precision. */
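/* For example, the jn case in fold_builtin_2 above passes mpfr_jn with
   no lower bound, while the yn case passes &dconst0 with INCLUSIVE
   false, since yn requires a strictly positive argument.  */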
12027 static tree
12028 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
12029 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
12030 const REAL_VALUE_TYPE *min, bool inclusive)
12031 {
12032 tree result = NULL_TREE;
12033
12034 STRIP_NOPS (arg1);
12035 STRIP_NOPS (arg2);
12036
12037 /* To proceed, MPFR must exactly represent the target floating point
12038 format, which only happens when the target base equals two. */
12039 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12040 && tree_fits_shwi_p (arg1)
12041 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12042 {
12043 const HOST_WIDE_INT n = tree_to_shwi (arg1);
12044 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
12045
12046 if (n == (long)n
12047 && real_isfinite (ra)
12048 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
12049 {
12050 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12051 const int prec = fmt->p;
12052 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12053 int inexact;
12054 mpfr_t m;
12055
12056 mpfr_init2 (m, prec);
12057 mpfr_from_real (m, ra, GMP_RNDN);
12058 mpfr_clear_flags ();
12059 inexact = func (m, n, m, rnd);
12060 result = do_mpfr_ckconv (m, type, inexact);
12061 mpfr_clear (m);
12062 }
12063 }
12064
12065 return result;
12066 }
12067
12068 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
12069 the pointer *(ARG_QUO) and return the result. The type is taken
12070 from the type of ARG0 and is used for setting the precision of the
12071 calculation and results. */
12072
12073 static tree
12074 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
12075 {
12076 tree const type = TREE_TYPE (arg0);
12077 tree result = NULL_TREE;
12078
12079 STRIP_NOPS (arg0);
12080 STRIP_NOPS (arg1);
12081
12082 /* To proceed, MPFR must exactly represent the target floating point
12083 format, which only happens when the target base equals two. */
12084 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12085 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
12086 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
12087 {
12088 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
12089 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
12090
12091 if (real_isfinite (ra0) && real_isfinite (ra1))
12092 {
12093 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12094 const int prec = fmt->p;
12095 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12096 tree result_rem;
12097 long integer_quo;
12098 mpfr_t m0, m1;
12099
12100 mpfr_inits2 (prec, m0, m1, NULL);
12101 mpfr_from_real (m0, ra0, GMP_RNDN);
12102 mpfr_from_real (m1, ra1, GMP_RNDN);
12103 mpfr_clear_flags ();
12104 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
12105 /* Remquo is independent of the rounding mode, so pass
12106 inexact=0 to do_mpfr_ckconv(). */
12107 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
12108 mpfr_clears (m0, m1, NULL);
12109 if (result_rem)
12110 {
12111 /* MPFR calculates quo in the host's long, so it may
12112 return more bits in quo than the target int can hold
12113 if sizeof (host long) > sizeof (target int). This can
12114 happen even for native compilers in LP64 mode. In
12115 these cases, reduce the quo value modulo the largest
12116 number that the target int can hold, leaving one
12117 bit for the sign. */
12118 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
12119 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
12120
12121 /* Dereference the quo pointer argument. */
12122 arg_quo = build_fold_indirect_ref (arg_quo);
12123 /* Proceed iff a valid pointer type was passed in. */
12124 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
12125 {
12126 /* Set the value. */
12127 tree result_quo
12128 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
12129 build_int_cst (TREE_TYPE (arg_quo),
12130 integer_quo));
12131 TREE_SIDE_EFFECTS (result_quo) = 1;
12132 /* Combine the quo assignment with the rem. */
12133 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12134 result_quo, result_rem));
12135 }
12136 }
12137 }
12138 }
12139 return result;
12140 }
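
/* A standalone sketch, not part of GCC, of the mpfr_remquo sequence above
   with illustrative operands (link with -lmpfr -lgmp). Note that quo is a
   host long, which is exactly why the folder may have to truncate it to
   the width of the target int. */
#include <stdio.h>
#include <mpfr.h>

int
main (void)
{
  mpfr_t m0, m1;
  long quo;

  mpfr_inits2 (53, m0, m1, NULL);
  mpfr_set_d (m0, 5.0, GMP_RNDN);
  mpfr_set_d (m1, 3.0, GMP_RNDN);
  mpfr_remquo (m0, &quo, m0, m1, GMP_RNDN);  /* rem in m0, quotient in quo.  */
  printf ("rem=%f quo=%ld\n", mpfr_get_d (m0, GMP_RNDN), quo);  /* -1 and 2.  */
  mpfr_clears (m0, m1, NULL);
  return 0;
}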
12141
12142 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
12143 resulting value as a tree with type TYPE. The mpfr precision is
12144 set to the precision of TYPE. We assume that this mpfr function
12145 returns zero if the result could be calculated exactly within the
12146 requested precision. In addition, the integer pointer represented
12147 by ARG_SG will be dereferenced and set to the appropriate signgam
12148 (-1 or 1) value. */
12149
12150 static tree
12151 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
12152 {
12153 tree result = NULL_TREE;
12154
12155 STRIP_NOPS (arg);
12156
12157 /* To proceed, MPFR must exactly represent the target floating point
12158 format, which only happens when the target base equals two. Also
12159 verify ARG is a constant and that ARG_SG is an int pointer. */
12160 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12161 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
12162 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
12163 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
12164 {
12165 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
12166
12167 /* In addition to NaN and Inf, the argument cannot be zero or a
12168 negative integer. */
12169 if (real_isfinite (ra)
12170 && ra->cl != rvc_zero
12171 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
12172 {
12173 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12174 const int prec = fmt->p;
12175 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
12176 int inexact, sg;
12177 mpfr_t m;
12178 tree result_lg;
12179
12180 mpfr_init2 (m, prec);
12181 mpfr_from_real (m, ra, GMP_RNDN);
12182 mpfr_clear_flags ();
12183 inexact = mpfr_lgamma (m, &sg, m, rnd);
12184 result_lg = do_mpfr_ckconv (m, type, inexact);
12185 mpfr_clear (m);
12186 if (result_lg)
12187 {
12188 tree result_sg;
12189
12190 /* Dereference the arg_sg pointer argument. */
12191 arg_sg = build_fold_indirect_ref (arg_sg);
12192 /* Assign the signgam value into *arg_sg. */
12193 result_sg = fold_build2 (MODIFY_EXPR,
12194 TREE_TYPE (arg_sg), arg_sg,
12195 build_int_cst (TREE_TYPE (arg_sg), sg));
12196 TREE_SIDE_EFFECTS (result_sg) = 1;
12197 /* Combine the signgam assignment with the lgamma result. */
12198 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12199 result_sg, result_lg));
12200 }
12201 }
12202 }
12203
12204 return result;
12205 }
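
/* A standalone sketch, not part of GCC, of the mpfr_lgamma sequence above
   with an illustrative argument (link with -lmpfr -lgmp). SG receives the
   sign of gamma(x), mirroring what the folder stores through ARG_SG. */
#include <stdio.h>
#include <mpfr.h>

int
main (void)
{
  mpfr_t m;
  int sg, inexact;

  mpfr_init2 (m, 53);
  mpfr_set_d (m, -2.5, GMP_RNDN);   /* Negative non-integer: valid input.  */
  mpfr_clear_flags ();
  inexact = mpfr_lgamma (m, &sg, m, GMP_RNDN);
  printf ("lgamma(-2.5) ~= %f, sign=%d, inexact=%d\n",
          mpfr_get_d (m, GMP_RNDN), sg, inexact);
  mpfr_clear (m);
  return 0;
}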
12206
12207 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
12208 function FUNC on it and return the resulting value as a tree with
12209 type TYPE. The mpfr precision is set to the precision of TYPE. We
12210 assume that function FUNC returns zero if the result could be
12211 calculated exactly within the requested precision. */
12212
12213 static tree
12214 do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
12215 {
12216 tree result = NULL_TREE;
12217
12218 STRIP_NOPS (arg);
12219
12220 /* To proceed, MPFR must exactly represent the target floating point
12221 format, which only happens when the target base equals two. */
12222 if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
12223 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
12224 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
12225 {
12226 const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
12227 const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
12228
12229 if (real_isfinite (re) && real_isfinite (im))
12230 {
12231 const struct real_format *const fmt =
12232 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
12233 const int prec = fmt->p;
12234 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
12235 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
12236 int inexact;
12237 mpc_t m;
12238
12239 mpc_init2 (m, prec);
12240 mpfr_from_real (mpc_realref (m), re, rnd);
12241 mpfr_from_real (mpc_imagref (m), im, rnd);
12242 mpfr_clear_flags ();
12243 inexact = func (m, m, crnd);
12244 result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
12245 mpc_clear (m);
12246 }
12247 }
12248
12249 return result;
12250 }
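
/* A standalone sketch, not part of GCC, of the MPC sequence above, using
   mpc_sin as one possible FUNC and illustrative operands; the folder
   would load the parts with mpfr_from_real rather than mpfr_set_d (link
   with -lmpc -lmpfr -lgmp). */
#include <stdio.h>
#include <mpc.h>

int
main (void)
{
  mpc_t m;
  int inexact;

  mpc_init2 (m, 53);
  mpfr_set_d (mpc_realref (m), 1.0, GMP_RNDN);
  mpfr_set_d (mpc_imagref (m), 2.0, GMP_RNDN);
  mpfr_clear_flags ();
  inexact = mpc_sin (m, m, MPC_RNDNN);
  printf ("sin(1+2i) ~= %f%+f*i (inexact=%d)\n",
          mpfr_get_d (mpc_realref (m), GMP_RNDN),
          mpfr_get_d (mpc_imagref (m), GMP_RNDN), inexact);
  mpc_clear (m);
  return 0;
}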
12251
12252 /* If arguments ARG0 and ARG1 are COMPLEX_CSTs, call the two-argument
12253 mpc function FUNC on them and return the resulting value as a tree
12254 with type TYPE. The mpfr precision is set to the precision of
12255 TYPE. We assume that function FUNC returns zero if the result
12256 could be calculated exactly within the requested precision. If
12257 DO_NONFINITE is true, then fold expressions containing Inf or NaN
12258 in the arguments and/or results. */
12259
12260 tree
12261 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
12262 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
12263 {
12264 tree result = NULL_TREE;
12265
12266 STRIP_NOPS (arg0);
12267 STRIP_NOPS (arg1);
12268
12269 /* To proceed, MPFR must exactly represent the target floating point
12270 format, which only happens when the target base equals two. */
12271 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
12272 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
12273 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
12274 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
12275 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
12276 {
12277 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
12278 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
12279 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
12280 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
12281
12282 if (do_nonfinite
12283 || (real_isfinite (re0) && real_isfinite (im0)
12284 && real_isfinite (re1) && real_isfinite (im1)))
12285 {
12286 const struct real_format *const fmt =
12287 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
12288 const int prec = fmt->p;
12289 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
12290 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
12291 int inexact;
12292 mpc_t m0, m1;
12293
12294 mpc_init2 (m0, prec);
12295 mpc_init2 (m1, prec);
12296 mpfr_from_real (mpc_realref (m0), re0, rnd);
12297 mpfr_from_real (mpc_imagref (m0), im0, rnd);
12298 mpfr_from_real (mpc_realref (m1), re1, rnd);
12299 mpfr_from_real (mpc_imagref (m1), im1, rnd);
12300 mpfr_clear_flags ();
12301 inexact = func (m0, m0, m1, crnd);
12302 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
12303 mpc_clear (m0);
12304 mpc_clear (m1);
12305 }
12306 }
12307
12308 return result;
12309 }
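
/* A standalone sketch, not part of GCC, of the two-operand variant above,
   with mpc_pow as one possible FUNC and illustrative operands; this
   mirrors folding a cpow call whose arguments are COMPLEX_CSTs (link with
   -lmpc -lmpfr -lgmp). */
#include <stdio.h>
#include <mpc.h>

int
main (void)
{
  mpc_t m0, m1;

  mpc_init2 (m0, 53);
  mpc_init2 (m1, 53);
  mpc_set_d_d (m0, 0.0, 1.0, MPC_RNDNN);   /* i */
  mpc_set_d_d (m1, 2.0, 0.0, MPC_RNDNN);   /* 2 */
  mpc_pow (m0, m0, m1, MPC_RNDNN);         /* i**2 == -1 */
  printf ("%f%+f*i\n",
          mpfr_get_d (mpc_realref (m0), GMP_RNDN),
          mpfr_get_d (mpc_imagref (m0), GMP_RNDN));
  mpc_clear (m0);
  mpc_clear (m1);
  return 0;
}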
12310
12311 /* A wrapper function for builtin folding that prevents warnings for
12312 "statement without effect" and the like, caused by removing the
12313 call node before the warning is generated. */
12314
12315 tree
12316 fold_call_stmt (gcall *stmt, bool ignore)
12317 {
12318 tree ret = NULL_TREE;
12319 tree fndecl = gimple_call_fndecl (stmt);
12320 location_t loc = gimple_location (stmt);
12321 if (fndecl
12322 && TREE_CODE (fndecl) == FUNCTION_DECL
12323 && DECL_BUILT_IN (fndecl)
12324 && !gimple_call_va_arg_pack_p (stmt))
12325 {
12326 int nargs = gimple_call_num_args (stmt);
12327 tree *args = (nargs > 0
12328 ? gimple_call_arg_ptr (stmt, 0)
12329 : &error_mark_node);
12330
12331 if (avoid_folding_inline_builtin (fndecl))
12332 return NULL_TREE;
12333 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
12334 {
12335 return targetm.fold_builtin (fndecl, nargs, args, ignore);
12336 }
12337 else
12338 {
12339 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
12340 if (ret)
12341 {
12342 /* Propagate location information from original call to
12343 expansion of builtin. Otherwise things like
12344 maybe_emit_chk_warning, that operate on the expansion
12345 of a builtin, will use the wrong location information. */
12346 if (gimple_has_location (stmt))
12347 {
12348 tree realret = ret;
12349 if (TREE_CODE (ret) == NOP_EXPR)
12350 realret = TREE_OPERAND (ret, 0);
12351 if (CAN_HAVE_LOCATION_P (realret)
12352 && !EXPR_HAS_LOCATION (realret))
12353 SET_EXPR_LOCATION (realret, loc);
12354 return realret;
12355 }
12356 return ret;
12357 }
12358 }
12359 }
12360 return NULL_TREE;
12361 }
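
/* A user-level sketch, not part of GCC, of the kind of call this wrapper
   folds: with constant arguments the builtin call is typically replaced
   by its value, and the location propagation above keeps any later
   warning pointing at the original call site. */
#include <stdio.h>
#include <string.h>

int
main (void)
{
  /* Typically folds to the constant 5; no strlen call remains.  */
  printf ("%zu\n", strlen ("hello"));
  return 0;
}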
12362
12363 /* Look up the function in the builtin table that corresponds to DECL
12364 and set ASMSPEC as its user assembler name. DECL must be a
12365 function decl that declares a builtin. */
12366
12367 void
12368 set_builtin_user_assembler_name (tree decl, const char *asmspec)
12369 {
12370 tree builtin;
12371 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
12372 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
12373 && asmspec != 0);
12374
12375 builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
12376 set_user_assembler_name (builtin, asmspec);
12377 switch (DECL_FUNCTION_CODE (decl))
12378 {
12379 case BUILT_IN_MEMCPY:
12380 init_block_move_fn (asmspec);
12381 memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
12382 break;
12383 case BUILT_IN_MEMSET:
12384 init_block_clear_fn (asmspec);
12385 memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
12386 break;
12387 case BUILT_IN_MEMMOVE:
12388 memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
12389 break;
12390 case BUILT_IN_MEMCMP:
12391 memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
12392 break;
12393 case BUILT_IN_ABORT:
12394 abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
12395 break;
12396 case BUILT_IN_FFS:
12397 if (INT_TYPE_SIZE < BITS_PER_WORD)
12398 {
12399 set_user_assembler_libfunc ("ffs", asmspec);
12400 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
12401 MODE_INT, 0), "ffs");
12402 }
12403 break;
12404 default:
12405 break;
12406 }
12407 }
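
/* A user-level sketch, not part of GCC, of the construct that reaches
   this function: redeclaring a builtin with an __asm__ label. The symbol
   my_memcpy is hypothetical and must be supplied at link time; updating
   memcpy_libfunc above makes GCC's own generated block moves use it
   too. */
#include <stddef.h>

extern void *memcpy (void *, const void *, size_t) __asm__ ("my_memcpy");

void
copy_bytes (char *dst, const char *src, size_t n)
{
  memcpy (dst, src, n);   /* Emitted as a call to my_memcpy.  */
}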
12408
12409 /* Return true if DECL is a builtin that expands to a constant or similarly
12410 simple code. */
12411 bool
12412 is_simple_builtin (tree decl)
12413 {
12414 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
12415 switch (DECL_FUNCTION_CODE (decl))
12416 {
12417 /* Builtins that expand to constants. */
12418 case BUILT_IN_CONSTANT_P:
12419 case BUILT_IN_EXPECT:
12420 case BUILT_IN_OBJECT_SIZE:
12421 case BUILT_IN_UNREACHABLE:
12422 /* Simple register moves or loads from stack. */
12423 case BUILT_IN_ASSUME_ALIGNED:
12424 case BUILT_IN_RETURN_ADDRESS:
12425 case BUILT_IN_EXTRACT_RETURN_ADDR:
12426 case BUILT_IN_FROB_RETURN_ADDR:
12427 case BUILT_IN_RETURN:
12428 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
12429 case BUILT_IN_FRAME_ADDRESS:
12430 case BUILT_IN_VA_END:
12431 case BUILT_IN_STACK_SAVE:
12432 case BUILT_IN_STACK_RESTORE:
12433 /* Exception state returns or moves registers around. */
12434 case BUILT_IN_EH_FILTER:
12435 case BUILT_IN_EH_POINTER:
12436 case BUILT_IN_EH_COPY_VALUES:
12437 return true;
12438
12439 default:
12440 return false;
12441 }
12442
12443 return false;
12444 }
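
/* A user-level sketch, not part of GCC, showing a few of the "expands to
   a constant" cases listed above. */
#include <stdio.h>

int
main (void)
{
  int x = 42;

  printf ("%d\n", __builtin_constant_p (42));   /* 1: a literal.  */
  /* 0 without optimization; may become 1 once optimizers see x == 42.  */
  printf ("%d\n", __builtin_constant_p (x));
  printf ("%zu\n", __builtin_object_size ("abc", 0));   /* 4, incl. NUL.  */
  return 0;
}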
12445
12446 /* Return true if DECL is a builtin that is not expensive, i.e., it will
12447 most probably be expanded inline into reasonably simple code. This is
12448 a superset of is_simple_builtin. */
12449 bool
12450 is_inexpensive_builtin (tree decl)
12451 {
12452 if (!decl)
12453 return false;
12454 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
12455 return true;
12456 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
12457 switch (DECL_FUNCTION_CODE (decl))
12458 {
12459 case BUILT_IN_ABS:
12460 case BUILT_IN_ALLOCA:
12461 case BUILT_IN_ALLOCA_WITH_ALIGN:
12462 case BUILT_IN_BSWAP16:
12463 case BUILT_IN_BSWAP32:
12464 case BUILT_IN_BSWAP64:
12465 case BUILT_IN_CLZ:
12466 case BUILT_IN_CLZIMAX:
12467 case BUILT_IN_CLZL:
12468 case BUILT_IN_CLZLL:
12469 case BUILT_IN_CTZ:
12470 case BUILT_IN_CTZIMAX:
12471 case BUILT_IN_CTZL:
12472 case BUILT_IN_CTZLL:
12473 case BUILT_IN_FFS:
12474 case BUILT_IN_FFSIMAX:
12475 case BUILT_IN_FFSL:
12476 case BUILT_IN_FFSLL:
12477 case BUILT_IN_IMAXABS:
12478 case BUILT_IN_FINITE:
12479 case BUILT_IN_FINITEF:
12480 case BUILT_IN_FINITEL:
12481 case BUILT_IN_FINITED32:
12482 case BUILT_IN_FINITED64:
12483 case BUILT_IN_FINITED128:
12484 case BUILT_IN_FPCLASSIFY:
12485 case BUILT_IN_ISFINITE:
12486 case BUILT_IN_ISINF_SIGN:
12487 case BUILT_IN_ISINF:
12488 case BUILT_IN_ISINFF:
12489 case BUILT_IN_ISINFL:
12490 case BUILT_IN_ISINFD32:
12491 case BUILT_IN_ISINFD64:
12492 case BUILT_IN_ISINFD128:
12493 case BUILT_IN_ISNAN:
12494 case BUILT_IN_ISNANF:
12495 case BUILT_IN_ISNANL:
12496 case BUILT_IN_ISNAND32:
12497 case BUILT_IN_ISNAND64:
12498 case BUILT_IN_ISNAND128:
12499 case BUILT_IN_ISNORMAL:
12500 case BUILT_IN_ISGREATER:
12501 case BUILT_IN_ISGREATEREQUAL:
12502 case BUILT_IN_ISLESS:
12503 case BUILT_IN_ISLESSEQUAL:
12504 case BUILT_IN_ISLESSGREATER:
12505 case BUILT_IN_ISUNORDERED:
12506 case BUILT_IN_VA_ARG_PACK:
12507 case BUILT_IN_VA_ARG_PACK_LEN:
12508 case BUILT_IN_VA_COPY:
12509 case BUILT_IN_TRAP:
12510 case BUILT_IN_SAVEREGS:
12511 case BUILT_IN_POPCOUNTL:
12512 case BUILT_IN_POPCOUNTLL:
12513 case BUILT_IN_POPCOUNTIMAX:
12514 case BUILT_IN_POPCOUNT:
12515 case BUILT_IN_PARITYL:
12516 case BUILT_IN_PARITYLL:
12517 case BUILT_IN_PARITYIMAX:
12518 case BUILT_IN_PARITY:
12519 case BUILT_IN_LABS:
12520 case BUILT_IN_LLABS:
12521 case BUILT_IN_PREFETCH:
12522 case BUILT_IN_ACC_ON_DEVICE:
12523 return true;
12524
12525 default:
12526 return is_simple_builtin (decl);
12527 }
12528
12529 return false;
12530 }
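
/* A user-level sketch, not part of GCC, exercising a few of the
   "inexpensive" cases listed above; each usually expands to a handful of
   instructions (the bit-count builtins may fall back to a libgcc call on
   targets without the instruction). */
#include <stdio.h>

int
main (void)
{
  unsigned v = 0x00F0u;

  printf ("%d\n", __builtin_popcount (v));   /* 4 bits set.  */
  printf ("%d\n", __builtin_clz (v));        /* 24 for 32-bit int.  */
  printf ("%d\n", __builtin_ffs (v));        /* 5: lowest set bit.  */
  printf ("%d\n", __builtin_parity (v));     /* 0: even bit count.  */
  return 0;
}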