/* Expand builtin functions.
   Copyright (C) 1988-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "alias.h"
#include "symtab.h"
#include "tree.h"
#include "fold-const.h"
#include "stringpool.h"
#include "stor-layout.h"
#include "calls.h"
#include "varasm.h"
#include "tree-object-size.h"
#include "realmpfr.h"
#include "predict.h"
#include "hard-reg-set.h"
#include "function.h"
#include "cfgrtl.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "gimple.h"
#include "flags.h"
#include "regs.h"
#include "except.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "emit-rtl.h"
#include "stmt.h"
#include "expr.h"
#include "insn-codes.h"
#include "optabs.h"
#include "libfuncs.h"
#include "recog.h"
#include "output.h"
#include "typeclass.h"
#include "tm_p.h"
#include "target.h"
#include "langhooks.h"
#include "tree-ssanames.h"
#include "tree-dfa.h"
#include "value-prof.h"
#include "diagnostic-core.h"
#include "builtins.h"
#include "asan.h"
#include "cilk.h"
#include "ipa-ref.h"
#include "lto-streamer.h"
#include "cgraph.h"
#include "tree-chkp.h"
#include "rtl-chkp.h"
#include "gomp-constants.h"


static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));

struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
struct target_builtins *this_target_builtins = &default_target_builtins;
#endif

/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN

/* Set up an array of builtin_info_type, making sure each element's decl
   is initialized to NULL_TREE.  */
builtin_info_type builtin_info[(int)END_BUILTINS];

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;

static rtx c_readstr (const char *, machine_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
static rtx result_vector (int, rtx);
#endif
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static void expand_errno_check (tree, rtx);
static rtx expand_builtin_mathfn (tree, rtx, rtx);
static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx expand_builtin_memcmp (tree, rtx, machine_mode);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, machine_mode);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_memcpy_with_bounds (tree, rtx);
static rtx expand_builtin_memcpy_args (tree, tree, tree, rtx, tree);
static rtx expand_builtin_mempcpy (tree, rtx, machine_mode);
static rtx expand_builtin_mempcpy_with_bounds (tree, rtx, machine_mode);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
					machine_mode, int, tree);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, machine_mode);
static rtx expand_builtin_memset (tree, rtx, machine_mode);
static rtx expand_builtin_memset_with_bounds (tree, rtx, machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, machine_mode);
static rtx expand_builtin_alloca (tree, bool);
static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree fold_builtin_nan (tree, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static bool integer_valued_real_p (tree);
static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_sqrt (location_t, tree, tree);
static tree fold_builtin_cbrt (location_t, tree, tree);
static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
static tree fold_builtin_cos (location_t, tree, tree, tree);
static tree fold_builtin_cosh (location_t, tree, tree, tree);
static tree fold_builtin_tan (tree, tree);
static tree fold_builtin_trunc (location_t, tree, tree);
static tree fold_builtin_floor (location_t, tree, tree);
static tree fold_builtin_ceil (location_t, tree, tree);
static tree fold_builtin_round (location_t, tree, tree);
static tree fold_builtin_int_roundingfn (location_t, tree, tree);
static tree fold_builtin_bitop (tree, tree);
static tree fold_builtin_strchr (location_t, tree, tree, tree);
static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_strcmp (location_t, tree, tree);
static tree fold_builtin_strncmp (location_t, tree, tree, tree);
static tree fold_builtin_signbit (location_t, tree, tree);
static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
					enum tree_code);
static tree fold_builtin_0 (location_t, tree);
static tree fold_builtin_1 (location_t, tree, tree);
static tree fold_builtin_2 (location_t, tree, tree, tree);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
static tree fold_builtin_varargs (location_t, tree, tree*, int);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strstr (location_t, tree, tree, tree);
static tree fold_builtin_strrchr (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
				      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);

unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
char target_percent_c[3];
char target_percent_s[3];
char target_percent_s_newline[4];
static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
			  const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_arg2 (tree, tree, tree,
			  int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_arg3 (tree, tree, tree, tree,
			  int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_sincos (tree, tree, tree);
static tree do_mpfr_bessel_n (tree, tree, tree,
			      int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
			      const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);

/* Return true if NAME starts with __builtin_, __sync_ or __atomic_,
   or (when Cilk Plus is enabled) names one of its runtime helpers.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  if (flag_cilkplus
      && (!strcmp (name, "__cilkrts_detach")
	  || !strcmp (name, "__cilkrts_pop_frame")))
    return true;
  return false;
}
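
/* Illustrative only (not exercised by the compiler itself):
   is_builtin_name ("__builtin_memcpy") and
   is_builtin_name ("__sync_fetch_and_add") both return true, while
   is_builtin_name ("memcpy") returns false, because the comparison
   looks only at the reserved prefixes checked above.  */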


/* Return true if DECL is a function symbol representing a built-in.  */

bool
is_builtin_fn (tree decl)
{
  return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
}

/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

static bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}

/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and
   N in *BITPOSP and return true.  Otherwise return false and store
   BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
			     &mode, &unsignedp, &volatilep, true);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
#ifdef CONSTANT_ALIGNMENT
      if (CONSTANT_CLASS_P (exp))
	align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
#endif
      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
    {
      align = TYPE_ALIGN (TREE_TYPE (exp));
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;
      unsigned HOST_WIDE_INT ptr_bitmask = ~0;

      /* If the address is explicitly aligned, handle that.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
	  ptr_bitmask *= BITS_PER_UNIT;
	  align = ptr_bitmask & -ptr_bitmask;
	  addr = TREE_OPERAND (addr, 0);
	}

      known_alignment
	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* Re-apply explicit alignment to the bitpos.  */
      ptr_bitpos &= ptr_bitmask;

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	{
	  if (TMR_INDEX (exp))
	    {
	      unsigned HOST_WIDE_INT step = 1;
	      if (TMR_STEP (exp))
		step = TREE_INT_CST_LOW (TMR_STEP (exp));
	      align = MIN (align, (step & -step) * BITS_PER_UNIT);
	    }
	  if (TMR_INDEX2 (exp))
	    align = BITS_PER_UNIT;
	  known_alignment = false;
	}

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation.  */
      if (!addr_p && !known_alignment
	  && TYPE_ALIGN (TREE_TYPE (exp)) > align)
	align = TYPE_ALIGN (TREE_TYPE (exp));
      else
	{
	  /* Else adjust bitpos accordingly.  */
	  bitpos += ptr_bitpos;
	  if (TREE_CODE (exp) == MEM_REF
	      || TREE_CODE (exp) == TARGET_MEM_REF)
	    bitpos += mem_ref_offset (exp).to_short_addr () * BITS_PER_UNIT;
	}
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CSTs are the only constant objects we allow not to be
	 wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
#ifdef CONSTANT_ALIGNMENT
      if (CONSTANT_CLASS_P (exp))
	align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
#endif
      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
	{
	  unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	  if (inner)
	    align = MIN (align, inner);
	}
    }

  *alignp = align;
  *bitposp = bitpos & (*alignp - 1);
  return known_alignment;
}

/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be determined,
   store M in *ALIGNP and N in *BITPOSP and return true.  Otherwise return
   false and store BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}

/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);
  return align;
}
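
/* A worked example of the adjustment above, for illustration only:
   if get_object_alignment_1 reports align == 256 and bitpos == 24
   (i.e. the address is known to be 24 bits past a 256-bit boundary),
   then bitpos & -bitpos == 8, so all we can guarantee about the address
   itself is 8-bit (one byte) alignment.  */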

/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be determined,
   store M in *ALIGNP and N in *BITPOSP and return true.  Return false if
   the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
			 unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
				   alignp, bitposp, true);
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
	{
	  *bitposp = ptr_misalign * BITS_PER_UNIT;
	  *alignp = ptr_align * BITS_PER_UNIT;
	  /* We cannot really tell whether this result is an approximation.  */
	  return true;
	}
      else
	{
	  *bitposp = 0;
	  *alignp = BITS_PER_UNIT;
	  return false;
	}
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
		  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}

/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If EXP is not a pointer, BITS_PER_UNIT is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);

  return align;
}

/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we.  */

tree
c_strlen (tree src, int only_value)
{
  tree offset_node;
  HOST_WIDE_INT offset;
  int max;
  const char *ptr;
  location_t loc;

  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  loc = EXPR_LOC_OR_LOC (src, input_location);

  src = string_constant (src, &offset_node);
  if (src == 0)
    return NULL_TREE;

  max = TREE_STRING_LENGTH (src) - 1;
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      int i;

      for (i = 0; i < max; i++)
	if (ptr[i] == 0)
	  return NULL_TREE;

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  This would perhaps not be valid if we were dealing
	 with named arrays in addition to literal string constants.  */

      return size_diffop_loc (loc, size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (offset_node == 0)
    offset = 0;
  else if (! tree_fits_shwi_p (offset_node))
    offset = -1;
  else
    offset = tree_to_shwi (offset_node);

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
	  && !TREE_NO_WARNING (src))
	{
	  warning_at (loc, 0, "offset outside bounds of constant string");
	  TREE_NO_WARNING (src) = 1;
	}
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return ssize_int (strlen (ptr + offset));
}
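
/* Illustrative only: for SRC denoting the constant "hello" this returns
   ssize_int (5); for &"hello"[2], i.e. the same string with a constant
   offset of 2, it returns ssize_int (3), computed by the final strlen
   call above starting at PTR + 2.  */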

/* Return a char pointer for a C string if it is a string constant
   or sum of string constant and integer constant.  */

const char *
c_getstr (tree src)
{
  tree offset_node;

  src = string_constant (src, &offset_node);
  if (src == 0)
    return 0;

  if (offset_node == 0)
    return TREE_STRING_POINTER (src);
  else if (!tree_fits_uhwi_p (offset_node)
	   || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
    return 0;

  return TREE_STRING_POINTER (src) + tree_to_uhwi (offset_node);
}
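
/* Illustrative only: for the expression &"hello"[1] this returns a host
   pointer to "ello", while a non-constant source, or an offset past the
   end of the string, yields 0.  */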

/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, machine_mode mode)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      if (ch)
	ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}
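
/* Illustrative only: on a little-endian target, c_readstr ("ab", HImode)
   places 'a' (0x61) in bits 0-7 and 'b' (0x62) in bits 8-15, yielding the
   constant 0x6261.  Once a zero byte has been read, CH stays zero and the
   remaining bytes of the mode are filled with zeros.  */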

/* Cast a target constant CST to target CHAR and if that value fits into
   host char type, return zero and put that value into variable pointed to by
   P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* We do not care at this point whether the value fits.  */
  val = TREE_INT_CST_LOW (cst);

  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}
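
/* Illustrative only: for an INTEGER_CST with value 65 this stores 'A' in
   *P and returns 0; a value that does not survive the truncation to both
   target and host char width unchanged makes the function return 1.  */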

/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
	  && (TREE_CODE (exp) == PARM_DECL
	      || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}

/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;

#ifdef INITIAL_FRAME_ADDRESS_RTX
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
#else
  rtx tem;

  /* For a zero count with __builtin_return_address, we don't care what
     frame address we return, because target-specific definitions will
     override us.  Therefore frame pointer elimination is OK, and using
     the soft frame pointer is OK.

     For a nonzero count, or a zero count with __builtin_frame_address,
     we require a stable offset from the current frame pointer to the
     previous one, so we must use the hard frame pointer, and
     we must disable frame pointer elimination.  */
  if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    tem = frame_pointer_rtx;
  else
    {
      tem = hard_frame_pointer_rtx;

      /* Tell reload not to eliminate the frame pointer.  */
      crtl->accesses_prior_frames = 1;
    }
#endif

  /* Some machines need special handling before we can access
     arbitrary frames.  For example, on the SPARC, we must first flush
     all register windows to the stack.  */
#ifdef SETUP_FRAME_ADDRESSES
  if (count > 0)
    SETUP_FRAME_ADDRESSES ();
#endif

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
  if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
#ifdef DYNAMIC_CHAIN_ADDRESS
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
#ifdef FRAME_ADDR_RTX
    return FRAME_ADDR_RTX (tem);
#else
    return tem;
#endif

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
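
/* Illustrative only: a source-level __builtin_frame_address (2) reaches
   this function with COUNT == 2, so the loop above dereferences the
   dynamic chain twice before returning the resulting frame address
   (possibly adjusted by FRAME_ADDR_RTX on targets such as SPARC).  */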

/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					   GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (Pmode, buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
#ifdef HAVE_builtin_setjmp_setup
  if (HAVE_builtin_setjmp_setup)
    emit_insn (gen_builtin_setjmp_setup (buf_addr));
#endif

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}
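
/* To summarize the layout established above (a description of this code,
   not a guarantee beyond it): word 0 of the buffer holds the frame value,
   word 1 holds the address of RECEIVER_LABEL, and everything from word 2
   onwards is the machine-dependent stack save area written by
   emit_stack_save.  */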

/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = targetm.calls.static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
#ifdef HAVE_nonlocal_goto
  if (! HAVE_nonlocal_goto)
#endif
    {
      /* First adjust our frame pointer to its actual value.  It was
	 previously set to the start of the virtual area corresponding to
	 the stacked variables when we branched here and now needs to be
	 adjusted to the actual hardware fp value.

	 Assignments to virtual registers are converted by
	 instantiate_virtual_regs into the corresponding assignment
	 to the underlying register (fp in this case) that makes
	 the original assignment true.
	 So the following insn will actually be decrementing fp by
	 STARTING_FRAME_OFFSET.  */
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

      /* Restoring the frame pointer also modifies the hard frame pointer.
	 Mark it used (so that the previous assignment remains live once
	 the frame pointer is eliminated) and clobbered (to represent the
	 implicit update from the assignment).  */
      emit_use (hard_frame_pointer_rtx);
      emit_clobber (hard_frame_pointer_rtx);
    }

#if !HARD_FRAME_POINTER_IS_ARG_POINTER
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }
#endif

#ifdef HAVE_builtin_setjmp_receiver
  if (receiver_label != NULL && HAVE_builtin_setjmp_receiver)
    emit_insn (gen_builtin_setjmp_receiver (receiver_label));
  else
#endif
#ifdef HAVE_nonlocal_goto_receiver
    if (HAVE_nonlocal_goto_receiver)
      emit_insn (gen_nonlocal_goto_receiver ());
    else
#endif
      { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}

/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack;
  rtx_insn *insn, *last;
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to current
     function  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
#ifdef HAVE_builtin_longjmp
  if (HAVE_builtin_longjmp)
    emit_insn (gen_builtin_longjmp (buf_addr));
  else
#endif
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
#ifdef HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
      else
#endif
	{
	  lab = copy_to_reg (lab);

	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}
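
/* A note for readers, mirroring expand_builtin_setjmp_setup above: the
   buffer is read back with the same layout it was written with, i.e. FP
   from word 0, the receiver label from word 1 and the saved stack
   pointer from word 2 onwards.  */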

/* Return true if more arguments remain in the const call expression
   iterator ITER.  */

static inline bool
more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
{
  return (iter->i < iter->n);
}

/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.  */

static bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = false;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  do
    {
      code = (enum tree_code) va_arg (ap, int);
      switch (code)
	{
	case 0:
	  /* This signifies an ellipsis, any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink, if no arguments remain, return
	     true, otherwise return false.  */
	  res = !more_const_call_expr_args_p (&iter);
	  goto end;
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = next_const_call_expr_arg (&iter);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }
  while (1);

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  return res;
}
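
/* Usage, as seen elsewhere in this file: a call such as
   validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
   accepts exactly two pointer arguments, while terminating the list
   with 0 instead of VOID_TYPE would additionally allow trailing
   arguments of any type.  */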

/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp;
  rtx_insn *insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (Pmode, r_save_area,
				     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

#ifdef HAVE_nonlocal_goto
  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (HAVE_nonlocal_goto)
    emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
#endif
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}

/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to the current value.  This is
   also called directly by the SJLJ exception handling code.  */

void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
		   memory_address
		   (sa_mode,
		    plus_constant (Pmode, buf_addr,
				   2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}

/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

#ifdef HAVE_prefetch
  if (HAVE_prefetch)
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (CODE_FOR_prefetch, 3, ops))
	return;
    }
#endif

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
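
/* Illustrative source-level usage: __builtin_prefetch (p, 1, 3) requests
   a write prefetch with maximum temporal locality and is expanded through
   the target's "prefetch" pattern above when one exists; otherwise only
   the side effects of evaluating P survive.  */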

/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ...).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is not a resolved SAVE_EXPR, MEM_ATTRS can still be derived
     from its expression; for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
		     build_array_type (char_type_node,
				       build_range_type (sizetype,
							 size_one_node, len)),
		     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	   && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
						     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
			 build_array_type (char_type_node,
					   build_range_type (sizetype,
							     size_zero_node,
							     NULL)),
			 exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}

/* Built-in functions to perform an untyped call and return.  */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)

/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;
  machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
	size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    mode = targetm.calls.get_raw_arg_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  {
	    apply_args_mode[regno] = VOIDmode;
	  }
    }
  return size;
}
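
/* The resulting block layout, for orientation (a description of the code
   above, not a guarantee beyond it): the incoming arg-pointer word comes
   first, then the structure value address if it is passed explicitly,
   then each argument register in turn, each placed at the next offset
   aligned to its mode.  The total is cached in a function-local
   static.  */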

/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;
  machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (targetm.calls.function_value_regno_p (regno))
	  {
	    mode = targetm.calls.get_raw_result_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  apply_result_mode[regno] = VOIDmode;

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}

#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  machine_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (mem, reg)
			    : gen_rtx_SET (reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
#endif /* HAVE_untyped_call or HAVE_untyped_return */

/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  machine_mode mode;
  rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
  /* We need the pointer to the arguments as the caller actually passed
     them, not as we might have pretended they were passed.  Make sure
     it's a valid operand, as emit_move_insn isn't expected to handle
     a PLUS.  */
  if (STACK_GROWS_DOWNWARD)
    tem
      = force_operand (plus_constant (Pmode, tem,
				      crtl->args.pretend_args_size),
		       NULL_RTX);
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
		      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}

/* __builtin_apply_args returns a block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;

    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    rtx_insn *seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
	&& REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}

/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.  */

static rtx
expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
{
  int size, align, regno;
  machine_mode mode;
  rtx incoming_args, result, reg, dest, src;
  rtx_call_insn *call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;
  rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);

  arguments = convert_memory_address (Pmode, arguments);

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
  if (!STACK_GROWS_DOWNWARD)
    incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
					 incoming_args, 0, OPTAB_LIB_WIDEN);

  /* Push a new argument block and copy the arguments.  Do not allow
     the (potential) memcpy call below to interfere with our stack
     manipulations.  */
  do_pending_stack_adjust ();
  NO_DEFER_POP;

  /* Save the stack with nonlocal if available.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
  else
#endif
    emit_stack_save (SAVE_BLOCK, &old_stack_level);

  /* Allocate a block of memory onto the stack and copy the memory
     arguments to the outgoing arguments address.  We can pass TRUE
     as the 4th argument because we just saved the stack pointer
     and will restore it right after the call.  */
  allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);

  /* Set DRAP flag to true, even though allocate_dynamic_stack_space
     may have already set current_function_calls_alloca to true.
     current_function_calls_alloca won't be set if argsize is zero,
     so we have to guarantee need_drap is true here.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  dest = virtual_outgoing_args_rtx;
  if (!STACK_GROWS_DOWNWARD)
    {
      if (CONST_INT_P (argsize))
	dest = plus_constant (Pmode, dest, -INTVAL (argsize));
      else
	dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
    }
  dest = gen_rtx_MEM (BLKmode, dest);
  set_mem_align (dest, PARM_BOUNDARY);
  src = gen_rtx_MEM (BLKmode, incoming_args);
  set_mem_align (src, PARM_BOUNDARY);
  emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);

  /* Refer to the argument block.  */
  apply_args_size ();
  arguments = gen_rtx_MEM (BLKmode, arguments);
  set_mem_align (arguments, PARM_BOUNDARY);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, regno);
	emit_move_insn (reg, adjust_address (arguments, mode, size));
	use_reg (&call_fusage, reg);
	size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value, adjust_address (arguments, Pmode, size));
      emit_move_insn (struct_value, value);
      if (REG_P (struct_value))
	use_reg (&call_fusage, struct_value);
      size += GET_MODE_SIZE (Pmode);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);

1688 /* Ensure the address is valid. A SYMBOL_REF is already valid, so nothing
1689 needs to be done, and we don't want to load it into a register as an
1690 optimization, because prepare_call_address already did so if needed. */
1691 if (GET_CODE (function) != SYMBOL_REF)
1692 function = memory_address (FUNCTION_MODE, function);
1693
1694 /* Generate the actual call instruction and save the return value. */
1695 #ifdef HAVE_untyped_call
1696 if (HAVE_untyped_call)
1697 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1698 result, result_vector (1, result)));
1699 else
1700 #endif
1701 #ifdef HAVE_call_value
1702 if (HAVE_call_value)
1703 {
1704 rtx valreg = 0;
1705
1706 /* Locate the unique return register. It is not possible to
1707 express a call that sets more than one return register using
1708 call_value; use untyped_call for that. In fact, untyped_call
1709 only needs to save the return registers in the given block. */
1710 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1711 if ((mode = apply_result_mode[regno]) != VOIDmode)
1712 {
1713 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1714
1715 valreg = gen_rtx_REG (mode, regno);
1716 }
1717
1718 emit_call_insn (GEN_CALL_VALUE (valreg,
1719 gen_rtx_MEM (FUNCTION_MODE, function),
1720 const0_rtx, NULL_RTX, const0_rtx));
1721
1722 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1723 }
1724 else
1725 #endif
1726 gcc_unreachable ();
1727
1728 /* Find the CALL insn we just emitted, and attach the register usage
1729 information. */
1730 call_insn = last_call_insn ();
1731 add_function_usage_to (call_insn, call_fusage);
1732
1733 /* Restore the stack. */
1734 #ifdef HAVE_save_stack_nonlocal
1735 if (HAVE_save_stack_nonlocal)
1736 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1737 else
1738 #endif
1739 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1740 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1741
1742 OK_DEFER_POP;
1743
1744 /* Return the address of the result block. */
1745 result = copy_addr_to_reg (XEXP (result, 0));
1746 return convert_memory_address (ptr_mode, result);
1747 }
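/* For illustration only: together with expand_builtin_return below,
   this expansion backs the documented GCC extensions for constructing
   calls, used along the lines of

     void *forward (void)
     {
       void *args = __builtin_apply_args ();
       void *ret = __builtin_apply ((void (*) ()) target_fn, args, 64);
       __builtin_return (ret);
     }

   where target_fn and the argument-block size 64 are placeholders
   supplied by the user, not values computed here.  */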
1748
1749 /* Perform an untyped return. */
1750
1751 static void
1752 expand_builtin_return (rtx result)
1753 {
1754 int size, align, regno;
1755 machine_mode mode;
1756 rtx reg;
1757 rtx_insn *call_fusage = 0;
1758
1759 result = convert_memory_address (Pmode, result);
1760
1761 apply_result_size ();
1762 result = gen_rtx_MEM (BLKmode, result);
1763
1764 #ifdef HAVE_untyped_return
1765 if (HAVE_untyped_return)
1766 {
1767 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1768 emit_barrier ();
1769 return;
1770 }
1771 #endif
1772
1773 /* Restore the return value and note that each value is used. */
1774 size = 0;
1775 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1776 if ((mode = apply_result_mode[regno]) != VOIDmode)
1777 {
1778 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1779 if (size % align != 0)
1780 size = CEIL (size, align) * align;
1781 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1782 emit_move_insn (reg, adjust_address (result, mode, size));
1783
1784 push_to_sequence (call_fusage);
1785 emit_use (reg);
1786 call_fusage = get_insns ();
1787 end_sequence ();
1788 size += GET_MODE_SIZE (mode);
1789 }
1790
1791 /* Put the USE insns before the return. */
1792 emit_insn (call_fusage);
1793
1794 /* Return whatever value was restored by jumping directly to the end
1795 of the function. */
1796 expand_naked_return ();
1797 }
1798
1799 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1800
1801 static enum type_class
1802 type_to_class (tree type)
1803 {
1804 switch (TREE_CODE (type))
1805 {
1806 case VOID_TYPE: return void_type_class;
1807 case INTEGER_TYPE: return integer_type_class;
1808 case ENUMERAL_TYPE: return enumeral_type_class;
1809 case BOOLEAN_TYPE: return boolean_type_class;
1810 case POINTER_TYPE: return pointer_type_class;
1811 case REFERENCE_TYPE: return reference_type_class;
1812 case OFFSET_TYPE: return offset_type_class;
1813 case REAL_TYPE: return real_type_class;
1814 case COMPLEX_TYPE: return complex_type_class;
1815 case FUNCTION_TYPE: return function_type_class;
1816 case METHOD_TYPE: return method_type_class;
1817 case RECORD_TYPE: return record_type_class;
1818 case UNION_TYPE:
1819 case QUAL_UNION_TYPE: return union_type_class;
1820 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1821 ? string_type_class : array_type_class);
1822 case LANG_TYPE: return lang_type_class;
1823 default: return no_type_class;
1824 }
1825 }
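/* For illustration only: at the source level the classification above
   surfaces as, e.g.,

     __builtin_classify_type (42)    -> integer_type_class
     __builtin_classify_type (3.14)  -> real_type_class
     __builtin_classify_type ("ab")  -> pointer_type_class, since the
                                        array argument decays

   with the numeric class values given by enum type_class in
   typeclass.h.  */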
1826
1827 /* Expand a call EXP to __builtin_classify_type. */
1828
1829 static rtx
1830 expand_builtin_classify_type (tree exp)
1831 {
1832 if (call_expr_nargs (exp))
1833 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1834 return GEN_INT (no_type_class);
1835 }
1836
1837 /* This helper macro, meant to be used in mathfn_built_in_1 below,
1838 determines which among a set of three builtin math functions is
1839 appropriate for a given type mode. The `F' and `L' cases are
1840 automatically generated from the `double' case. */
1841 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1842 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1843 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1844 fcodel = BUILT_IN_MATHFN##L ; break;
1845 /* Similar to above, but appends _R after any F/L suffix. */
1846 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1847 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1848 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1849 fcodel = BUILT_IN_MATHFN##L_R ; break;
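/* For example, CASE_MATHFN (BUILT_IN_SIN) expands to

     case BUILT_IN_SIN: case BUILT_IN_SINF: case BUILT_IN_SINL:
       fcode = BUILT_IN_SIN; fcodef = BUILT_IN_SINF;
       fcodel = BUILT_IN_SINL; break;

   covering the double, float and long double variants at once.  */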
1850
1851 /* Return the mathematical function equivalent to FN but operating directly on TYPE,
1852 if available. If IMPLICIT is true use the implicit builtin declaration,
1853 otherwise use the explicit declaration. If we can't do the conversion,
1854 return zero. */
1855
1856 static tree
1857 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit_p)
1858 {
1859 enum built_in_function fcode, fcodef, fcodel, fcode2;
1860
1861 switch (fn)
1862 {
1863 CASE_MATHFN (BUILT_IN_ACOS)
1864 CASE_MATHFN (BUILT_IN_ACOSH)
1865 CASE_MATHFN (BUILT_IN_ASIN)
1866 CASE_MATHFN (BUILT_IN_ASINH)
1867 CASE_MATHFN (BUILT_IN_ATAN)
1868 CASE_MATHFN (BUILT_IN_ATAN2)
1869 CASE_MATHFN (BUILT_IN_ATANH)
1870 CASE_MATHFN (BUILT_IN_CBRT)
1871 CASE_MATHFN (BUILT_IN_CEIL)
1872 CASE_MATHFN (BUILT_IN_CEXPI)
1873 CASE_MATHFN (BUILT_IN_COPYSIGN)
1874 CASE_MATHFN (BUILT_IN_COS)
1875 CASE_MATHFN (BUILT_IN_COSH)
1876 CASE_MATHFN (BUILT_IN_DREM)
1877 CASE_MATHFN (BUILT_IN_ERF)
1878 CASE_MATHFN (BUILT_IN_ERFC)
1879 CASE_MATHFN (BUILT_IN_EXP)
1880 CASE_MATHFN (BUILT_IN_EXP10)
1881 CASE_MATHFN (BUILT_IN_EXP2)
1882 CASE_MATHFN (BUILT_IN_EXPM1)
1883 CASE_MATHFN (BUILT_IN_FABS)
1884 CASE_MATHFN (BUILT_IN_FDIM)
1885 CASE_MATHFN (BUILT_IN_FLOOR)
1886 CASE_MATHFN (BUILT_IN_FMA)
1887 CASE_MATHFN (BUILT_IN_FMAX)
1888 CASE_MATHFN (BUILT_IN_FMIN)
1889 CASE_MATHFN (BUILT_IN_FMOD)
1890 CASE_MATHFN (BUILT_IN_FREXP)
1891 CASE_MATHFN (BUILT_IN_GAMMA)
1892 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1893 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1894 CASE_MATHFN (BUILT_IN_HYPOT)
1895 CASE_MATHFN (BUILT_IN_ILOGB)
1896 CASE_MATHFN (BUILT_IN_ICEIL)
1897 CASE_MATHFN (BUILT_IN_IFLOOR)
1898 CASE_MATHFN (BUILT_IN_INF)
1899 CASE_MATHFN (BUILT_IN_IRINT)
1900 CASE_MATHFN (BUILT_IN_IROUND)
1901 CASE_MATHFN (BUILT_IN_ISINF)
1902 CASE_MATHFN (BUILT_IN_J0)
1903 CASE_MATHFN (BUILT_IN_J1)
1904 CASE_MATHFN (BUILT_IN_JN)
1905 CASE_MATHFN (BUILT_IN_LCEIL)
1906 CASE_MATHFN (BUILT_IN_LDEXP)
1907 CASE_MATHFN (BUILT_IN_LFLOOR)
1908 CASE_MATHFN (BUILT_IN_LGAMMA)
1909 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1910 CASE_MATHFN (BUILT_IN_LLCEIL)
1911 CASE_MATHFN (BUILT_IN_LLFLOOR)
1912 CASE_MATHFN (BUILT_IN_LLRINT)
1913 CASE_MATHFN (BUILT_IN_LLROUND)
1914 CASE_MATHFN (BUILT_IN_LOG)
1915 CASE_MATHFN (BUILT_IN_LOG10)
1916 CASE_MATHFN (BUILT_IN_LOG1P)
1917 CASE_MATHFN (BUILT_IN_LOG2)
1918 CASE_MATHFN (BUILT_IN_LOGB)
1919 CASE_MATHFN (BUILT_IN_LRINT)
1920 CASE_MATHFN (BUILT_IN_LROUND)
1921 CASE_MATHFN (BUILT_IN_MODF)
1922 CASE_MATHFN (BUILT_IN_NAN)
1923 CASE_MATHFN (BUILT_IN_NANS)
1924 CASE_MATHFN (BUILT_IN_NEARBYINT)
1925 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1926 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1927 CASE_MATHFN (BUILT_IN_POW)
1928 CASE_MATHFN (BUILT_IN_POWI)
1929 CASE_MATHFN (BUILT_IN_POW10)
1930 CASE_MATHFN (BUILT_IN_REMAINDER)
1931 CASE_MATHFN (BUILT_IN_REMQUO)
1932 CASE_MATHFN (BUILT_IN_RINT)
1933 CASE_MATHFN (BUILT_IN_ROUND)
1934 CASE_MATHFN (BUILT_IN_SCALB)
1935 CASE_MATHFN (BUILT_IN_SCALBLN)
1936 CASE_MATHFN (BUILT_IN_SCALBN)
1937 CASE_MATHFN (BUILT_IN_SIGNBIT)
1938 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1939 CASE_MATHFN (BUILT_IN_SIN)
1940 CASE_MATHFN (BUILT_IN_SINCOS)
1941 CASE_MATHFN (BUILT_IN_SINH)
1942 CASE_MATHFN (BUILT_IN_SQRT)
1943 CASE_MATHFN (BUILT_IN_TAN)
1944 CASE_MATHFN (BUILT_IN_TANH)
1945 CASE_MATHFN (BUILT_IN_TGAMMA)
1946 CASE_MATHFN (BUILT_IN_TRUNC)
1947 CASE_MATHFN (BUILT_IN_Y0)
1948 CASE_MATHFN (BUILT_IN_Y1)
1949 CASE_MATHFN (BUILT_IN_YN)
1950
1951 default:
1952 return NULL_TREE;
1953 }
1954
1955 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1956 fcode2 = fcode;
1957 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1958 fcode2 = fcodef;
1959 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1960 fcode2 = fcodel;
1961 else
1962 return NULL_TREE;
1963
1964 if (implicit_p && !builtin_decl_implicit_p (fcode2))
1965 return NULL_TREE;
1966
1967 return builtin_decl_explicit (fcode2);
1968 }
1969
1970 /* Like mathfn_built_in_1(), but always use the implicit builtin declarations. */
1971
1972 tree
1973 mathfn_built_in (tree type, enum built_in_function fn)
1974 {
1975 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1976 }
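/* For illustration only: a caller wanting the float variant of sqrt
   would write

     tree fn = mathfn_built_in (float_type_node, BUILT_IN_SQRT);

   and receive the decl for sqrtf, or NULL_TREE when no implicit
   declaration is available.  */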
1977
1978 /* If errno must be maintained, expand the RTL to check if the result,
1979 TARGET, of a built-in function call, EXP, is NaN, and if so set
1980 errno to EDOM. */
1981
1982 static void
1983 expand_errno_check (tree exp, rtx target)
1984 {
1985 rtx_code_label *lab = gen_label_rtx ();
1986
1987 /* Test the result; if it is NaN, set errno=EDOM because
1988 the argument was not in the domain. */
1989 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
1990 NULL_RTX, NULL, lab,
1991 /* The jump is very likely. */
1992 REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));
1993
1994 #ifdef TARGET_EDOM
1995 /* If this built-in doesn't throw an exception, set errno directly. */
1996 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1997 {
1998 #ifdef GEN_ERRNO_RTX
1999 rtx errno_rtx = GEN_ERRNO_RTX;
2000 #else
2001 rtx errno_rtx
2002 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
2003 #endif
2004 emit_move_insn (errno_rtx,
2005 gen_int_mode (TARGET_EDOM, GET_MODE (errno_rtx)));
2006 emit_label (lab);
2007 return;
2008 }
2009 #endif
2010
2011 /* Make sure the library call isn't expanded as a tail call. */
2012 CALL_EXPR_TAILCALL (exp) = 0;
2013
2014 /* We can't set errno=EDOM directly; let the library call do it.
2015 Pop the arguments right away in case the call gets deleted. */
2016 NO_DEFER_POP;
2017 expand_call (exp, target, 0);
2018 OK_DEFER_POP;
2019 emit_label (lab);
2020 }
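/* In C terms, the sequence emitted above behaves roughly like

     if (result == result)   // true for every value except NaN
       goto done;
     errno = EDOM;           // or re-issue the libcall to set errno
   done:;

   a sketch only; the real code works at the RTL level.  */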
2021
2022 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
2023 Return NULL_RTX if a normal call should be emitted rather than expanding
2024 the function in-line. EXP is the expression that is a call to the builtin
2025 function; if convenient, the result should be placed in TARGET.
2026 SUBTARGET may be used as the target for computing one of EXP's operands. */
2027
2028 static rtx
2029 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
2030 {
2031 optab builtin_optab;
2032 rtx op0;
2033 rtx_insn *insns;
2034 tree fndecl = get_callee_fndecl (exp);
2035 machine_mode mode;
2036 bool errno_set = false;
2037 bool try_widening = false;
2038 tree arg;
2039
2040 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2041 return NULL_RTX;
2042
2043 arg = CALL_EXPR_ARG (exp, 0);
2044
2045 switch (DECL_FUNCTION_CODE (fndecl))
2046 {
2047 CASE_FLT_FN (BUILT_IN_SQRT):
2048 errno_set = ! tree_expr_nonnegative_p (arg);
2049 try_widening = true;
2050 builtin_optab = sqrt_optab;
2051 break;
2052 CASE_FLT_FN (BUILT_IN_EXP):
2053 errno_set = true; builtin_optab = exp_optab; break;
2054 CASE_FLT_FN (BUILT_IN_EXP10):
2055 CASE_FLT_FN (BUILT_IN_POW10):
2056 errno_set = true; builtin_optab = exp10_optab; break;
2057 CASE_FLT_FN (BUILT_IN_EXP2):
2058 errno_set = true; builtin_optab = exp2_optab; break;
2059 CASE_FLT_FN (BUILT_IN_EXPM1):
2060 errno_set = true; builtin_optab = expm1_optab; break;
2061 CASE_FLT_FN (BUILT_IN_LOGB):
2062 errno_set = true; builtin_optab = logb_optab; break;
2063 CASE_FLT_FN (BUILT_IN_LOG):
2064 errno_set = true; builtin_optab = log_optab; break;
2065 CASE_FLT_FN (BUILT_IN_LOG10):
2066 errno_set = true; builtin_optab = log10_optab; break;
2067 CASE_FLT_FN (BUILT_IN_LOG2):
2068 errno_set = true; builtin_optab = log2_optab; break;
2069 CASE_FLT_FN (BUILT_IN_LOG1P):
2070 errno_set = true; builtin_optab = log1p_optab; break;
2071 CASE_FLT_FN (BUILT_IN_ASIN):
2072 builtin_optab = asin_optab; break;
2073 CASE_FLT_FN (BUILT_IN_ACOS):
2074 builtin_optab = acos_optab; break;
2075 CASE_FLT_FN (BUILT_IN_TAN):
2076 builtin_optab = tan_optab; break;
2077 CASE_FLT_FN (BUILT_IN_ATAN):
2078 builtin_optab = atan_optab; break;
2079 CASE_FLT_FN (BUILT_IN_FLOOR):
2080 builtin_optab = floor_optab; break;
2081 CASE_FLT_FN (BUILT_IN_CEIL):
2082 builtin_optab = ceil_optab; break;
2083 CASE_FLT_FN (BUILT_IN_TRUNC):
2084 builtin_optab = btrunc_optab; break;
2085 CASE_FLT_FN (BUILT_IN_ROUND):
2086 builtin_optab = round_optab; break;
2087 CASE_FLT_FN (BUILT_IN_NEARBYINT):
2088 builtin_optab = nearbyint_optab;
2089 if (flag_trapping_math)
2090 break;
2091 /* Else fallthrough and expand as rint. */
2092 CASE_FLT_FN (BUILT_IN_RINT):
2093 builtin_optab = rint_optab; break;
2094 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
2095 builtin_optab = significand_optab; break;
2096 default:
2097 gcc_unreachable ();
2098 }
2099
2100 /* Make a suitable register to place result in. */
2101 mode = TYPE_MODE (TREE_TYPE (exp));
2102
2103 if (! flag_errno_math || ! HONOR_NANS (mode))
2104 errno_set = false;
2105
2106 /* Before working hard, check whether the instruction is available, but try
2107 to widen the mode for specific operations. */
2108 if ((optab_handler (builtin_optab, mode) != CODE_FOR_nothing
2109 || (try_widening && !excess_precision_type (TREE_TYPE (exp))))
2110 && (!errno_set || !optimize_insn_for_size_p ()))
2111 {
2112 rtx result = gen_reg_rtx (mode);
2113
2114 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2115 need to expand the argument again. This way, we will not perform
2116 side-effects more than once. */
2117 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2118
2119 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2120
2121 start_sequence ();
2122
2123 /* Compute into RESULT.
2124 Set RESULT to wherever the result comes back. */
2125 result = expand_unop (mode, builtin_optab, op0, result, 0);
2126
2127 if (result != 0)
2128 {
2129 if (errno_set)
2130 expand_errno_check (exp, result);
2131
2132 /* Output the entire sequence. */
2133 insns = get_insns ();
2134 end_sequence ();
2135 emit_insn (insns);
2136 return result;
2137 }
2138
2139 /* If we were unable to expand via the builtin, stop the sequence
2140 (without outputting the insns) and call the library function
2141 with the stabilized argument list. */
2142 end_sequence ();
2143 }
2144
2145 return expand_call (exp, target, target == const0_rtx);
2146 }
2147
2148 /* Expand a call to the builtin binary math functions (pow and atan2).
2149 Return NULL_RTX if a normal call should be emitted rather than expanding the
2150 function in-line. EXP is the expression that is a call to the builtin
2151 function; if convenient, the result should be placed in TARGET.
2152 SUBTARGET may be used as the target for computing one of EXP's
2153 operands. */
2154
2155 static rtx
2156 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2157 {
2158 optab builtin_optab;
2159 rtx op0, op1, result;
2160 rtx_insn *insns;
2161 int op1_type = REAL_TYPE;
2162 tree fndecl = get_callee_fndecl (exp);
2163 tree arg0, arg1;
2164 machine_mode mode;
2165 bool errno_set = true;
2166
2167 switch (DECL_FUNCTION_CODE (fndecl))
2168 {
2169 CASE_FLT_FN (BUILT_IN_SCALBN):
2170 CASE_FLT_FN (BUILT_IN_SCALBLN):
2171 CASE_FLT_FN (BUILT_IN_LDEXP):
2172 op1_type = INTEGER_TYPE;
2173 default:
2174 break;
2175 }
2176
2177 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2178 return NULL_RTX;
2179
2180 arg0 = CALL_EXPR_ARG (exp, 0);
2181 arg1 = CALL_EXPR_ARG (exp, 1);
2182
2183 switch (DECL_FUNCTION_CODE (fndecl))
2184 {
2185 CASE_FLT_FN (BUILT_IN_POW):
2186 builtin_optab = pow_optab; break;
2187 CASE_FLT_FN (BUILT_IN_ATAN2):
2188 builtin_optab = atan2_optab; break;
2189 CASE_FLT_FN (BUILT_IN_SCALB):
2190 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2191 return 0;
2192 builtin_optab = scalb_optab; break;
2193 CASE_FLT_FN (BUILT_IN_SCALBN):
2194 CASE_FLT_FN (BUILT_IN_SCALBLN):
2195 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2196 return 0;
2197 /* Fall through... */
2198 CASE_FLT_FN (BUILT_IN_LDEXP):
2199 builtin_optab = ldexp_optab; break;
2200 CASE_FLT_FN (BUILT_IN_FMOD):
2201 builtin_optab = fmod_optab; break;
2202 CASE_FLT_FN (BUILT_IN_REMAINDER):
2203 CASE_FLT_FN (BUILT_IN_DREM):
2204 builtin_optab = remainder_optab; break;
2205 default:
2206 gcc_unreachable ();
2207 }
2208
2209 /* Make a suitable register to place result in. */
2210 mode = TYPE_MODE (TREE_TYPE (exp));
2211
2212 /* Before working hard, check whether the instruction is available. */
2213 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2214 return NULL_RTX;
2215
2216 result = gen_reg_rtx (mode);
2217
2218 if (! flag_errno_math || ! HONOR_NANS (mode))
2219 errno_set = false;
2220
2221 if (errno_set && optimize_insn_for_size_p ())
2222 return 0;
2223
2224 /* Always stabilize the argument list. */
2225 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2226 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2227
2228 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2229 op1 = expand_normal (arg1);
2230
2231 start_sequence ();
2232
2233 /* Compute into RESULT.
2234 Set RESULT to wherever the result comes back. */
2235 result = expand_binop (mode, builtin_optab, op0, op1,
2236 result, 0, OPTAB_DIRECT);
2237
2238 /* If we were unable to expand via the builtin, stop the sequence
2239 (without outputting the insns) and call the library function
2240 with the stabilized argument list. */
2241 if (result == 0)
2242 {
2243 end_sequence ();
2244 return expand_call (exp, target, target == const0_rtx);
2245 }
2246
2247 if (errno_set)
2248 expand_errno_check (exp, result);
2249
2250 /* Output the entire sequence. */
2251 insns = get_insns ();
2252 end_sequence ();
2253 emit_insn (insns);
2254
2255 return result;
2256 }
2257
2258 /* Expand a call to the builtin ternary math functions (fma).
2259 Return NULL_RTX if a normal call should be emitted rather than expanding the
2260 function in-line. EXP is the expression that is a call to the builtin
2261 function; if convenient, the result should be placed in TARGET.
2262 SUBTARGET may be used as the target for computing one of EXP's
2263 operands. */
2264
2265 static rtx
2266 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2267 {
2268 optab builtin_optab;
2269 rtx op0, op1, op2, result;
2270 rtx_insn *insns;
2271 tree fndecl = get_callee_fndecl (exp);
2272 tree arg0, arg1, arg2;
2273 machine_mode mode;
2274
2275 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2276 return NULL_RTX;
2277
2278 arg0 = CALL_EXPR_ARG (exp, 0);
2279 arg1 = CALL_EXPR_ARG (exp, 1);
2280 arg2 = CALL_EXPR_ARG (exp, 2);
2281
2282 switch (DECL_FUNCTION_CODE (fndecl))
2283 {
2284 CASE_FLT_FN (BUILT_IN_FMA):
2285 builtin_optab = fma_optab; break;
2286 default:
2287 gcc_unreachable ();
2288 }
2289
2290 /* Make a suitable register to place result in. */
2291 mode = TYPE_MODE (TREE_TYPE (exp));
2292
2293 /* Before working hard, check whether the instruction is available. */
2294 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2295 return NULL_RTX;
2296
2297 result = gen_reg_rtx (mode);
2298
2299 /* Always stabilize the argument list. */
2300 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2301 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2302 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2303
2304 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2305 op1 = expand_normal (arg1);
2306 op2 = expand_normal (arg2);
2307
2308 start_sequence ();
2309
2310 /* Compute into RESULT.
2311 Set RESULT to wherever the result comes back. */
2312 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2313 result, 0);
2314
2315 /* If we were unable to expand via the builtin, stop the sequence
2316 (without outputting the insns) and call the library function
2317 with the stabilized argument list. */
2318 if (result == 0)
2319 {
2320 end_sequence ();
2321 return expand_call (exp, target, target == const0_rtx);
2322 }
2323
2324 /* Output the entire sequence. */
2325 insns = get_insns ();
2326 end_sequence ();
2327 emit_insn (insns);
2328
2329 return result;
2330 }
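/* For illustration only: when the fma optab is available, a call such
   as

     double r = __builtin_fma (a, b, c);

   is expanded to a single fused multiply-add computing a * b + c with
   one rounding step instead of a library call.  */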
2331
2332 /* Expand a call to the builtin sin and cos math functions.
2333 Return NULL_RTX if a normal call should be emitted rather than expanding the
2334 function in-line. EXP is the expression that is a call to the builtin
2335 function; if convenient, the result should be placed in TARGET.
2336 SUBTARGET may be used as the target for computing one of EXP's
2337 operands. */
2338
2339 static rtx
2340 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2341 {
2342 optab builtin_optab;
2343 rtx op0;
2344 rtx_insn *insns;
2345 tree fndecl = get_callee_fndecl (exp);
2346 machine_mode mode;
2347 tree arg;
2348
2349 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2350 return NULL_RTX;
2351
2352 arg = CALL_EXPR_ARG (exp, 0);
2353
2354 switch (DECL_FUNCTION_CODE (fndecl))
2355 {
2356 CASE_FLT_FN (BUILT_IN_SIN):
2357 CASE_FLT_FN (BUILT_IN_COS):
2358 builtin_optab = sincos_optab; break;
2359 default:
2360 gcc_unreachable ();
2361 }
2362
2363 /* Make a suitable register to place result in. */
2364 mode = TYPE_MODE (TREE_TYPE (exp));
2365
2366 /* Check if the sincos insn is available; otherwise fall back
2367 to the sin or cos insn. */
2368 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2369 switch (DECL_FUNCTION_CODE (fndecl))
2370 {
2371 CASE_FLT_FN (BUILT_IN_SIN):
2372 builtin_optab = sin_optab; break;
2373 CASE_FLT_FN (BUILT_IN_COS):
2374 builtin_optab = cos_optab; break;
2375 default:
2376 gcc_unreachable ();
2377 }
2378
2379 /* Before working hard, check whether the instruction is available. */
2380 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2381 {
2382 rtx result = gen_reg_rtx (mode);
2383
2384 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2385 need to expand the argument again. This way, we will not perform
2386 side-effects more than once. */
2387 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2388
2389 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2390
2391 start_sequence ();
2392
2393 /* Compute into RESULT.
2394 Set RESULT to wherever the result comes back. */
2395 if (builtin_optab == sincos_optab)
2396 {
2397 int ok;
2398
2399 switch (DECL_FUNCTION_CODE (fndecl))
2400 {
2401 CASE_FLT_FN (BUILT_IN_SIN):
2402 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2403 break;
2404 CASE_FLT_FN (BUILT_IN_COS):
2405 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2406 break;
2407 default:
2408 gcc_unreachable ();
2409 }
2410 gcc_assert (ok);
2411 }
2412 else
2413 result = expand_unop (mode, builtin_optab, op0, result, 0);
2414
2415 if (result != 0)
2416 {
2417 /* Output the entire sequence. */
2418 insns = get_insns ();
2419 end_sequence ();
2420 emit_insn (insns);
2421 return result;
2422 }
2423
2424 /* If we were unable to expand via the builtin, stop the sequence
2425 (without outputting the insns) and call the library function
2426 with the stabilized argument list. */
2427 end_sequence ();
2428 }
2429
2430 return expand_call (exp, target, target == const0_rtx);
2431 }
2432
2433 /* Given an interclass math builtin decl FNDECL and its argument ARG
2434 return an RTL instruction code that implements the functionality.
2435 If that isn't possible or available return CODE_FOR_nothing. */
2436
2437 static enum insn_code
2438 interclass_mathfn_icode (tree arg, tree fndecl)
2439 {
2440 bool errno_set = false;
2441 optab builtin_optab = unknown_optab;
2442 machine_mode mode;
2443
2444 switch (DECL_FUNCTION_CODE (fndecl))
2445 {
2446 CASE_FLT_FN (BUILT_IN_ILOGB):
2447 errno_set = true; builtin_optab = ilogb_optab; break;
2448 CASE_FLT_FN (BUILT_IN_ISINF):
2449 builtin_optab = isinf_optab; break;
2450 case BUILT_IN_ISNORMAL:
2451 case BUILT_IN_ISFINITE:
2452 CASE_FLT_FN (BUILT_IN_FINITE):
2453 case BUILT_IN_FINITED32:
2454 case BUILT_IN_FINITED64:
2455 case BUILT_IN_FINITED128:
2456 case BUILT_IN_ISINFD32:
2457 case BUILT_IN_ISINFD64:
2458 case BUILT_IN_ISINFD128:
2459 /* These builtins have no optabs (yet). */
2460 break;
2461 default:
2462 gcc_unreachable ();
2463 }
2464
2465 /* There's no easy way to detect the case we need to set EDOM. */
2466 if (flag_errno_math && errno_set)
2467 return CODE_FOR_nothing;
2468
2469 /* Optab mode depends on the mode of the input argument. */
2470 mode = TYPE_MODE (TREE_TYPE (arg));
2471
2472 if (builtin_optab)
2473 return optab_handler (builtin_optab, mode);
2474 return CODE_FOR_nothing;
2475 }
2476
2477 /* Expand a call to one of the builtin math functions that operate on
2478 a floating point argument and output an integer result (ilogb, isinf,
2479 isnan, etc).
2480 Return 0 if a normal call should be emitted rather than expanding the
2481 function in-line. EXP is the expression that is a call to the builtin
2482 function; if convenient, the result should be placed in TARGET. */
2483
2484 static rtx
2485 expand_builtin_interclass_mathfn (tree exp, rtx target)
2486 {
2487 enum insn_code icode = CODE_FOR_nothing;
2488 rtx op0;
2489 tree fndecl = get_callee_fndecl (exp);
2490 machine_mode mode;
2491 tree arg;
2492
2493 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2494 return NULL_RTX;
2495
2496 arg = CALL_EXPR_ARG (exp, 0);
2497 icode = interclass_mathfn_icode (arg, fndecl);
2498 mode = TYPE_MODE (TREE_TYPE (arg));
2499
2500 if (icode != CODE_FOR_nothing)
2501 {
2502 struct expand_operand ops[1];
2503 rtx_insn *last = get_last_insn ();
2504 tree orig_arg = arg;
2505
2506 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2507 need to expand the argument again. This way, we will not perform
2508 side-effects more than once. */
2509 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2510
2511 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2512
2513 if (mode != GET_MODE (op0))
2514 op0 = convert_to_mode (mode, op0, 0);
2515
2516 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2517 if (maybe_legitimize_operands (icode, 0, 1, ops)
2518 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2519 return ops[0].value;
2520
2521 delete_insns_since (last);
2522 CALL_EXPR_ARG (exp, 0) = orig_arg;
2523 }
2524
2525 return NULL_RTX;
2526 }
2527
2528 /* Expand a call to the builtin sincos math function.
2529 Return NULL_RTX if a normal call should be emitted rather than expanding the
2530 function in-line. EXP is the expression that is a call to the builtin
2531 function. */
2532
2533 static rtx
2534 expand_builtin_sincos (tree exp)
2535 {
2536 rtx op0, op1, op2, target1, target2;
2537 machine_mode mode;
2538 tree arg, sinp, cosp;
2539 int result;
2540 location_t loc = EXPR_LOCATION (exp);
2541 tree alias_type, alias_off;
2542
2543 if (!validate_arglist (exp, REAL_TYPE,
2544 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2545 return NULL_RTX;
2546
2547 arg = CALL_EXPR_ARG (exp, 0);
2548 sinp = CALL_EXPR_ARG (exp, 1);
2549 cosp = CALL_EXPR_ARG (exp, 2);
2550
2551 /* Make a suitable register to place result in. */
2552 mode = TYPE_MODE (TREE_TYPE (arg));
2553
2554 /* Check if sincos insn is available, otherwise emit the call. */
2555 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2556 return NULL_RTX;
2557
2558 target1 = gen_reg_rtx (mode);
2559 target2 = gen_reg_rtx (mode);
2560
2561 op0 = expand_normal (arg);
2562 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2563 alias_off = build_int_cst (alias_type, 0);
2564 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2565 sinp, alias_off));
2566 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2567 cosp, alias_off));
2568
2569 /* Compute into target1 and target2.
2570 Set TARGET to wherever the result comes back. */
2571 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2572 gcc_assert (result);
2573
2574 /* Move target1 and target2 to the memory locations indicated
2575 by op1 and op2. */
2576 emit_move_insn (op1, target1);
2577 emit_move_insn (op2, target2);
2578
2579 return const0_rtx;
2580 }
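/* For illustration only: a source-level call such as

     double s, c;
     __builtin_sincos (x, &s, &c);

   reaches this expander and, when the sincos optab exists, computes
   both results in one instruction sequence before storing them
   through the two pointers.  */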
2581
2582 /* Expand a call to the internal cexpi builtin via the sincos math function.
2583 EXP is the expression that is a call to the builtin function; if convenient,
2584 the result should be placed in TARGET. */
2585
2586 static rtx
2587 expand_builtin_cexpi (tree exp, rtx target)
2588 {
2589 tree fndecl = get_callee_fndecl (exp);
2590 tree arg, type;
2591 machine_mode mode;
2592 rtx op0, op1, op2;
2593 location_t loc = EXPR_LOCATION (exp);
2594
2595 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2596 return NULL_RTX;
2597
2598 arg = CALL_EXPR_ARG (exp, 0);
2599 type = TREE_TYPE (arg);
2600 mode = TYPE_MODE (TREE_TYPE (arg));
2601
2602 /* Try expanding via a sincos optab, fall back to emitting a libcall
2603 to sincos or cexp. We are sure sincos or cexp is available, because
2604 cexpi is only generated from those calls or when one of them exists. */
2605 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2606 {
2607 op1 = gen_reg_rtx (mode);
2608 op2 = gen_reg_rtx (mode);
2609
2610 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2611
2612 /* Compute into op1 and op2. */
2613 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2614 }
2615 else if (targetm.libc_has_function (function_sincos))
2616 {
2617 tree call, fn = NULL_TREE;
2618 tree top1, top2;
2619 rtx op1a, op2a;
2620
2621 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2622 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2623 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2624 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2625 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2626 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2627 else
2628 gcc_unreachable ();
2629
2630 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2631 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2632 op1a = copy_addr_to_reg (XEXP (op1, 0));
2633 op2a = copy_addr_to_reg (XEXP (op2, 0));
2634 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2635 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2636
2637 /* Make sure not to fold the sincos call again. */
2638 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2639 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2640 call, 3, arg, top1, top2));
2641 }
2642 else
2643 {
2644 tree call, fn = NULL_TREE, narg;
2645 tree ctype = build_complex_type (type);
2646
2647 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2648 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2649 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2650 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2651 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2652 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2653 else
2654 gcc_unreachable ();
2655
2656 /* If we don't have a decl for cexp create one. This is the
2657 friendliest fallback if the user calls __builtin_cexpi
2658 on a target without full C99 function support. */
2659 if (fn == NULL_TREE)
2660 {
2661 tree fntype;
2662 const char *name = NULL;
2663
2664 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2665 name = "cexpf";
2666 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2667 name = "cexp";
2668 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2669 name = "cexpl";
2670
2671 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2672 fn = build_fn_decl (name, fntype);
2673 }
2674
2675 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2676 build_real (type, dconst0), arg);
2677
2678 /* Make sure not to fold the cexp call again. */
2679 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2680 return expand_expr (build_call_nary (ctype, call, 1, narg),
2681 target, VOIDmode, EXPAND_NORMAL);
2682 }
2683
2684 /* Now build the proper return type. */
2685 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2686 make_tree (TREE_TYPE (arg), op2),
2687 make_tree (TREE_TYPE (arg), op1)),
2688 target, VOIDmode, EXPAND_NORMAL);
2689 }
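/* For illustration only, the three strategies above amount to:

     sincos optab     sin and cos computed directly into registers
     sincos libcall   double s, c; sincos (x, &s, &c);
                      result = c + s * I
     cexp fallback    result = cexp (0.0 + x * I)

   where I denotes the imaginary unit; the last form matches the
   COMPLEX_EXPR built from dconst0 and the argument.  */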
2690
2691 /* Conveniently construct a function call expression. FNDECL names the
2692 function to be called, N is the number of arguments, and the "..."
2693 parameters are the argument expressions. Unlike build_call_expr
2694 this doesn't fold the call, so it will always return a CALL_EXPR. */
2695
2696 static tree
2697 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2698 {
2699 va_list ap;
2700 tree fntype = TREE_TYPE (fndecl);
2701 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2702
2703 va_start (ap, n);
2704 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2705 va_end (ap);
2706 SET_EXPR_LOCATION (fn, loc);
2707 return fn;
2708 }
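/* For example, the rounding fallbacks below use it as

     exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl,
                                  1, arg);

   precisely because folding the freshly built call could re-create the
   builtin currently being expanded.  */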
2709
2710 /* Expand a call to one of the builtin rounding functions gcc defines
2711 as an extension (lfloor and lceil). As these are gcc extensions we
2712 do not need to worry about setting errno to EDOM.
2713 If expanding via the optab fails, lower the expression to (int)(floor(x)).
2714 EXP is the expression that is a call to the builtin function;
2715 if convenient, the result should be placed in TARGET. */
2716
2717 static rtx
2718 expand_builtin_int_roundingfn (tree exp, rtx target)
2719 {
2720 convert_optab builtin_optab;
2721 rtx op0, tmp;
2722 rtx_insn *insns;
2723 tree fndecl = get_callee_fndecl (exp);
2724 enum built_in_function fallback_fn;
2725 tree fallback_fndecl;
2726 machine_mode mode;
2727 tree arg;
2728
2729 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2730 gcc_unreachable ();
2731
2732 arg = CALL_EXPR_ARG (exp, 0);
2733
2734 switch (DECL_FUNCTION_CODE (fndecl))
2735 {
2736 CASE_FLT_FN (BUILT_IN_ICEIL):
2737 CASE_FLT_FN (BUILT_IN_LCEIL):
2738 CASE_FLT_FN (BUILT_IN_LLCEIL):
2739 builtin_optab = lceil_optab;
2740 fallback_fn = BUILT_IN_CEIL;
2741 break;
2742
2743 CASE_FLT_FN (BUILT_IN_IFLOOR):
2744 CASE_FLT_FN (BUILT_IN_LFLOOR):
2745 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2746 builtin_optab = lfloor_optab;
2747 fallback_fn = BUILT_IN_FLOOR;
2748 break;
2749
2750 default:
2751 gcc_unreachable ();
2752 }
2753
2754 /* Make a suitable register to place result in. */
2755 mode = TYPE_MODE (TREE_TYPE (exp));
2756
2757 target = gen_reg_rtx (mode);
2758
2759 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2760 need to expand the argument again. This way, we will not perform
2761 side-effects more than once. */
2762 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2763
2764 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2765
2766 start_sequence ();
2767
2768 /* Compute into TARGET. */
2769 if (expand_sfix_optab (target, op0, builtin_optab))
2770 {
2771 /* Output the entire sequence. */
2772 insns = get_insns ();
2773 end_sequence ();
2774 emit_insn (insns);
2775 return target;
2776 }
2777
2778 /* If we were unable to expand via the builtin, stop the sequence
2779 (without outputting the insns). */
2780 end_sequence ();
2781
2782 /* Fall back to floating point rounding optab. */
2783 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2784
2785 /* For non-C99 targets we may end up without a fallback fndecl here
2786 if the user called __builtin_lfloor directly. In this case emit
2787 a call to the floor/ceil variants nevertheless. This should result
2788 in the best user experience on targets without full C99 support. */
2789 if (fallback_fndecl == NULL_TREE)
2790 {
2791 tree fntype;
2792 const char *name = NULL;
2793
2794 switch (DECL_FUNCTION_CODE (fndecl))
2795 {
2796 case BUILT_IN_ICEIL:
2797 case BUILT_IN_LCEIL:
2798 case BUILT_IN_LLCEIL:
2799 name = "ceil";
2800 break;
2801 case BUILT_IN_ICEILF:
2802 case BUILT_IN_LCEILF:
2803 case BUILT_IN_LLCEILF:
2804 name = "ceilf";
2805 break;
2806 case BUILT_IN_ICEILL:
2807 case BUILT_IN_LCEILL:
2808 case BUILT_IN_LLCEILL:
2809 name = "ceill";
2810 break;
2811 case BUILT_IN_IFLOOR:
2812 case BUILT_IN_LFLOOR:
2813 case BUILT_IN_LLFLOOR:
2814 name = "floor";
2815 break;
2816 case BUILT_IN_IFLOORF:
2817 case BUILT_IN_LFLOORF:
2818 case BUILT_IN_LLFLOORF:
2819 name = "floorf";
2820 break;
2821 case BUILT_IN_IFLOORL:
2822 case BUILT_IN_LFLOORL:
2823 case BUILT_IN_LLFLOORL:
2824 name = "floorl";
2825 break;
2826 default:
2827 gcc_unreachable ();
2828 }
2829
2830 fntype = build_function_type_list (TREE_TYPE (arg),
2831 TREE_TYPE (arg), NULL_TREE);
2832 fallback_fndecl = build_fn_decl (name, fntype);
2833 }
2834
2835 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2836
2837 tmp = expand_normal (exp);
2838 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2839
2840 /* Truncate the result of the floating point optab to integer
2841 via expand_fix (). */
2842 target = gen_reg_rtx (mode);
2843 expand_fix (target, tmp, 0);
2844
2845 return target;
2846 }
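/* For illustration only: on a target without an lfloor pattern, a call
   such as

     long l = __builtin_lfloor (x);

   ends up expanded as the equivalent of (long) floor (x), i.e. a call
   to floor followed by expand_fix, as described above.  */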
2847
2848 /* Expand a call to one of the builtin math functions doing integer
2849 conversion (lrint).
2850 Return 0 if a normal call should be emitted rather than expanding the
2851 function in-line. EXP is the expression that is a call to the builtin
2852 function; if convenient, the result should be placed in TARGET. */
2853
2854 static rtx
2855 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2856 {
2857 convert_optab builtin_optab;
2858 rtx op0;
2859 rtx_insn *insns;
2860 tree fndecl = get_callee_fndecl (exp);
2861 tree arg;
2862 machine_mode mode;
2863 enum built_in_function fallback_fn = BUILT_IN_NONE;
2864
2865 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2866 gcc_unreachable ();
2867
2868 arg = CALL_EXPR_ARG (exp, 0);
2869
2870 switch (DECL_FUNCTION_CODE (fndecl))
2871 {
2872 CASE_FLT_FN (BUILT_IN_IRINT):
2873 fallback_fn = BUILT_IN_LRINT;
2874 /* FALLTHRU */
2875 CASE_FLT_FN (BUILT_IN_LRINT):
2876 CASE_FLT_FN (BUILT_IN_LLRINT):
2877 builtin_optab = lrint_optab;
2878 break;
2879
2880 CASE_FLT_FN (BUILT_IN_IROUND):
2881 fallback_fn = BUILT_IN_LROUND;
2882 /* FALLTHRU */
2883 CASE_FLT_FN (BUILT_IN_LROUND):
2884 CASE_FLT_FN (BUILT_IN_LLROUND):
2885 builtin_optab = lround_optab;
2886 break;
2887
2888 default:
2889 gcc_unreachable ();
2890 }
2891
2892 /* There's no easy way to detect the case we need to set EDOM. */
2893 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2894 return NULL_RTX;
2895
2896 /* Make a suitable register to place result in. */
2897 mode = TYPE_MODE (TREE_TYPE (exp));
2898
2899 /* Try expanding inline only when errno does not need to be maintained. */
2900 if (!flag_errno_math)
2901 {
2902 rtx result = gen_reg_rtx (mode);
2903
2904 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2905 need to expand the argument again. This way, we will not perform
2906 side-effects more than once. */
2907 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2908
2909 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2910
2911 start_sequence ();
2912
2913 if (expand_sfix_optab (result, op0, builtin_optab))
2914 {
2915 /* Output the entire sequence. */
2916 insns = get_insns ();
2917 end_sequence ();
2918 emit_insn (insns);
2919 return result;
2920 }
2921
2922 /* If we were unable to expand via the builtin, stop the sequence
2923 (without outputting the insns) and call the library function
2924 with the stabilized argument list. */
2925 end_sequence ();
2926 }
2927
2928 if (fallback_fn != BUILT_IN_NONE)
2929 {
2930 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2931 targets, (int) round (x) should never be transformed into
2932 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2933 a call to lround in the hope that the target provides at least some
2934 C99 functions. This should result in the best user experience on
2935 targets without full C99 support. */
2936 tree fallback_fndecl = mathfn_built_in_1 (TREE_TYPE (arg),
2937 fallback_fn, 0);
2938
2939 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2940 fallback_fndecl, 1, arg);
2941
2942 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2943 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2944 return convert_to_mode (mode, target, 0);
2945 }
2946
2947 return expand_call (exp, target, target == const0_rtx);
2948 }
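/* For illustration only: absent an iround pattern, a call such as

     int i = __builtin_iround (x);

   is emitted as the equivalent of (int) lround (x), as the fallback
   code above arranges.  */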
2949
2950 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2951 a normal call should be emitted rather than expanding the function
2952 in-line. EXP is the expression that is a call to the builtin
2953 function; if convenient, the result should be placed in TARGET. */
2954
2955 static rtx
2956 expand_builtin_powi (tree exp, rtx target)
2957 {
2958 tree arg0, arg1;
2959 rtx op0, op1;
2960 machine_mode mode;
2961 machine_mode mode2;
2962
2963 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2964 return NULL_RTX;
2965
2966 arg0 = CALL_EXPR_ARG (exp, 0);
2967 arg1 = CALL_EXPR_ARG (exp, 1);
2968 mode = TYPE_MODE (TREE_TYPE (exp));
2969
2970 /* Emit a libcall to libgcc. */
2971
2972 /* Mode of the 2nd argument must match that of an int. */
2973 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2974
2975 if (target == NULL_RTX)
2976 target = gen_reg_rtx (mode);
2977
2978 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2979 if (GET_MODE (op0) != mode)
2980 op0 = convert_to_mode (mode, op0, 0);
2981 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2982 if (GET_MODE (op1) != mode2)
2983 op1 = convert_to_mode (mode2, op1, 0);
2984
2985 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2986 target, LCT_CONST, mode, 2,
2987 op0, mode, op1, mode2);
2988
2989 return target;
2990 }
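/* For illustration only: a call such as

     double r = __builtin_powi (x, n);

   becomes a libgcc libcall, typically __powidf2 for double on common
   targets (the exact name comes from optab_libfunc on powi_optab).  */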
2991
2992 /* Expand expression EXP which is a call to the strlen builtin. Return
2993 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
2994 try to get the result in TARGET, if convenient. */
2995
2996 static rtx
2997 expand_builtin_strlen (tree exp, rtx target,
2998 machine_mode target_mode)
2999 {
3000 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3001 return NULL_RTX;
3002 else
3003 {
3004 struct expand_operand ops[4];
3005 rtx pat;
3006 tree len;
3007 tree src = CALL_EXPR_ARG (exp, 0);
3008 rtx src_reg;
3009 rtx_insn *before_strlen;
3010 machine_mode insn_mode = target_mode;
3011 enum insn_code icode = CODE_FOR_nothing;
3012 unsigned int align;
3013
3014 /* If the length can be computed at compile-time, return it. */
3015 len = c_strlen (src, 0);
3016 if (len)
3017 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3018
3019 /* If the length can be computed at compile-time and is a constant
3020 integer, but there are side-effects in src, evaluate
3021 src for side-effects, then return len.
3022 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3023 can be optimized into: i++; x = 3; */
3024 len = c_strlen (src, 1);
3025 if (len && TREE_CODE (len) == INTEGER_CST)
3026 {
3027 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3028 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3029 }
3030
3031 align = get_pointer_alignment (src) / BITS_PER_UNIT;
3032
3033 /* If SRC is not a pointer type, don't do this operation inline. */
3034 if (align == 0)
3035 return NULL_RTX;
3036
3037 /* Bail out if we can't compute strlen in the right mode. */
3038 while (insn_mode != VOIDmode)
3039 {
3040 icode = optab_handler (strlen_optab, insn_mode);
3041 if (icode != CODE_FOR_nothing)
3042 break;
3043
3044 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3045 }
3046 if (insn_mode == VOIDmode)
3047 return NULL_RTX;
3048
3049 /* Make a place to hold the source address. We will not expand
3050 the actual source until we are sure that the expansion will
3051 not fail -- there are trees that cannot be expanded twice. */
3052 src_reg = gen_reg_rtx (Pmode);
3053
3054 /* Mark the beginning of the strlen sequence so we can emit the
3055 source operand later. */
3056 before_strlen = get_last_insn ();
3057
3058 create_output_operand (&ops[0], target, insn_mode);
3059 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3060 create_integer_operand (&ops[2], 0);
3061 create_integer_operand (&ops[3], align);
3062 if (!maybe_expand_insn (icode, 4, ops))
3063 return NULL_RTX;
3064
3065 /* Now that we are assured of success, expand the source. */
3066 start_sequence ();
3067 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3068 if (pat != src_reg)
3069 {
3070 #ifdef POINTERS_EXTEND_UNSIGNED
3071 if (GET_MODE (pat) != Pmode)
3072 pat = convert_to_mode (Pmode, pat,
3073 POINTERS_EXTEND_UNSIGNED);
3074 #endif
3075 emit_move_insn (src_reg, pat);
3076 }
3077 pat = get_insns ();
3078 end_sequence ();
3079
3080 if (before_strlen)
3081 emit_insn_after (pat, before_strlen);
3082 else
3083 emit_insn_before (pat, get_insns ());
3084
3085 /* Return the value in the proper mode for this function. */
3086 if (GET_MODE (ops[0].value) == target_mode)
3087 target = ops[0].value;
3088 else if (target != 0)
3089 convert_move (target, ops[0].value, 0);
3090 else
3091 target = convert_to_mode (target_mode, ops[0].value, 0);
3092
3093 return target;
3094 }
3095 }
3096
3097 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3098 bytes from constant string DATA + OFFSET and return it as a target
3099 constant. */
3100
3101 static rtx
3102 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3103 machine_mode mode)
3104 {
3105 const char *str = (const char *) data;
3106
3107 gcc_assert (offset >= 0
3108 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3109 <= strlen (str) + 1));
3110
3111 return c_readstr (str + offset, mode);
3112 }
3113
3114 /* LEN specifies the length of the block for the memcpy/memset operation.
3115 Figure out its range and store it in MIN_SIZE/MAX_SIZE.
3116 In some cases we can make a very likely guess about the maximum size,
3117 which we then store in PROBABLE_MAX_SIZE. */
3118
3119 static void
3120 determine_block_size (tree len, rtx len_rtx,
3121 unsigned HOST_WIDE_INT *min_size,
3122 unsigned HOST_WIDE_INT *max_size,
3123 unsigned HOST_WIDE_INT *probable_max_size)
3124 {
3125 if (CONST_INT_P (len_rtx))
3126 {
3127 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3128 return;
3129 }
3130 else
3131 {
3132 wide_int min, max;
3133 enum value_range_type range_type = VR_UNDEFINED;
3134
3135 /* Determine bounds from the type. */
3136 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3137 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3138 else
3139 *min_size = 0;
3140 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3141 *probable_max_size = *max_size
3142 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3143 else
3144 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3145
3146 if (TREE_CODE (len) == SSA_NAME)
3147 range_type = get_range_info (len, &min, &max);
3148 if (range_type == VR_RANGE)
3149 {
3150 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3151 *min_size = min.to_uhwi ();
3152 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3153 *probable_max_size = *max_size = max.to_uhwi ();
3154 }
3155 else if (range_type == VR_ANTI_RANGE)
3156 {
3157 /* An anti-range 0...N lets us determine the minimal size as N+1. */
3158 if (min == 0)
3159 {
3160 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
3161 *min_size = max.to_uhwi () + 1;
3162 }
3163 /* Code like
3164
3165 int n;
3166 if (n < 100)
3167 memcpy (a, b, n)
3168
3169 produces an anti-range allowing negative values of N. We can
3170 still use this information to guess that N is not negative.
3171 */
3172 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3173 *probable_max_size = min.to_uhwi () - 1;
3174 }
3175 }
3176 gcc_checking_assert (*max_size <=
3177 (unsigned HOST_WIDE_INT)
3178 GET_MODE_MASK (GET_MODE (len_rtx)));
3179 }
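/* For illustration only: given

     unsigned int n = ...;
     if (n >= 4 && n <= 32)
       memcpy (a, b, n);

   value range propagation can tag N with the range [4, 32], and the
   code above then reports min_size == 4 and max_size == 32, letting
   the block-move expander pick a better strategy.  */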
3180
3181 /* Helper function to do the actual work for expand_builtin_memcpy. */
3182
3183 static rtx
3184 expand_builtin_memcpy_args (tree dest, tree src, tree len, rtx target, tree exp)
3185 {
3186 const char *src_str;
3187 unsigned int src_align = get_pointer_alignment (src);
3188 unsigned int dest_align = get_pointer_alignment (dest);
3189 rtx dest_mem, src_mem, dest_addr, len_rtx;
3190 HOST_WIDE_INT expected_size = -1;
3191 unsigned int expected_align = 0;
3192 unsigned HOST_WIDE_INT min_size;
3193 unsigned HOST_WIDE_INT max_size;
3194 unsigned HOST_WIDE_INT probable_max_size;
3195
3196 /* If DEST is not a pointer type, call the normal function. */
3197 if (dest_align == 0)
3198 return NULL_RTX;
3199
3200 /* If SRC is not a pointer type, don't do this
3201 operation in-line. */
3202 if (src_align == 0)
3203 return NULL_RTX;
3204
3205 if (currently_expanding_gimple_stmt)
3206 stringop_block_profile (currently_expanding_gimple_stmt,
3207 &expected_align, &expected_size);
3208
3209 if (expected_align < dest_align)
3210 expected_align = dest_align;
3211 dest_mem = get_memory_rtx (dest, len);
3212 set_mem_align (dest_mem, dest_align);
3213 len_rtx = expand_normal (len);
3214 determine_block_size (len, len_rtx, &min_size, &max_size,
3215 &probable_max_size);
3216 src_str = c_getstr (src);
3217
3218 /* If SRC is a string constant and block move would be done
3219 by pieces, we can avoid loading the string from memory
3220 and need only store the computed constants.
3221 if (src_str
3222 && CONST_INT_P (len_rtx)
3223 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3224 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3225 CONST_CAST (char *, src_str),
3226 dest_align, false))
3227 {
3228 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3229 builtin_memcpy_read_str,
3230 CONST_CAST (char *, src_str),
3231 dest_align, false, 0);
3232 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3233 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3234 return dest_mem;
3235 }
3236
3237 src_mem = get_memory_rtx (src, len);
3238 set_mem_align (src_mem, src_align);
3239
3240 /* Copy word part most expediently. */
3241 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3242 CALL_EXPR_TAILCALL (exp)
3243 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3244 expected_align, expected_size,
3245 min_size, max_size, probable_max_size);
3246
3247 if (dest_addr == 0)
3248 {
3249 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3250 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3251 }
3252
3253 return dest_addr;
3254 }
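/* For illustration only: with a constant source string and length,

     char buf[4];
     memcpy (buf, "abc", 4);

   the store-by-pieces path above emits the string bytes as immediate
   stores instead of loading them from the string's memory.  */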
3255
3256 /* Expand a call EXP to the memcpy builtin.
3257 Return NULL_RTX if we failed; the caller should emit a normal call,
3258 otherwise try to get the result in TARGET, if convenient (and in
3259 mode MODE if that's convenient). */
3260
3261 static rtx
3262 expand_builtin_memcpy (tree exp, rtx target)
3263 {
3264 if (!validate_arglist (exp,
3265 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3266 return NULL_RTX;
3267 else
3268 {
3269 tree dest = CALL_EXPR_ARG (exp, 0);
3270 tree src = CALL_EXPR_ARG (exp, 1);
3271 tree len = CALL_EXPR_ARG (exp, 2);
3272 return expand_builtin_memcpy_args (dest, src, len, target, exp);
3273 }
3274 }
3275
3276 /* Expand an instrumented call EXP to the memcpy builtin.
3277 Return NULL_RTX if we failed; the caller should emit a normal call,
3278 otherwise try to get the result in TARGET, if convenient (and in
3279 mode MODE if that's convenient). */
3280
3281 static rtx
3282 expand_builtin_memcpy_with_bounds (tree exp, rtx target)
3283 {
3284 if (!validate_arglist (exp,
3285 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3286 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3287 INTEGER_TYPE, VOID_TYPE))
3288 return NULL_RTX;
3289 else
3290 {
3291 tree dest = CALL_EXPR_ARG (exp, 0);
3292 tree src = CALL_EXPR_ARG (exp, 2);
3293 tree len = CALL_EXPR_ARG (exp, 4);
3294 rtx res = expand_builtin_memcpy_args (dest, src, len, target, exp);
3295
3296 /* Return src bounds with the result. */
3297 if (res)
3298 {
3299 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3300 expand_normal (CALL_EXPR_ARG (exp, 1)));
3301 res = chkp_join_splitted_slot (res, bnd);
3302 }
3303 return res;
3304 }
3305 }
3306
3307 /* Expand a call EXP to the mempcpy builtin.
3308 Return NULL_RTX if we failed; the caller should emit a normal call,
3309 otherwise try to get the result in TARGET, if convenient (and in
3310 mode MODE if that's convenient). If ENDP is 0 return the
3311 destination pointer, if ENDP is 1 return the end pointer ala
3312 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3313 stpcpy. */
3314
3315 static rtx
3316 expand_builtin_mempcpy (tree exp, rtx target, machine_mode mode)
3317 {
3318 if (!validate_arglist (exp,
3319 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3320 return NULL_RTX;
3321 else
3322 {
3323 tree dest = CALL_EXPR_ARG (exp, 0);
3324 tree src = CALL_EXPR_ARG (exp, 1);
3325 tree len = CALL_EXPR_ARG (exp, 2);
3326 return expand_builtin_mempcpy_args (dest, src, len,
3327 target, mode, /*endp=*/ 1,
3328 exp);
3329 }
3330 }
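/* For illustration only: mempcpy differs from memcpy only in its
   return value,

     char *end = mempcpy (dst, src, n);   // end == dst + n

   which is why the expansion above passes ENDP == 1 to request the
   end pointer.  */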
3331
3332 /* Expand an instrumented call EXP to the mempcpy builtin.
3333 Return NULL_RTX if we failed; the caller should emit a normal call.
3334 Otherwise try to get the result in TARGET, if convenient (and in
3335 mode MODE if that's convenient). */
3336
3337 static rtx
3338 expand_builtin_mempcpy_with_bounds (tree exp, rtx target, machine_mode mode)
3339 {
3340 if (!validate_arglist (exp,
3341 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3342 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3343 INTEGER_TYPE, VOID_TYPE))
3344 return NULL_RTX;
3345 else
3346 {
3347 tree dest = CALL_EXPR_ARG (exp, 0);
3348 tree src = CALL_EXPR_ARG (exp, 2);
3349 tree len = CALL_EXPR_ARG (exp, 4);
3350 rtx res = expand_builtin_mempcpy_args (dest, src, len, target,
3351 mode, 1, exp);
3352
3353 /* Return DEST bounds with the result. */
3354 if (res)
3355 {
3356 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3357 expand_normal (CALL_EXPR_ARG (exp, 1)));
3358 res = chkp_join_splitted_slot (res, bnd);
3359 }
3360 return res;
3361 }
3362 }
3363
3364 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3365 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3366 so that this can also be called without constructing an actual CALL_EXPR.
3367 The other arguments and return value are the same as for
3368 expand_builtin_mempcpy. */
3369
3370 static rtx
3371 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3372 rtx target, machine_mode mode, int endp,
3373 tree orig_exp)
3374 {
3375 tree fndecl = get_callee_fndecl (orig_exp);
3376
3377 /* If the return value is ignored, transform mempcpy into memcpy. */
3378 if (target == const0_rtx
3379 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP
3380 && builtin_decl_implicit_p (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP))
3381 {
3382 tree fn = builtin_decl_implicit (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP);
3383 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3384 dest, src, len);
3385 return expand_expr (result, target, mode, EXPAND_NORMAL);
3386 }
3387 else if (target == const0_rtx
3388 && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
3389 {
3390 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3391 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3392 dest, src, len);
3393 return expand_expr (result, target, mode, EXPAND_NORMAL);
3394 }
3395 else
3396 {
3397 const char *src_str;
3398 unsigned int src_align = get_pointer_alignment (src);
3399 unsigned int dest_align = get_pointer_alignment (dest);
3400 rtx dest_mem, src_mem, len_rtx;
3401
3402 /* If either SRC or DEST is not a pointer type, don't do this
3403 operation in-line. */
3404 if (dest_align == 0 || src_align == 0)
3405 return NULL_RTX;
3406
3407 /* If LEN is not constant, call the normal function. */
3408 if (! tree_fits_uhwi_p (len))
3409 return NULL_RTX;
3410
3411 len_rtx = expand_normal (len);
3412 src_str = c_getstr (src);
3413
3414 /* If SRC is a string constant and the block move would be done
3415 by pieces, we can avoid loading the string from memory
3416 and need only store the computed constants. */
3417 if (src_str
3418 && CONST_INT_P (len_rtx)
3419 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3420 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3421 CONST_CAST (char *, src_str),
3422 dest_align, false))
3423 {
3424 dest_mem = get_memory_rtx (dest, len);
3425 set_mem_align (dest_mem, dest_align);
3426 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3427 builtin_memcpy_read_str,
3428 CONST_CAST (char *, src_str),
3429 dest_align, false, endp);
3430 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3431 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3432 return dest_mem;
3433 }
3434
3435 if (CONST_INT_P (len_rtx)
3436 && can_move_by_pieces (INTVAL (len_rtx),
3437 MIN (dest_align, src_align)))
3438 {
3439 dest_mem = get_memory_rtx (dest, len);
3440 set_mem_align (dest_mem, dest_align);
3441 src_mem = get_memory_rtx (src, len);
3442 set_mem_align (src_mem, src_align);
3443 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3444 MIN (dest_align, src_align), endp);
3445 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3446 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3447 return dest_mem;
3448 }
3449
3450 return NULL_RTX;
3451 }
3452 }
3453
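/* At the source level, the const0_rtx transformations above
correspond roughly to rewriting a call whose result is unused, e.g.

(void) mempcpy (d, s, n);   becomes   memcpy (d, s, n);

since the two differ only in their return value (D, S and N here are
placeholder variables). */
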
3454 #ifndef HAVE_movstr
3455 # define HAVE_movstr 0
3456 # define CODE_FOR_movstr CODE_FOR_nothing
3457 #endif
3458
3459 /* Expand into a movstr instruction, if one is available. Return NULL_RTX
3460 if we failed; the caller should emit a normal call. Otherwise try to
3461 get the result in TARGET, if convenient. If ENDP is 0, return the
3462 destination pointer; if ENDP is 1, return the end pointer a la
3463 mempcpy; and if ENDP is 2, return the end pointer minus one a la
3464 stpcpy. */
3465
3466 static rtx
3467 expand_movstr (tree dest, tree src, rtx target, int endp)
3468 {
3469 struct expand_operand ops[3];
3470 rtx dest_mem;
3471 rtx src_mem;
3472
3473 if (!HAVE_movstr)
3474 return NULL_RTX;
3475
3476 dest_mem = get_memory_rtx (dest, NULL);
3477 src_mem = get_memory_rtx (src, NULL);
3478 if (!endp)
3479 {
3480 target = force_reg (Pmode, XEXP (dest_mem, 0));
3481 dest_mem = replace_equiv_address (dest_mem, target);
3482 }
3483
3484 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3485 create_fixed_operand (&ops[1], dest_mem);
3486 create_fixed_operand (&ops[2], src_mem);
3487 if (!maybe_expand_insn (CODE_FOR_movstr, 3, ops))
3488 return NULL_RTX;
3489
3490 if (endp && target != const0_rtx)
3491 {
3492 target = ops[0].value;
3493 /* movstr is supposed to set end to the address of the NUL
3494 terminator. If the caller requested a mempcpy-like return value,
3495 adjust it. */
3496 if (endp == 1)
3497 {
3498 rtx tem = plus_constant (GET_MODE (target),
3499 gen_lowpart (GET_MODE (target), target), 1);
3500 emit_move_insn (target, force_operand (tem, NULL_RTX));
3501 }
3502 }
3503 return target;
3504 }
3505
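/* Note the off-by-one handled above: movstr leaves the end pointer
at the NUL terminator, which is already the stpcpy (ENDP == 2)
result; mempcpy (ENDP == 1) wants one past the NUL, hence the
plus_constant adjustment by 1. */
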
3506 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3507 NULL_RTX if we failed; the caller should emit a normal call.
3508 Otherwise try to get the result in TARGET, if convenient. */
3510
3511 static rtx
3512 expand_builtin_strcpy (tree exp, rtx target)
3513 {
3514 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3515 {
3516 tree dest = CALL_EXPR_ARG (exp, 0);
3517 tree src = CALL_EXPR_ARG (exp, 1);
3518 return expand_builtin_strcpy_args (dest, src, target);
3519 }
3520 return NULL_RTX;
3521 }
3522
3523 /* Helper function to do the actual work for expand_builtin_strcpy. The
3524 arguments to the builtin_strcpy call DEST and SRC are broken out
3525 so that this can also be called without constructing an actual CALL_EXPR.
3526 The other arguments and return value are the same as for
3527 expand_builtin_strcpy. */
3528
3529 static rtx
3530 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3531 {
3532 return expand_movstr (dest, src, target, /*endp=*/0);
3533 }
3534
3535 /* Expand a call EXP to the stpcpy builtin.
3536 Return NULL_RTX if we failed; the caller should emit a normal call.
3537 Otherwise try to get the result in TARGET, if convenient (and in
3538 mode MODE if that's convenient). */
3539
3540 static rtx
3541 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3542 {
3543 tree dst, src;
3544 location_t loc = EXPR_LOCATION (exp);
3545
3546 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3547 return NULL_RTX;
3548
3549 dst = CALL_EXPR_ARG (exp, 0);
3550 src = CALL_EXPR_ARG (exp, 1);
3551
3552 /* If the return value is ignored, transform stpcpy into strcpy. */
3553 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3554 {
3555 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3556 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3557 return expand_expr (result, target, mode, EXPAND_NORMAL);
3558 }
3559 else
3560 {
3561 tree len, lenp1;
3562 rtx ret;
3563
3564 /* Ensure we get an actual string whose length can be evaluated at
3565 compile-time, not an expression containing a string. This is
3566 because the latter will potentially produce pessimized code
3567 when used to produce the return value. */
3568 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3569 return expand_movstr (dst, src, target, /*endp=*/2);
3570
3571 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3572 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3573 target, mode, /*endp=*/2,
3574 exp);
3575
3576 if (ret)
3577 return ret;
3578
3579 if (TREE_CODE (len) == INTEGER_CST)
3580 {
3581 rtx len_rtx = expand_normal (len);
3582
3583 if (CONST_INT_P (len_rtx))
3584 {
3585 ret = expand_builtin_strcpy_args (dst, src, target);
3586
3587 if (ret)
3588 {
3589 if (! target)
3590 {
3591 if (mode != VOIDmode)
3592 target = gen_reg_rtx (mode);
3593 else
3594 target = gen_reg_rtx (GET_MODE (ret));
3595 }
3596 if (GET_MODE (target) != GET_MODE (ret))
3597 ret = gen_lowpart (GET_MODE (target), ret);
3598
3599 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3600 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3601 gcc_assert (ret);
3602
3603 return target;
3604 }
3605 }
3606 }
3607
3608 return expand_movstr (dst, src, target, /*endp=*/2);
3609 }
3610 }
3611
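/* An identity assumed by the expansion above: for a NUL-terminated
string S, stpcpy (D, S) copies strlen (S) + 1 bytes and returns
D + strlen (S), a pointer to the terminating NUL it stored. That is
why LENP1 is strlen (S) + 1 while the mempcpy-style expansion is
asked for ENDP == 2, the end pointer minus one. */
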
3612 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3613 bytes from the constant string DATA + OFFSET and return them as a
3614 target constant. */
3615
3616 rtx
3617 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3618 machine_mode mode)
3619 {
3620 const char *str = (const char *) data;
3621
3622 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3623 return const0_rtx;
3624
3625 return c_readstr (str + offset, mode);
3626 }
3627
3628 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3629 NULL_RTX if we failed; the caller should emit a normal call. */
3630
3631 static rtx
3632 expand_builtin_strncpy (tree exp, rtx target)
3633 {
3634 location_t loc = EXPR_LOCATION (exp);
3635
3636 if (validate_arglist (exp,
3637 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3638 {
3639 tree dest = CALL_EXPR_ARG (exp, 0);
3640 tree src = CALL_EXPR_ARG (exp, 1);
3641 tree len = CALL_EXPR_ARG (exp, 2);
3642 tree slen = c_strlen (src, 1);
3643
3644 /* We need LEN to be a constant, and SRC must have a known constant length. */
3645 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
3646 return NULL_RTX;
3647
3648 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3649
3650 /* We're required to pad with trailing zeros if the requested
3651 len is greater than strlen(s2)+1. In that case try to
3652 use store_by_pieces; if that fails, punt. */
3653 if (tree_int_cst_lt (slen, len))
3654 {
3655 unsigned int dest_align = get_pointer_alignment (dest);
3656 const char *p = c_getstr (src);
3657 rtx dest_mem;
3658
3659 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
3660 || !can_store_by_pieces (tree_to_uhwi (len),
3661 builtin_strncpy_read_str,
3662 CONST_CAST (char *, p),
3663 dest_align, false))
3664 return NULL_RTX;
3665
3666 dest_mem = get_memory_rtx (dest, len);
3667 store_by_pieces (dest_mem, tree_to_uhwi (len),
3668 builtin_strncpy_read_str,
3669 CONST_CAST (char *, p), dest_align, false, 0);
3670 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3671 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3672 return dest_mem;
3673 }
3674 }
3675 return NULL_RTX;
3676 }
3677
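/* An example of the padding requirement handled above (BUF being a
placeholder destination): with a five-byte count,

strncpy (buf, "ab", 5);

must store 'a', 'b' and then three NUL bytes. The expansion
therefore reads past the end of the source string constant, and
builtin_strncpy_read_str supplies zeros for offsets beyond
strlen (str). */
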
3678 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3679 bytes from the constant string DATA + OFFSET and return them as a
3680 target constant. */
3681
3682 rtx
3683 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3684 machine_mode mode)
3685 {
3686 const char *c = (const char *) data;
3687 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3688
3689 memset (p, *c, GET_MODE_SIZE (mode));
3690
3691 return c_readstr (p, mode);
3692 }
3693
3694 /* Callback routine for store_by_pieces. Return the RTL of a register
3695 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3696 char value given in the RTL register data. For example, if mode is
3697 4 bytes wide, return the RTL for 0x01010101*data. */
3698
3699 static rtx
3700 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3701 machine_mode mode)
3702 {
3703 rtx target, coeff;
3704 size_t size;
3705 char *p;
3706
3707 size = GET_MODE_SIZE (mode);
3708 if (size == 1)
3709 return (rtx) data;
3710
3711 p = XALLOCAVEC (char, size);
3712 memset (p, 1, size);
3713 coeff = c_readstr (p, mode);
3714
3715 target = convert_to_mode (mode, (rtx) data, 1);
3716 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3717 return force_reg (mode, target);
3718 }
3719
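/* A sketch of the multiplication trick used by builtin_memset_gen_str:
multiplying the zero-extended byte by a coefficient whose bytes are
all 1 replicates it across the word. E.g. for a 4-byte mode and the
byte value 0xAB,

0xAB * 0x01010101 == 0xABABABAB

which is GET_MODE_SIZE (MODE) copies of the byte. */
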
3720 /* Expand expression EXP, which is a call to the memset builtin. Return
3721 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
3722 try to get the result in TARGET, if convenient (and in mode MODE if
3723 that's convenient). */
3724
3725 static rtx
3726 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
3727 {
3728 if (!validate_arglist (exp,
3729 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3730 return NULL_RTX;
3731 else
3732 {
3733 tree dest = CALL_EXPR_ARG (exp, 0);
3734 tree val = CALL_EXPR_ARG (exp, 1);
3735 tree len = CALL_EXPR_ARG (exp, 2);
3736 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3737 }
3738 }
3739
3740 /* Expand expression EXP, which is an instrumented call to the memset builtin.
3741 Return NULL_RTX if we failed; the caller should emit a normal call.
3742 Otherwise try to get the result in TARGET, if convenient (and in mode
3743 MODE if that's convenient). */
3744
3745 static rtx
3746 expand_builtin_memset_with_bounds (tree exp, rtx target, machine_mode mode)
3747 {
3748 if (!validate_arglist (exp,
3749 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3750 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3751 return NULL_RTX;
3752 else
3753 {
3754 tree dest = CALL_EXPR_ARG (exp, 0);
3755 tree val = CALL_EXPR_ARG (exp, 2);
3756 tree len = CALL_EXPR_ARG (exp, 3);
3757 rtx res = expand_builtin_memset_args (dest, val, len, target, mode, exp);
3758
3759 /* Return DEST bounds with the result. */
3760 if (res)
3761 {
3762 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3763 expand_normal (CALL_EXPR_ARG (exp, 1)));
3764 res = chkp_join_splitted_slot (res, bnd);
3765 }
3766 return res;
3767 }
3768 }
3769
3770 /* Helper function to do the actual work for expand_builtin_memset. The
3771 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3772 so that this can also be called without constructing an actual CALL_EXPR.
3773 The other arguments and return value are the same as for
3774 expand_builtin_memset. */
3775
3776 static rtx
3777 expand_builtin_memset_args (tree dest, tree val, tree len,
3778 rtx target, machine_mode mode, tree orig_exp)
3779 {
3780 tree fndecl, fn;
3781 enum built_in_function fcode;
3782 machine_mode val_mode;
3783 char c;
3784 unsigned int dest_align;
3785 rtx dest_mem, dest_addr, len_rtx;
3786 HOST_WIDE_INT expected_size = -1;
3787 unsigned int expected_align = 0;
3788 unsigned HOST_WIDE_INT min_size;
3789 unsigned HOST_WIDE_INT max_size;
3790 unsigned HOST_WIDE_INT probable_max_size;
3791
3792 dest_align = get_pointer_alignment (dest);
3793
3794 /* If DEST is not a pointer type, don't do this operation in-line. */
3795 if (dest_align == 0)
3796 return NULL_RTX;
3797
3798 if (currently_expanding_gimple_stmt)
3799 stringop_block_profile (currently_expanding_gimple_stmt,
3800 &expected_align, &expected_size);
3801
3802 if (expected_align < dest_align)
3803 expected_align = dest_align;
3804
3805 /* If the LEN parameter is zero, return DEST. */
3806 if (integer_zerop (len))
3807 {
3808 /* Evaluate and ignore VAL in case it has side-effects. */
3809 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3810 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3811 }
3812
3813 /* Stabilize the arguments in case we fail. */
3814 dest = builtin_save_expr (dest);
3815 val = builtin_save_expr (val);
3816 len = builtin_save_expr (len);
3817
3818 len_rtx = expand_normal (len);
3819 determine_block_size (len, len_rtx, &min_size, &max_size,
3820 &probable_max_size);
3821 dest_mem = get_memory_rtx (dest, len);
3822 val_mode = TYPE_MODE (unsigned_char_type_node);
3823
3824 if (TREE_CODE (val) != INTEGER_CST)
3825 {
3826 rtx val_rtx;
3827
3828 val_rtx = expand_normal (val);
3829 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
3830
3831 /* Assume that we can memset by pieces if we can store
3832 the coefficients by pieces (in the required modes).
3833 We can't pass builtin_memset_gen_str as that emits RTL. */
3834 c = 1;
3835 if (tree_fits_uhwi_p (len)
3836 && can_store_by_pieces (tree_to_uhwi (len),
3837 builtin_memset_read_str, &c, dest_align,
3838 true))
3839 {
3840 val_rtx = force_reg (val_mode, val_rtx);
3841 store_by_pieces (dest_mem, tree_to_uhwi (len),
3842 builtin_memset_gen_str, val_rtx, dest_align,
3843 true, 0);
3844 }
3845 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3846 dest_align, expected_align,
3847 expected_size, min_size, max_size,
3848 probable_max_size))
3849 goto do_libcall;
3850
3851 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3852 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3853 return dest_mem;
3854 }
3855
3856 if (target_char_cast (val, &c))
3857 goto do_libcall;
3858
3859 if (c)
3860 {
3861 if (tree_fits_uhwi_p (len)
3862 && can_store_by_pieces (tree_to_uhwi (len),
3863 builtin_memset_read_str, &c, dest_align,
3864 true))
3865 store_by_pieces (dest_mem, tree_to_uhwi (len),
3866 builtin_memset_read_str, &c, dest_align, true, 0);
3867 else if (!set_storage_via_setmem (dest_mem, len_rtx,
3868 gen_int_mode (c, val_mode),
3869 dest_align, expected_align,
3870 expected_size, min_size, max_size,
3871 probable_max_size))
3872 goto do_libcall;
3873
3874 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3875 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3876 return dest_mem;
3877 }
3878
3879 set_mem_align (dest_mem, dest_align);
3880 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3881 CALL_EXPR_TAILCALL (orig_exp)
3882 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3883 expected_align, expected_size,
3884 min_size, max_size,
3885 probable_max_size);
3886
3887 if (dest_addr == 0)
3888 {
3889 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3890 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3891 }
3892
3893 return dest_addr;
3894
3895 do_libcall:
3896 fndecl = get_callee_fndecl (orig_exp);
3897 fcode = DECL_FUNCTION_CODE (fndecl);
3898 if (fcode == BUILT_IN_MEMSET
3899 || fcode == BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP)
3900 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
3901 dest, val, len);
3902 else if (fcode == BUILT_IN_BZERO)
3903 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
3904 dest, len);
3905 else
3906 gcc_unreachable ();
3907 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3908 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3909 return expand_call (fn, target, target == const0_rtx);
3910 }
3911
3912 /* Expand expression EXP, which is a call to the bzero builtin. Return
3913 NULL_RTX if we failed; the caller should emit a normal call. */
3914
3915 static rtx
3916 expand_builtin_bzero (tree exp)
3917 {
3918 tree dest, size;
3919 location_t loc = EXPR_LOCATION (exp);
3920
3921 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3922 return NULL_RTX;
3923
3924 dest = CALL_EXPR_ARG (exp, 0);
3925 size = CALL_EXPR_ARG (exp, 1);
3926
3927 /* Build a new argument list, transforming bzero(ptr x, int y) into
3928 memset(ptr x, int 0, size_t y). It is done this way so that if
3929 the call isn't expanded inline, we fall back to calling bzero
3930 instead of memset. */
3931
3932 return expand_builtin_memset_args (dest, integer_zero_node,
3933 fold_convert_loc (loc,
3934 size_type_node, size),
3935 const0_rtx, VOIDmode, exp);
3936 }
3937
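/* In effect, bzero (x, y) is expanded as if the user had written
memset (x, 0, (size_t) y), with the original bzero call kept as
ORIG_EXP so that the libcall fallback in expand_builtin_memset_args
still calls bzero. E.g. bzero (buf, 32) is treated as
memset (buf, 0, 32). */
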
3938 /* Expand expression EXP, which is a call to the memcmp built-in function.
3939 Return NULL_RTX if we failed; the caller should emit a normal call.
3940 Otherwise try to get the result in TARGET, if convenient (and in mode
3941 MODE, if that's convenient). */
3942
3943 static rtx
3944 expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3945 ATTRIBUTE_UNUSED machine_mode mode)
3946 {
3947 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
3948
3949 if (!validate_arglist (exp,
3950 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3951 return NULL_RTX;
3952
3953 /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
3954 implementing memcmp because it will stop if it encounters two
3955 zero bytes. */
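/* For instance, with a[] = { 'x', 0, '1' } and b[] = { 'x', 0, '2' },
memcmp (a, b, 3) must return nonzero, but a cmpstrn-style compare
would stop at the matching NUL bytes and report equality. */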
3956 #if defined HAVE_cmpmemsi
3957 {
3958 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3959 rtx result;
3960 rtx insn;
3961 tree arg1 = CALL_EXPR_ARG (exp, 0);
3962 tree arg2 = CALL_EXPR_ARG (exp, 1);
3963 tree len = CALL_EXPR_ARG (exp, 2);
3964
3965 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3966 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3967 machine_mode insn_mode;
3968
3969 if (HAVE_cmpmemsi)
3970 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
3971 else
3972 return NULL_RTX;
3973
3974 /* If either argument is not a pointer type, call the function. */
3975 if (arg1_align == 0 || arg2_align == 0)
3976 return NULL_RTX;
3977
3978 /* Make a place to write the result of the instruction. */
3979 result = target;
3980 if (! (result != 0
3981 && REG_P (result) && GET_MODE (result) == insn_mode
3982 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3983 result = gen_reg_rtx (insn_mode);
3984
3985 arg1_rtx = get_memory_rtx (arg1, len);
3986 arg2_rtx = get_memory_rtx (arg2, len);
3987 arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
3988
3989 /* Set MEM_SIZE as appropriate. */
3990 if (CONST_INT_P (arg3_rtx))
3991 {
3992 set_mem_size (arg1_rtx, INTVAL (arg3_rtx));
3993 set_mem_size (arg2_rtx, INTVAL (arg3_rtx));
3994 }
3995
3996 if (HAVE_cmpmemsi)
3997 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3998 GEN_INT (MIN (arg1_align, arg2_align)));
3999 else
4000 gcc_unreachable ();
4001
4002 if (insn)
4003 emit_insn (insn);
4004 else
4005 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
4006 TYPE_MODE (integer_type_node), 3,
4007 XEXP (arg1_rtx, 0), Pmode,
4008 XEXP (arg2_rtx, 0), Pmode,
4009 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
4010 TYPE_UNSIGNED (sizetype)),
4011 TYPE_MODE (sizetype));
4012
4013 /* Return the value in the proper mode for this function. */
4014 mode = TYPE_MODE (TREE_TYPE (exp));
4015 if (GET_MODE (result) == mode)
4016 return result;
4017 else if (target != 0)
4018 {
4019 convert_move (target, result, 0);
4020 return target;
4021 }
4022 else
4023 return convert_to_mode (mode, result, 0);
4024 }
4025 #endif /* HAVE_cmpmemsi. */
4026
4027 return NULL_RTX;
4028 }
4029
4030 /* Expand expression EXP, which is a call to the strcmp builtin.
4031 Return NULL_RTX if we failed; the caller should emit a normal call.
4032 Otherwise try to get the result in TARGET, if convenient. */
4033
4034 static rtx
4035 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4036 {
4037 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4038 return NULL_RTX;
4039
4040 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
4041 if (direct_optab_handler (cmpstr_optab, SImode) != CODE_FOR_nothing
4042 || direct_optab_handler (cmpstrn_optab, SImode) != CODE_FOR_nothing)
4043 {
4044 rtx arg1_rtx, arg2_rtx;
4045 rtx result, insn = NULL_RTX;
4046 tree fndecl, fn;
4047 tree arg1 = CALL_EXPR_ARG (exp, 0);
4048 tree arg2 = CALL_EXPR_ARG (exp, 1);
4049
4050 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4051 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4052
4053 /* If either argument is not a pointer type, call the function. */
4054 if (arg1_align == 0 || arg2_align == 0)
4055 return NULL_RTX;
4056
4057 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4058 arg1 = builtin_save_expr (arg1);
4059 arg2 = builtin_save_expr (arg2);
4060
4061 arg1_rtx = get_memory_rtx (arg1, NULL);
4062 arg2_rtx = get_memory_rtx (arg2, NULL);
4063
4064 #ifdef HAVE_cmpstrsi
4065 /* Try to call cmpstrsi. */
4066 if (HAVE_cmpstrsi)
4067 {
4068 machine_mode insn_mode
4069 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
4070
4071 /* Make a place to write the result of the instruction. */
4072 result = target;
4073 if (! (result != 0
4074 && REG_P (result) && GET_MODE (result) == insn_mode
4075 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4076 result = gen_reg_rtx (insn_mode);
4077
4078 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
4079 GEN_INT (MIN (arg1_align, arg2_align)));
4080 }
4081 #endif
4082 #ifdef HAVE_cmpstrnsi
4083 /* Try to determine at least one length and call cmpstrnsi. */
4084 if (!insn && HAVE_cmpstrnsi)
4085 {
4086 tree len;
4087 rtx arg3_rtx;
4088
4089 machine_mode insn_mode
4090 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4091 tree len1 = c_strlen (arg1, 1);
4092 tree len2 = c_strlen (arg2, 1);
4093
4094 if (len1)
4095 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4096 if (len2)
4097 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4098
4099 /* If we don't have a constant length for the first, use the length
4100 of the second, if we know it. We don't require a constant for
4101 this case; some cost analysis could be done if both are available
4102 but neither is constant. For now, assume they're equally cheap,
4103 unless one has side effects. If both strings have constant lengths,
4104 use the smaller. */
4105
4106 if (!len1)
4107 len = len2;
4108 else if (!len2)
4109 len = len1;
4110 else if (TREE_SIDE_EFFECTS (len1))
4111 len = len2;
4112 else if (TREE_SIDE_EFFECTS (len2))
4113 len = len1;
4114 else if (TREE_CODE (len1) != INTEGER_CST)
4115 len = len2;
4116 else if (TREE_CODE (len2) != INTEGER_CST)
4117 len = len1;
4118 else if (tree_int_cst_lt (len1, len2))
4119 len = len1;
4120 else
4121 len = len2;
4122
4123 /* If both arguments have side effects, we cannot optimize. */
4124 if (!len || TREE_SIDE_EFFECTS (len))
4125 goto do_libcall;
4126
4127 arg3_rtx = expand_normal (len);
4128
4129 /* Make a place to write the result of the instruction. */
4130 result = target;
4131 if (! (result != 0
4132 && REG_P (result) && GET_MODE (result) == insn_mode
4133 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4134 result = gen_reg_rtx (insn_mode);
4135
4136 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4137 GEN_INT (MIN (arg1_align, arg2_align)));
4138 }
4139 #endif
4140
4141 if (insn)
4142 {
4143 machine_mode mode;
4144 emit_insn (insn);
4145
4146 /* Return the value in the proper mode for this function. */
4147 mode = TYPE_MODE (TREE_TYPE (exp));
4148 if (GET_MODE (result) == mode)
4149 return result;
4150 if (target == 0)
4151 return convert_to_mode (mode, result, 0);
4152 convert_move (target, result, 0);
4153 return target;
4154 }
4155
4156 /* Expand the library call ourselves using a stabilized argument
4157 list to avoid evaluating the function's arguments twice. */
4158 #ifdef HAVE_cmpstrnsi
4159 do_libcall:
4160 #endif
4161 fndecl = get_callee_fndecl (exp);
4162 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4163 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4164 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4165 return expand_call (fn, target, target == const0_rtx);
4166 }
4167 #endif
4168 return NULL_RTX;
4169 }
4170
4171 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4172 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
4173 try to get the result in TARGET, if convenient. */
4174
4175 static rtx
4176 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4177 ATTRIBUTE_UNUSED machine_mode mode)
4178 {
4179 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
4180
4181 if (!validate_arglist (exp,
4182 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4183 return NULL_RTX;
4184
4185 /* If c_strlen can determine an expression for one of the string
4186 lengths, and it doesn't have side effects, then emit cmpstrnsi
4187 using length MIN(strlen(string)+1, arg3). */
4188 #ifdef HAVE_cmpstrnsi
4189 if (HAVE_cmpstrnsi)
4190 {
4191 tree len, len1, len2;
4192 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4193 rtx result, insn;
4194 tree fndecl, fn;
4195 tree arg1 = CALL_EXPR_ARG (exp, 0);
4196 tree arg2 = CALL_EXPR_ARG (exp, 1);
4197 tree arg3 = CALL_EXPR_ARG (exp, 2);
4198
4199 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4200 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4201 machine_mode insn_mode
4202 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4203
4204 len1 = c_strlen (arg1, 1);
4205 len2 = c_strlen (arg2, 1);
4206
4207 if (len1)
4208 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4209 if (len2)
4210 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4211
4212 /* If we don't have a constant length for the first, use the length
4213 of the second, if we know it. We don't require a constant for
4214 this case; some cost analysis could be done if both are available
4215 but neither is constant. For now, assume they're equally cheap,
4216 unless one has side effects. If both strings have constant lengths,
4217 use the smaller. */
4218
4219 if (!len1)
4220 len = len2;
4221 else if (!len2)
4222 len = len1;
4223 else if (TREE_SIDE_EFFECTS (len1))
4224 len = len2;
4225 else if (TREE_SIDE_EFFECTS (len2))
4226 len = len1;
4227 else if (TREE_CODE (len1) != INTEGER_CST)
4228 len = len2;
4229 else if (TREE_CODE (len2) != INTEGER_CST)
4230 len = len1;
4231 else if (tree_int_cst_lt (len1, len2))
4232 len = len1;
4233 else
4234 len = len2;
4235
4236 /* If both arguments have side effects, we cannot optimize. */
4237 if (!len || TREE_SIDE_EFFECTS (len))
4238 return NULL_RTX;
4239
4240 /* The actual new length parameter is MIN(len,arg3). */
4241 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
4242 fold_convert_loc (loc, TREE_TYPE (len), arg3));
4243
4244 /* If either argument is not a pointer type, call the function. */
4245 if (arg1_align == 0 || arg2_align == 0)
4246 return NULL_RTX;
4247
4248 /* Make a place to write the result of the instruction. */
4249 result = target;
4250 if (! (result != 0
4251 && REG_P (result) && GET_MODE (result) == insn_mode
4252 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4253 result = gen_reg_rtx (insn_mode);
4254
4255 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4256 arg1 = builtin_save_expr (arg1);
4257 arg2 = builtin_save_expr (arg2);
4258 len = builtin_save_expr (len);
4259
4260 arg1_rtx = get_memory_rtx (arg1, len);
4261 arg2_rtx = get_memory_rtx (arg2, len);
4262 arg3_rtx = expand_normal (len);
4263 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4264 GEN_INT (MIN (arg1_align, arg2_align)));
4265 if (insn)
4266 {
4267 emit_insn (insn);
4268
4269 /* Return the value in the proper mode for this function. */
4270 mode = TYPE_MODE (TREE_TYPE (exp));
4271 if (GET_MODE (result) == mode)
4272 return result;
4273 if (target == 0)
4274 return convert_to_mode (mode, result, 0);
4275 convert_move (target, result, 0);
4276 return target;
4277 }
4278
4279 /* Expand the library call ourselves using a stabilized argument
4280 list to avoid evaluating the function's arguments twice. */
4281 fndecl = get_callee_fndecl (exp);
4282 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
4283 arg1, arg2, len);
4284 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4285 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4286 return expand_call (fn, target, target == const0_rtx);
4287 }
4288 #endif
4289 return NULL_RTX;
4290 }
4291
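/* A worked example of the length computation above (illustrative
values): for strncmp ("abc", s, 10), len1 = strlen ("abc") + 1 = 4
and len2 is unknown, so len = 4 and the length actually passed to
cmpstrnsi is MIN (4, 10) = 4. The compare may stop after four bytes
because "abc" ends there, and so must any equal prefix of S. */
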
4292 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4293 if that's convenient. */
4294
4295 rtx
4296 expand_builtin_saveregs (void)
4297 {
4298 rtx val;
4299 rtx_insn *seq;
4300
4301 /* Don't do __builtin_saveregs more than once in a function.
4302 Save the result of the first call and reuse it. */
4303 if (saveregs_value != 0)
4304 return saveregs_value;
4305
4306 /* When this function is called, it means that registers must be
4307 saved on entry to this function. So we migrate the call to the
4308 first insn of this function. */
4309
4310 start_sequence ();
4311
4312 /* Do whatever the machine needs done in this case. */
4313 val = targetm.calls.expand_builtin_saveregs ();
4314
4315 seq = get_insns ();
4316 end_sequence ();
4317
4318 saveregs_value = val;
4319
4320 /* Put the insns after the NOTE that starts the function. If this
4321 is inside a start_sequence, make the outer-level insn chain current, so
4322 the code is placed at the start of the function. */
4323 push_topmost_sequence ();
4324 emit_insn_after (seq, entry_of_function ());
4325 pop_topmost_sequence ();
4326
4327 return val;
4328 }
4329
4330 /* Expand a call to __builtin_next_arg. */
4331
4332 static rtx
4333 expand_builtin_next_arg (void)
4334 {
4335 /* Checking arguments is already done in fold_builtin_next_arg
4336 that must be called before this function. */
4337 return expand_binop (ptr_mode, add_optab,
4338 crtl->args.internal_arg_pointer,
4339 crtl->args.arg_offset_rtx,
4340 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4341 }
4342
4343 /* Make it easier for the backends by protecting the valist argument
4344 from multiple evaluations. */
4345
4346 static tree
4347 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4348 {
4349 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4350
4351 /* The current way of determining the type of valist is completely
4352 bogus. We should have the information on the va builtin instead. */
4353 if (!vatype)
4354 vatype = targetm.fn_abi_va_list (cfun->decl);
4355
4356 if (TREE_CODE (vatype) == ARRAY_TYPE)
4357 {
4358 if (TREE_SIDE_EFFECTS (valist))
4359 valist = save_expr (valist);
4360
4361 /* For this case, the backends will be expecting a pointer to
4362 vatype, but it's possible we've actually been given an array
4363 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4364 So fix it. */
4365 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4366 {
4367 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4368 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4369 }
4370 }
4371 else
4372 {
4373 tree pt = build_pointer_type (vatype);
4374
4375 if (! needs_lvalue)
4376 {
4377 if (! TREE_SIDE_EFFECTS (valist))
4378 return valist;
4379
4380 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4381 TREE_SIDE_EFFECTS (valist) = 1;
4382 }
4383
4384 if (TREE_SIDE_EFFECTS (valist))
4385 valist = save_expr (valist);
4386 valist = fold_build2_loc (loc, MEM_REF,
4387 vatype, valist, build_int_cst (pt, 0));
4388 }
4389
4390 return valist;
4391 }
4392
4393 /* The "standard" definition of va_list is void*. */
4394
4395 tree
4396 std_build_builtin_va_list (void)
4397 {
4398 return ptr_type_node;
4399 }
4400
4401 /* The "standard" abi va_list is va_list_type_node. */
4402
4403 tree
4404 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4405 {
4406 return va_list_type_node;
4407 }
4408
4409 /* The "standard" type of va_list is va_list_type_node. */
4410
4411 tree
4412 std_canonical_va_list_type (tree type)
4413 {
4414 tree wtype, htype;
4415
4416 if (INDIRECT_REF_P (type))
4417 type = TREE_TYPE (type);
4418 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE (type)))
4419 type = TREE_TYPE (type);
4420 wtype = va_list_type_node;
4421 htype = type;
4422 /* Handle structure va_list types. */
4423 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4424 htype = TREE_TYPE (htype);
4425 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4426 {
4427 /* If va_list is an array type, the argument may have decayed
4428 to a pointer type, e.g. by being passed to another function.
4429 In that case, unwrap both types so that we can compare the
4430 underlying records. */
4431 if (TREE_CODE (htype) == ARRAY_TYPE
4432 || POINTER_TYPE_P (htype))
4433 {
4434 wtype = TREE_TYPE (wtype);
4435 htype = TREE_TYPE (htype);
4436 }
4437 }
4438 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4439 return va_list_type_node;
4440
4441 return NULL_TREE;
4442 }
4443
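/* The array-decay case above arises in code such as

void f (va_list ap) { vsomething (ap); }

(VSOMETHING being any callee taking a va_list): on targets where
va_list is an array type, the parameter AP decays to a pointer to
the underlying record, so both WTYPE and HTYPE are unwrapped before
their main variants are compared. */
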
4444 /* The "standard" implementation of va_start: just assign `nextarg' to
4445 the variable. */
4446
4447 void
4448 std_expand_builtin_va_start (tree valist, rtx nextarg)
4449 {
4450 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4451 convert_move (va_r, nextarg, 0);
4452
4453 /* We do not have any valid bounds for the pointer, so
4454 just store zero bounds for it. */
4455 if (chkp_function_instrumented_p (current_function_decl))
4456 chkp_expand_bounds_reset_for_mem (valist,
4457 make_tree (TREE_TYPE (valist),
4458 nextarg));
4459 }
4460
4461 /* Expand EXP, a call to __builtin_va_start. */
4462
4463 static rtx
4464 expand_builtin_va_start (tree exp)
4465 {
4466 rtx nextarg;
4467 tree valist;
4468 location_t loc = EXPR_LOCATION (exp);
4469
4470 if (call_expr_nargs (exp) < 2)
4471 {
4472 error_at (loc, "too few arguments to function %<va_start%>");
4473 return const0_rtx;
4474 }
4475
4476 if (fold_builtin_next_arg (exp, true))
4477 return const0_rtx;
4478
4479 nextarg = expand_builtin_next_arg ();
4480 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4481
4482 if (targetm.expand_builtin_va_start)
4483 targetm.expand_builtin_va_start (valist, nextarg);
4484 else
4485 std_expand_builtin_va_start (valist, nextarg);
4486
4487 return const0_rtx;
4488 }
4489
4490 /* Expand EXP, a call to __builtin_va_end. */
4491
4492 static rtx
4493 expand_builtin_va_end (tree exp)
4494 {
4495 tree valist = CALL_EXPR_ARG (exp, 0);
4496
4497 /* Evaluate for side effects, if needed. I hate macros that don't
4498 do that. */
4499 if (TREE_SIDE_EFFECTS (valist))
4500 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4501
4502 return const0_rtx;
4503 }
4504
4505 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4506 builtin rather than just as an assignment in stdarg.h because of the
4507 nastiness of array-type va_list types. */
4508
4509 static rtx
4510 expand_builtin_va_copy (tree exp)
4511 {
4512 tree dst, src, t;
4513 location_t loc = EXPR_LOCATION (exp);
4514
4515 dst = CALL_EXPR_ARG (exp, 0);
4516 src = CALL_EXPR_ARG (exp, 1);
4517
4518 dst = stabilize_va_list_loc (loc, dst, 1);
4519 src = stabilize_va_list_loc (loc, src, 0);
4520
4521 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4522
4523 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4524 {
4525 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4526 TREE_SIDE_EFFECTS (t) = 1;
4527 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4528 }
4529 else
4530 {
4531 rtx dstb, srcb, size;
4532
4533 /* Evaluate to pointers. */
4534 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4535 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4536 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4537 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4538
4539 dstb = convert_memory_address (Pmode, dstb);
4540 srcb = convert_memory_address (Pmode, srcb);
4541
4542 /* "Dereference" to BLKmode memories. */
4543 dstb = gen_rtx_MEM (BLKmode, dstb);
4544 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4545 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4546 srcb = gen_rtx_MEM (BLKmode, srcb);
4547 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4548 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4549
4550 /* Copy. */
4551 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4552 }
4553
4554 return const0_rtx;
4555 }
4556
4557 /* Expand a call to one of the builtin functions __builtin_frame_address or
4558 __builtin_return_address. */
4559
4560 static rtx
4561 expand_builtin_frame_address (tree fndecl, tree exp)
4562 {
4563 /* The argument must be a nonnegative integer constant.
4564 It counts the number of frames to scan up the stack.
4565 The value is the return address saved in that frame. */
4566 if (call_expr_nargs (exp) == 0)
4567 /* Warning about missing arg was already issued. */
4568 return const0_rtx;
4569 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
4570 {
4571 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4572 error ("invalid argument to %<__builtin_frame_address%>");
4573 else
4574 error ("invalid argument to %<__builtin_return_address%>");
4575 return const0_rtx;
4576 }
4577 else
4578 {
4579 rtx tem
4580 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
4581 tree_to_uhwi (CALL_EXPR_ARG (exp, 0)));
4582
4583 /* Some ports cannot access arbitrary stack frames. */
4584 if (tem == NULL)
4585 {
4586 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4587 warning (0, "unsupported argument to %<__builtin_frame_address%>");
4588 else
4589 warning (0, "unsupported argument to %<__builtin_return_address%>");
4590 return const0_rtx;
4591 }
4592
4593 /* For __builtin_frame_address, return what we've got. */
4594 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4595 return tem;
4596
4597 if (!REG_P (tem)
4598 && ! CONSTANT_P (tem))
4599 tem = copy_addr_to_reg (tem);
4600 return tem;
4601 }
4602 }
4603
4604 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4605 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4606 is the same as for allocate_dynamic_stack_space. */
4607
4608 static rtx
4609 expand_builtin_alloca (tree exp, bool cannot_accumulate)
4610 {
4611 rtx op0;
4612 rtx result;
4613 bool valid_arglist;
4614 unsigned int align;
4615 bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
4616 == BUILT_IN_ALLOCA_WITH_ALIGN);
4617
4618 valid_arglist
4619 = (alloca_with_align
4620 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4621 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4622
4623 if (!valid_arglist)
4624 return NULL_RTX;
4625
4626 /* Compute the argument. */
4627 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4628
4629 /* Compute the alignment. */
4630 align = (alloca_with_align
4631 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
4632 : BIGGEST_ALIGNMENT);
4633
4634 /* Allocate the desired space. */
4635 result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
4636 result = convert_memory_address (ptr_mode, result);
4637
4638 return result;
4639 }
4640
4641 /* Expand a call to the bswap builtin in EXP.
4642 Return NULL_RTX if a normal call should be emitted rather than expanding the
4643 function in-line. If convenient, the result should be placed in TARGET.
4644 SUBTARGET may be used as the target for computing one of EXP's operands. */
4645
4646 static rtx
4647 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
4648 rtx subtarget)
4649 {
4650 tree arg;
4651 rtx op0;
4652
4653 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4654 return NULL_RTX;
4655
4656 arg = CALL_EXPR_ARG (exp, 0);
4657 op0 = expand_expr (arg,
4658 subtarget && GET_MODE (subtarget) == target_mode
4659 ? subtarget : NULL_RTX,
4660 target_mode, EXPAND_NORMAL);
4661 if (GET_MODE (op0) != target_mode)
4662 op0 = convert_to_mode (target_mode, op0, 1);
4663
4664 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
4665
4666 gcc_assert (target);
4667
4668 return convert_to_mode (target_mode, target, 1);
4669 }
4670
4671 /* Expand a call to a unary builtin in EXP.
4672 Return NULL_RTX if a normal call should be emitted rather than expanding the
4673 function in-line. If convenient, the result should be placed in TARGET.
4674 SUBTARGET may be used as the target for computing one of EXP's operands. */
4675
4676 static rtx
4677 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
4678 rtx subtarget, optab op_optab)
4679 {
4680 rtx op0;
4681
4682 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4683 return NULL_RTX;
4684
4685 /* Compute the argument. */
4686 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4687 (subtarget
4688 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4689 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
4690 VOIDmode, EXPAND_NORMAL);
4691 /* Compute op, into TARGET if possible.
4692 Set TARGET to wherever the result comes back. */
4693 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4694 op_optab, op0, target, op_optab != clrsb_optab);
4695 gcc_assert (target);
4696
4697 return convert_to_mode (target_mode, target, 0);
4698 }
4699
4700 /* Expand a call to __builtin_expect. We just return our argument,
4701 as the builtin_expect semantics should already have been applied by
4702 the tree branch prediction pass. */
4703
4704 static rtx
4705 expand_builtin_expect (tree exp, rtx target)
4706 {
4707 tree arg;
4708
4709 if (call_expr_nargs (exp) < 2)
4710 return const0_rtx;
4711 arg = CALL_EXPR_ARG (exp, 0);
4712
4713 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4714 /* When guessing was done, the hints should already have been stripped away. */
4715 gcc_assert (!flag_guess_branch_prob
4716 || optimize == 0 || seen_error ());
4717 return target;
4718 }
4719
4720 /* Expand a call to __builtin_assume_aligned. We just return our first
4721 argument, as the builtin_assume_aligned semantics should already have
4722 been applied by CCP. */
4723
4724 static rtx
4725 expand_builtin_assume_aligned (tree exp, rtx target)
4726 {
4727 if (call_expr_nargs (exp) < 2)
4728 return const0_rtx;
4729 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4730 EXPAND_NORMAL);
4731 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4732 && (call_expr_nargs (exp) < 3
4733 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4734 return target;
4735 }
4736
4737 void
4738 expand_builtin_trap (void)
4739 {
4740 #ifdef HAVE_trap
4741 if (HAVE_trap)
4742 {
4743 rtx_insn *insn = emit_insn (gen_trap ());
4744 /* For trap insns when not accumulating outgoing args force
4745 REG_ARGS_SIZE note to prevent crossjumping of calls with
4746 different args sizes. */
4747 if (!ACCUMULATE_OUTGOING_ARGS)
4748 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4749 }
4750 else
4751 #endif
4752 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
4753 emit_barrier ();
4754 }
4755
4756 /* Expand a call to __builtin_unreachable. We do nothing except emit
4757 a barrier saying that control flow will not pass here.
4758
4759 It is the responsibility of the program being compiled to ensure
4760 that control flow never reaches __builtin_unreachable. */
4761 static void
4762 expand_builtin_unreachable (void)
4763 {
4764 emit_barrier ();
4765 }
4766
4767 /* Expand EXP, a call to fabs, fabsf or fabsl.
4768 Return NULL_RTX if a normal call should be emitted rather than expanding
4769 the function inline. If convenient, the result should be placed
4770 in TARGET. SUBTARGET may be used as the target for computing
4771 the operand. */
4772
4773 static rtx
4774 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4775 {
4776 machine_mode mode;
4777 tree arg;
4778 rtx op0;
4779
4780 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4781 return NULL_RTX;
4782
4783 arg = CALL_EXPR_ARG (exp, 0);
4784 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4785 mode = TYPE_MODE (TREE_TYPE (arg));
4786 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4787 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4788 }
4789
4790 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4791 Return NULL if a normal call should be emitted rather than expanding the
4792 function inline. If convenient, the result should be placed in TARGET.
4793 SUBTARGET may be used as the target for computing the operand. */
4794
4795 static rtx
4796 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4797 {
4798 rtx op0, op1;
4799 tree arg;
4800
4801 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4802 return NULL_RTX;
4803
4804 arg = CALL_EXPR_ARG (exp, 0);
4805 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4806
4807 arg = CALL_EXPR_ARG (exp, 1);
4808 op1 = expand_normal (arg);
4809
4810 return expand_copysign (op0, op1, target);
4811 }
4812
4813 /* Expand a call to __builtin___clear_cache. */
4814
4815 static rtx
4816 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
4817 {
4818 #ifndef HAVE_clear_cache
4819 #ifdef CLEAR_INSN_CACHE
4820 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4821 does something. Just do the default expansion to a call to
4822 __clear_cache(). */
4823 return NULL_RTX;
4824 #else
4825 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4826 does nothing. There is no need to call it. Do nothing. */
4827 return const0_rtx;
4828 #endif /* CLEAR_INSN_CACHE */
4829 #else
4830 /* We have a "clear_cache" insn, and it will handle everything. */
4831 tree begin, end;
4832 rtx begin_rtx, end_rtx;
4833
4834 /* We must not expand to a library call. If we did, any
4835 fallback library function in libgcc that might contain a call to
4836 __builtin___clear_cache() would recurse infinitely. */
4837 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4838 {
4839 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
4840 return const0_rtx;
4841 }
4842
4843 if (HAVE_clear_cache)
4844 {
4845 struct expand_operand ops[2];
4846
4847 begin = CALL_EXPR_ARG (exp, 0);
4848 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
4849
4850 end = CALL_EXPR_ARG (exp, 1);
4851 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
4852
4853 create_address_operand (&ops[0], begin_rtx);
4854 create_address_operand (&ops[1], end_rtx);
4855 if (maybe_expand_insn (CODE_FOR_clear_cache, 2, ops))
4856 return const0_rtx;
4857 }
4858 return const0_rtx;
4859 #endif /* HAVE_clear_cache */
4860 }
4861
4862 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
4863
4864 static rtx
4865 round_trampoline_addr (rtx tramp)
4866 {
4867 rtx temp, addend, mask;
4868
4869 /* If we don't need too much alignment, we'll have been guaranteed
4870 proper alignment by get_trampoline_type. */
4871 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
4872 return tramp;
4873
4874 /* Round address up to desired boundary. */
4875 temp = gen_reg_rtx (Pmode);
4876 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
4877 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
4878
4879 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
4880 temp, 0, OPTAB_LIB_WIDEN);
4881 tramp = expand_simple_binop (Pmode, AND, temp, mask,
4882 temp, 0, OPTAB_LIB_WIDEN);
4883
4884 return tramp;
4885 }
4886
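/* The two binops above compute the classic round-up idiom
(TRAMP + ALIGN - 1) & -ALIGN with ALIGN in bytes. For example,
rounding the address 0x1003 up to a 16-byte boundary:

(0x1003 + 15) & -16 == 0x1012 & ~0xf == 0x1010. */
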
4887 static rtx
4888 expand_builtin_init_trampoline (tree exp, bool onstack)
4889 {
4890 tree t_tramp, t_func, t_chain;
4891 rtx m_tramp, r_tramp, r_chain, tmp;
4892
4893 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
4894 POINTER_TYPE, VOID_TYPE))
4895 return NULL_RTX;
4896
4897 t_tramp = CALL_EXPR_ARG (exp, 0);
4898 t_func = CALL_EXPR_ARG (exp, 1);
4899 t_chain = CALL_EXPR_ARG (exp, 2);
4900
4901 r_tramp = expand_normal (t_tramp);
4902 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
4903 MEM_NOTRAP_P (m_tramp) = 1;
4904
4905 /* If ONSTACK, the TRAMP argument should be the address of a field
4906 within the local function's FRAME decl. Either way, let's see if
4907 we can fill in the MEM_ATTRs for this memory. */
4908 if (TREE_CODE (t_tramp) == ADDR_EXPR)
4909 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
4910
4911 /* Creator of a heap trampoline is responsible for making sure the
4912 address is aligned to at least STACK_BOUNDARY. Normally malloc
4913 will ensure this anyhow. */
4914 tmp = round_trampoline_addr (r_tramp);
4915 if (tmp != r_tramp)
4916 {
4917 m_tramp = change_address (m_tramp, BLKmode, tmp);
4918 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
4919 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
4920 }
4921
4922 /* The FUNC argument should be the address of the nested function.
4923 Extract the actual function decl to pass to the hook. */
4924 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
4925 t_func = TREE_OPERAND (t_func, 0);
4926 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
4927
4928 r_chain = expand_normal (t_chain);
4929
4930 /* Generate insns to initialize the trampoline. */
4931 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
4932
4933 if (onstack)
4934 {
4935 trampolines_created = 1;
4936
4937 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
4938 "trampoline generated for nested function %qD", t_func);
4939 }
4940
4941 return const0_rtx;
4942 }
4943
4944 static rtx
4945 expand_builtin_adjust_trampoline (tree exp)
4946 {
4947 rtx tramp;
4948
4949 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4950 return NULL_RTX;
4951
4952 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4953 tramp = round_trampoline_addr (tramp);
4954 if (targetm.calls.trampoline_adjust_address)
4955 tramp = targetm.calls.trampoline_adjust_address (tramp);
4956
4957 return tramp;
4958 }
4959
4960 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
4961 function. The function first checks whether the back end provides
4962 an insn to implement signbit for the respective mode. If not, it
4963 checks whether the floating point format of the value is such that
4964 the sign bit can be extracted. If that is not the case, the
4965 function returns NULL_RTX to indicate that a normal call should be
4966 emitted rather than expanding the function in-line. EXP is the
4967 expression that is a call to the builtin function; if convenient,
4968 the result should be placed in TARGET. */
4969 static rtx
4970 expand_builtin_signbit (tree exp, rtx target)
4971 {
4972 const struct real_format *fmt;
4973 machine_mode fmode, imode, rmode;
4974 tree arg;
4975 int word, bitpos;
4976 enum insn_code icode;
4977 rtx temp;
4978 location_t loc = EXPR_LOCATION (exp);
4979
4980 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4981 return NULL_RTX;
4982
4983 arg = CALL_EXPR_ARG (exp, 0);
4984 fmode = TYPE_MODE (TREE_TYPE (arg));
4985 rmode = TYPE_MODE (TREE_TYPE (exp));
4986 fmt = REAL_MODE_FORMAT (fmode);
4987
4988 arg = builtin_save_expr (arg);
4989
4990 /* Expand the argument yielding a RTX expression. */
4991 temp = expand_normal (arg);
4992
4993 /* Check if the back end provides an insn that handles signbit for the
4994 argument's mode. */
4995 icode = optab_handler (signbit_optab, fmode);
4996 if (icode != CODE_FOR_nothing)
4997 {
4998 rtx_insn *last = get_last_insn ();
4999 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5000 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
5001 return target;
5002 delete_insns_since (last);
5003 }
5004
5005 /* For floating point formats without a sign bit, implement signbit
5006 as "ARG < 0.0". */
5007 bitpos = fmt->signbit_ro;
5008 if (bitpos < 0)
5009 {
5010 /* But we can't do this if the format supports signed zero. */
5011 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
5012 return NULL_RTX;
5013
5014 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5015 build_real (TREE_TYPE (arg), dconst0));
5016 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5017 }
5018
5019 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5020 {
5021 imode = int_mode_for_mode (fmode);
5022 if (imode == BLKmode)
5023 return NULL_RTX;
5024 temp = gen_lowpart (imode, temp);
5025 }
5026 else
5027 {
5028 imode = word_mode;
5029 /* Handle targets with different FP word orders. */
5030 if (FLOAT_WORDS_BIG_ENDIAN)
5031 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5032 else
5033 word = bitpos / BITS_PER_WORD;
5034 temp = operand_subword_force (temp, word, fmode);
5035 bitpos = bitpos % BITS_PER_WORD;
5036 }
5037
5038 /* Force the intermediate word_mode (or narrower) result into a
5039 register. This avoids attempting to create paradoxical SUBREGs
5040 of floating point modes below. */
5041 temp = force_reg (imode, temp);
5042
5043 /* If the bitpos is within the "result mode" lowpart, the operation
5044 can be implemented with a single bitwise AND. Otherwise, we need
5045 a right shift and an AND. */
5046
5047 if (bitpos < GET_MODE_BITSIZE (rmode))
5048 {
5049 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
5050
5051 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5052 temp = gen_lowpart (rmode, temp);
5053 temp = expand_binop (rmode, and_optab, temp,
5054 immed_wide_int_const (mask, rmode),
5055 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5056 }
5057 else
5058 {
5059 /* Perform a logical right shift to place the signbit in the least
5060 significant bit, then truncate the result to the desired mode
5061 and mask just this bit. */
5062 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
5063 temp = gen_lowpart (rmode, temp);
5064 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5065 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5066 }
5067
5068 return temp;
5069 }
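
/* As an illustration of the generic path above: assuming IEEE double on a
   64-bit little-endian target, with fmt->signbit_ro == 63 and an int
   result type, the generated RTL is morally equivalent to this C sketch,
   where my_signbit and the memcpy round trip are purely illustrative and
   the real expansion stays in registers:

     #include <string.h>

     static int
     my_signbit (double x)
     {
       unsigned long long bits;
       memcpy (&bits, &x, sizeof bits);
       return (int) ((bits >> 63) & 1);
     }

   The shift-and-AND branch handles exactly this case, since bit 63 lies
   outside the 32-bit result mode.  */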
5070
5071 /* Expand fork or exec calls. TARGET is the desired target of the
5072 call. EXP is the call. FN is the
5073    identifier of the actual function.  IGNORE is nonzero if the
5074 value is to be ignored. */
5075
5076 static rtx
5077 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5078 {
5079 tree id, decl;
5080 tree call;
5081
5082 /* If we are not profiling, just call the function. */
5083 if (!profile_arc_flag)
5084 return NULL_RTX;
5085
5086   /* Otherwise call the wrapper.  This should be equivalent for the rest of
5087      the compiler, so the code does not diverge, and the wrapper may run the
5088      code necessary for keeping the profiling sane.  */
5089
5090 switch (DECL_FUNCTION_CODE (fn))
5091 {
5092 case BUILT_IN_FORK:
5093 id = get_identifier ("__gcov_fork");
5094 break;
5095
5096 case BUILT_IN_EXECL:
5097 id = get_identifier ("__gcov_execl");
5098 break;
5099
5100 case BUILT_IN_EXECV:
5101 id = get_identifier ("__gcov_execv");
5102 break;
5103
5104 case BUILT_IN_EXECLP:
5105 id = get_identifier ("__gcov_execlp");
5106 break;
5107
5108 case BUILT_IN_EXECLE:
5109 id = get_identifier ("__gcov_execle");
5110 break;
5111
5112 case BUILT_IN_EXECVP:
5113 id = get_identifier ("__gcov_execvp");
5114 break;
5115
5116 case BUILT_IN_EXECVE:
5117 id = get_identifier ("__gcov_execve");
5118 break;
5119
5120 default:
5121 gcc_unreachable ();
5122 }
5123
5124 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5125 FUNCTION_DECL, id, TREE_TYPE (fn));
5126 DECL_EXTERNAL (decl) = 1;
5127 TREE_PUBLIC (decl) = 1;
5128 DECL_ARTIFICIAL (decl) = 1;
5129 TREE_NOTHROW (decl) = 1;
5130 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5131 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5132 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5133 return expand_call (call, target, ignore);
5134 }
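
/* The wrapper functions named above live in libgcov.  Conceptually each
   one dumps the profile counters before delegating to the real call, so
   that e.g. a fork does not double-count arcs in the child; a simplified
   sketch of the fork wrapper, ignoring the locking the real one performs:

     #include <unistd.h>

     pid_t
     __gcov_fork (void)
     {
       __gcov_flush ();
       return fork ();
     }
   */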
5135
5136
5137 \f
5138 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5139 the pointer in these functions is void*, the tree optimizers may remove
5140 casts. The mode computed in expand_builtin isn't reliable either, due
5141 to __sync_bool_compare_and_swap.
5142
5143 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5144 group of builtins. This gives us log2 of the mode size. */
5145
5146 static inline machine_mode
5147 get_builtin_sync_mode (int fcode_diff)
5148 {
5149 /* The size is not negotiable, so ask not to get BLKmode in return
5150 if the target indicates that a smaller size would be better. */
5151 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5152 }
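
/* For example, the _1 to _16 variants of each group map to FCODE_DIFF
   values 0 to 4, so on the common byte-addressed configuration where
   BITS_PER_UNIT is 8:

     BITS_PER_UNIT << 0  ==   8 bits  (QImode)
     BITS_PER_UNIT << 1  ==  16 bits  (HImode)
     BITS_PER_UNIT << 2  ==  32 bits  (SImode)
     BITS_PER_UNIT << 3  ==  64 bits  (DImode)
     BITS_PER_UNIT << 4  == 128 bits  (TImode)

   e.g. BUILT_IN_SYNC_FETCH_AND_ADD_4 - BUILT_IN_SYNC_FETCH_AND_ADD_1 is 2,
   yielding the 32-bit integer mode.  */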
5153
5154 /* Expand the memory expression LOC and return the appropriate memory operand
5155 for the builtin_sync operations. */
5156
5157 static rtx
5158 get_builtin_sync_mem (tree loc, machine_mode mode)
5159 {
5160 rtx addr, mem;
5161
5162 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5163 addr = convert_memory_address (Pmode, addr);
5164
5165 /* Note that we explicitly do not want any alias information for this
5166 memory, so that we kill all other live memories. Otherwise we don't
5167 satisfy the full barrier semantics of the intrinsic. */
5168 mem = validize_mem (gen_rtx_MEM (mode, addr));
5169
5170   /* The alignment needs to be at least that of the mode.  */
5171 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5172 get_pointer_alignment (loc)));
5173 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5174 MEM_VOLATILE_P (mem) = 1;
5175
5176 return mem;
5177 }
5178
5179 /* Make sure an argument is in the right mode.
5180 EXP is the tree argument.
5181 MODE is the mode it should be in. */
5182
5183 static rtx
5184 expand_expr_force_mode (tree exp, machine_mode mode)
5185 {
5186 rtx val;
5187 machine_mode old_mode;
5188
5189 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5190 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5191 of CONST_INTs, where we know the old_mode only from the call argument. */
5192
5193 old_mode = GET_MODE (val);
5194 if (old_mode == VOIDmode)
5195 old_mode = TYPE_MODE (TREE_TYPE (exp));
5196 val = convert_modes (mode, old_mode, val, 1);
5197 return val;
5198 }
5199
5200
5201 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5202 EXP is the CALL_EXPR. CODE is the rtx code
5203 that corresponds to the arithmetic or logical operation from the name;
5204 an exception here is that NOT actually means NAND. TARGET is an optional
5205 place for us to store the results; AFTER is true if this is the
5206 fetch_and_xxx form. */
5207
5208 static rtx
5209 expand_builtin_sync_operation (machine_mode mode, tree exp,
5210 enum rtx_code code, bool after,
5211 rtx target)
5212 {
5213 rtx val, mem;
5214 location_t loc = EXPR_LOCATION (exp);
5215
5216 if (code == NOT && warn_sync_nand)
5217 {
5218 tree fndecl = get_callee_fndecl (exp);
5219 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5220
5221 static bool warned_f_a_n, warned_n_a_f;
5222
5223 switch (fcode)
5224 {
5225 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5226 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5227 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5228 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5229 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5230 if (warned_f_a_n)
5231 break;
5232
5233 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5234 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5235 warned_f_a_n = true;
5236 break;
5237
5238 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5239 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5240 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5241 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5242 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5243 if (warned_n_a_f)
5244 break;
5245
5246 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5247 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5248 warned_n_a_f = true;
5249 break;
5250
5251 default:
5252 gcc_unreachable ();
5253 }
5254 }
5255
5256 /* Expand the operands. */
5257 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5258 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5259
5260 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
5261 after);
5262 }
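
/* For reference, the AFTER flag selects between the two user-visible
   families expanded here; per the GCC manual:

     type __sync_fetch_and_add (type *ptr, type value, ...)
       returns the old contents of *ptr (AFTER is false);
     type __sync_add_and_fetch (type *ptr, type value, ...)
       returns the new contents of *ptr (AFTER is true).

   Since GCC 4.4 the NAND variants compute *ptr = ~(*ptr & value) rather
   than the pre-4.4 *ptr = ~*ptr & value, which is the semantic change the
   inform calls above refer to.  */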
5263
5264 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5265 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5266 true if this is the boolean form. TARGET is a place for us to store the
5267 results; this is NOT optional if IS_BOOL is true. */
5268
5269 static rtx
5270 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
5271 bool is_bool, rtx target)
5272 {
5273 rtx old_val, new_val, mem;
5274 rtx *pbool, *poval;
5275
5276 /* Expand the operands. */
5277 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5278 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5279 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5280
5281 pbool = poval = NULL;
5282 if (target != const0_rtx)
5283 {
5284 if (is_bool)
5285 pbool = &target;
5286 else
5287 poval = &target;
5288 }
5289 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5290 false, MEMMODEL_SYNC_SEQ_CST,
5291 MEMMODEL_SYNC_SEQ_CST))
5292 return NULL_RTX;
5293
5294 return target;
5295 }
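
/* For reference, the two user-visible forms handled above behave as
   follows, per the GCC manual:

     bool __sync_bool_compare_and_swap (type *ptr, type oldval, type newval)
       returns true iff *ptr equalled oldval and was replaced by newval;
     type __sync_val_compare_and_swap (type *ptr, type oldval, type newval)
       returns the previous contents of *ptr, whether or not the swap
       happened.

   This is why PBOOL and POVAL select which output of
   expand_atomic_compare_and_swap gets wired to TARGET.  */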
5296
5297 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5298 general form is actually an atomic exchange, and some targets only
5299 support a reduced form with the second argument being a constant 1.
5300 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5301 the results. */
5302
5303 static rtx
5304 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
5305 rtx target)
5306 {
5307 rtx val, mem;
5308
5309 /* Expand the operands. */
5310 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5311 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5312
5313 return expand_sync_lock_test_and_set (target, mem, val);
5314 }
5315
5316 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5317
5318 static void
5319 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
5320 {
5321 rtx mem;
5322
5323 /* Expand the operands. */
5324 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5325
5326 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
5327 }
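
/* Taken together, the two builtins above are commonly used to build a
   simple spin lock.  A sketch of the intended usage, with my_lock being
   an illustrative name:

     static volatile int my_lock;

     static void
     acquire (void)
     {
       while (__sync_lock_test_and_set (&my_lock, 1))
         ;
     }

     static void
     release (void)
     {
       __sync_lock_release (&my_lock);
     }

   __sync_lock_test_and_set returns the previous contents, so the loop
   spins until it observes 0; __sync_lock_release stores 0 with release
   semantics.  */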
5328
5329 /* Given an integer representing an ``enum memmodel'', verify its
5330 correctness and return the memory model enum. */
5331
5332 static enum memmodel
5333 get_memmodel (tree exp)
5334 {
5335 rtx op;
5336 unsigned HOST_WIDE_INT val;
5337
5338 /* If the parameter is not a constant, it's a run time value so we'll just
5339 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5340 if (TREE_CODE (exp) != INTEGER_CST)
5341 return MEMMODEL_SEQ_CST;
5342
5343 op = expand_normal (exp);
5344
5345 val = INTVAL (op);
5346 if (targetm.memmodel_check)
5347 val = targetm.memmodel_check (val);
5348 else if (val & ~MEMMODEL_MASK)
5349 {
5350 warning (OPT_Winvalid_memory_model,
5351 "Unknown architecture specifier in memory model to builtin.");
5352 return MEMMODEL_SEQ_CST;
5353 }
5354
5355   /* Should never see an explicit user SYNC memory model, so >= LAST works.  */
5356 if (memmodel_base (val) >= MEMMODEL_LAST)
5357 {
5358 warning (OPT_Winvalid_memory_model,
5359 "invalid memory model argument to builtin");
5360 return MEMMODEL_SEQ_CST;
5361 }
5362
5363 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
5364 be conservative and promote consume to acquire. */
5365 if (val == MEMMODEL_CONSUME)
5366 val = MEMMODEL_ACQUIRE;
5367
5368 return (enum memmodel) val;
5369 }
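
/* For example, in

     int x;
     ... __atomic_load_n (&x, __ATOMIC_CONSUME) ...

   the constant memory-model argument reaches this function as an
   INTEGER_CST and, per the workaround above, is promoted to
   MEMMODEL_ACQUIRE, while a non-constant model argument would have been
   mapped to MEMMODEL_SEQ_CST without further inspection.  */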
5370
5371 /* Expand the __atomic_exchange intrinsic:
5372 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5373 EXP is the CALL_EXPR.
5374 TARGET is an optional place for us to store the results. */
5375
5376 static rtx
5377 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
5378 {
5379 rtx val, mem;
5380 enum memmodel model;
5381
5382 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5383
5384 if (!flag_inline_atomics)
5385 return NULL_RTX;
5386
5387 /* Expand the operands. */
5388 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5389 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5390
5391 return expand_atomic_exchange (target, mem, val, model);
5392 }
5393
5394 /* Expand the __atomic_compare_exchange intrinsic:
5395 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5396 TYPE desired, BOOL weak,
5397 enum memmodel success,
5398 enum memmodel failure)
5399 EXP is the CALL_EXPR.
5400 TARGET is an optional place for us to store the results. */
5401
5402 static rtx
5403 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
5404 rtx target)
5405 {
5406 rtx expect, desired, mem, oldval;
5407 rtx_code_label *label;
5408 enum memmodel success, failure;
5409 tree weak;
5410 bool is_weak;
5411
5412 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5413 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5414
5415 if (failure > success)
5416 {
5417 warning (OPT_Winvalid_memory_model,
5418 "failure memory model cannot be stronger than success memory "
5419 "model for %<__atomic_compare_exchange%>");
5420 success = MEMMODEL_SEQ_CST;
5421 }
5422
5423 if (is_mm_release (failure) || is_mm_acq_rel (failure))
5424 {
5425 warning (OPT_Winvalid_memory_model,
5426 "invalid failure memory model for "
5427 "%<__atomic_compare_exchange%>");
5428 failure = MEMMODEL_SEQ_CST;
5429 success = MEMMODEL_SEQ_CST;
5430 }
5431
5432
5433 if (!flag_inline_atomics)
5434 return NULL_RTX;
5435
5436 /* Expand the operands. */
5437 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5438
5439 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5440 expect = convert_memory_address (Pmode, expect);
5441 expect = gen_rtx_MEM (mode, expect);
5442 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5443
5444 weak = CALL_EXPR_ARG (exp, 3);
5445 is_weak = false;
5446 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
5447 is_weak = true;
5448
5449 if (target == const0_rtx)
5450 target = NULL;
5451
5452   /* Lest the rtl backend create a race condition with an improper store
5453      to memory, always create a new pseudo for OLDVAL.  */
5454 oldval = NULL;
5455
5456 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
5457 is_weak, success, failure))
5458 return NULL_RTX;
5459
5460 /* Conditionally store back to EXPECT, lest we create a race condition
5461 with an improper store to memory. */
5462   /* ??? With a rearrangement of atomics at the gimple level, we could handle
5463      the normal case where EXPECT is totally private, i.e. a register, at
5464      which point the store could be unconditional.  */
5465 label = gen_label_rtx ();
5466 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
5467 GET_MODE (target), 1, label);
5468 emit_move_insn (expect, oldval);
5469 emit_label (label);
5470
5471 return target;
5472 }
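
/* The conditional store-back above gives the builtin its documented
   contract: on failure the old contents of *OBJECT are written to
   *EXPECT.  Ignoring atomicity, the expansion behaves like this C sketch,
   where ref_compare_exchange is an illustrative name and bool is as in
   <stdbool.h>:

     bool
     ref_compare_exchange (int *object, int *expect, int desired)
     {
       if (*object == *expect)
         {
           *object = desired;
           return true;
         }
       *expect = *object;
       return false;
     }

   The real code emits the store to *EXPECT only on the failure path, so a
   successful exchange never writes to the caller's expected value.  */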
5473
5474 /* Expand the __atomic_load intrinsic:
5475 TYPE __atomic_load (TYPE *object, enum memmodel)
5476 EXP is the CALL_EXPR.
5477 TARGET is an optional place for us to store the results. */
5478
5479 static rtx
5480 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
5481 {
5482 rtx mem;
5483 enum memmodel model;
5484
5485 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5486 if (is_mm_release (model) || is_mm_acq_rel (model))
5487 {
5488 warning (OPT_Winvalid_memory_model,
5489 "invalid memory model for %<__atomic_load%>");
5490 model = MEMMODEL_SEQ_CST;
5491 }
5492
5493 if (!flag_inline_atomics)
5494 return NULL_RTX;
5495
5496 /* Expand the operand. */
5497 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5498
5499 return expand_atomic_load (target, mem, model);
5500 }
5501
5502
5503 /* Expand the __atomic_store intrinsic:
5504 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5505    EXP is the CALL_EXPR.  */
5507
5508 static rtx
5509 expand_builtin_atomic_store (machine_mode mode, tree exp)
5510 {
5511 rtx mem, val;
5512 enum memmodel model;
5513
5514 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5515 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
5516 || is_mm_release (model)))
5517 {
5518 warning (OPT_Winvalid_memory_model,
5519 "invalid memory model for %<__atomic_store%>");
5520 model = MEMMODEL_SEQ_CST;
5521 }
5522
5523 if (!flag_inline_atomics)
5524 return NULL_RTX;
5525
5526 /* Expand the operands. */
5527 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5528 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5529
5530 return expand_atomic_store (mem, val, model, false);
5531 }
5532
5533 /* Expand the __atomic_fetch_XXX intrinsic:
5534 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5535 EXP is the CALL_EXPR.
5536 TARGET is an optional place for us to store the results.
5537    CODE is the operation, PLUS, MINUS, AND, XOR, IOR, or NOT (meaning NAND).
5538 FETCH_AFTER is true if returning the result of the operation.
5539 FETCH_AFTER is false if returning the value before the operation.
5540 IGNORE is true if the result is not used.
5541 EXT_CALL is the correct builtin for an external call if this cannot be
5542 resolved to an instruction sequence. */
5543
5544 static rtx
5545 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
5546 enum rtx_code code, bool fetch_after,
5547 bool ignore, enum built_in_function ext_call)
5548 {
5549 rtx val, mem, ret;
5550 enum memmodel model;
5551 tree fndecl;
5552 tree addr;
5553
5554 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5555
5556 /* Expand the operands. */
5557 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5558 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5559
5560 /* Only try generating instructions if inlining is turned on. */
5561 if (flag_inline_atomics)
5562 {
5563 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
5564 if (ret)
5565 return ret;
5566 }
5567
5568   /* If there is no alternate library routine, let a normal call be emitted.  */
5569 if (ext_call == BUILT_IN_NONE)
5570 return NULL_RTX;
5571
5572 /* Change the call to the specified function. */
5573 fndecl = get_callee_fndecl (exp);
5574 addr = CALL_EXPR_FN (exp);
5575 STRIP_NOPS (addr);
5576
5577 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
5578 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
5579
5580 /* Expand the call here so we can emit trailing code. */
5581 ret = expand_call (exp, target, ignore);
5582
5583   /* Restore the original function just in case it matters.  */
5584 TREE_OPERAND (addr, 0) = fndecl;
5585
5586 /* Then issue the arithmetic correction to return the right result. */
5587 if (!ignore)
5588 {
5589 if (code == NOT)
5590 {
5591 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
5592 OPTAB_LIB_WIDEN);
5593 ret = expand_simple_unop (mode, NOT, ret, target, true);
5594 }
5595 else
5596 ret = expand_simple_binop (mode, code, ret, val, target, true,
5597 OPTAB_LIB_WIDEN);
5598 }
5599 return ret;
5600 }
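
/* The correction above turns a fetch_OP library result (the old value)
   back into the OP_fetch result (the new value).  A C sketch of the add
   case, with add_fetch_fallback being an illustrative name:

     int
     add_fetch_fallback (int *p, int v, int model)
     {
       int old = __atomic_fetch_add (p, v, model);
       return old + v;
     }

   For NAND the new value is ~(old & v), which is why that case is emitted
   as an AND followed by a NOT.  */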
5601
5602
5603 #ifndef HAVE_atomic_clear
5604 # define HAVE_atomic_clear 0
5605 # define gen_atomic_clear(x,y) (gcc_unreachable (), NULL_RTX)
5606 #endif
5607
5608 /* Expand an atomic clear operation.
5609    void __atomic_clear (BOOL *obj, enum memmodel)
5610 EXP is the call expression. */
5611
5612 static rtx
5613 expand_builtin_atomic_clear (tree exp)
5614 {
5615 machine_mode mode;
5616 rtx mem, ret;
5617 enum memmodel model;
5618
5619 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5620 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5621 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5622
5623 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
5624 {
5625 warning (OPT_Winvalid_memory_model,
5626 "invalid memory model for %<__atomic_store%>");
5627 model = MEMMODEL_SEQ_CST;
5628 }
5629
5630 if (HAVE_atomic_clear)
5631 {
5632 emit_insn (gen_atomic_clear (mem, model));
5633 return const0_rtx;
5634 }
5635
5636   /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
5637      Failing that, issue a plain store here.  The only way this can
5638      fail is if the bool type is larger than a word size.  Unlikely, but
5639 handle it anyway for completeness. Assume a single threaded model since
5640 there is no atomic support in this case, and no barriers are required. */
5641 ret = expand_atomic_store (mem, const0_rtx, model, true);
5642 if (!ret)
5643 emit_move_insn (mem, const0_rtx);
5644 return const0_rtx;
5645 }
5646
5647 /* Expand an atomic test_and_set operation.
5648    bool __atomic_test_and_set (BOOL *obj, enum memmodel)
5649    EXP is the call expression.  TARGET is an optional place for the result.  */
5650
5651 static rtx
5652 expand_builtin_atomic_test_and_set (tree exp, rtx target)
5653 {
5654 rtx mem;
5655 enum memmodel model;
5656 machine_mode mode;
5657
5658 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5659 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5660 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5661
5662 return expand_atomic_test_and_set (target, mem, model);
5663 }
5664
5665
5666 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
5667 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
5668
5669 static tree
5670 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
5671 {
5672 int size;
5673 machine_mode mode;
5674 unsigned int mode_align, type_align;
5675
5676 if (TREE_CODE (arg0) != INTEGER_CST)
5677 return NULL_TREE;
5678
5679 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
5680 mode = mode_for_size (size, MODE_INT, 0);
5681 mode_align = GET_MODE_ALIGNMENT (mode);
5682
5683 if (TREE_CODE (arg1) == INTEGER_CST && INTVAL (expand_normal (arg1)) == 0)
5684 type_align = mode_align;
5685 else
5686 {
5687 tree ttype = TREE_TYPE (arg1);
5688
5689 /* This function is usually invoked and folded immediately by the front
5690 end before anything else has a chance to look at it. The pointer
5691 parameter at this point is usually cast to a void *, so check for that
5692 and look past the cast. */
5693 if (CONVERT_EXPR_P (arg1) && POINTER_TYPE_P (ttype)
5694 && VOID_TYPE_P (TREE_TYPE (ttype)))
5695 arg1 = TREE_OPERAND (arg1, 0);
5696
5697 ttype = TREE_TYPE (arg1);
5698 gcc_assert (POINTER_TYPE_P (ttype));
5699
5700 /* Get the underlying type of the object. */
5701 ttype = TREE_TYPE (ttype);
5702 type_align = TYPE_ALIGN (ttype);
5703 }
5704
5705   /* If the object has smaller alignment, the lock free routines cannot
5706      be used.  */
5707 if (type_align < mode_align)
5708 return boolean_false_node;
5709
5710 /* Check if a compare_and_swap pattern exists for the mode which represents
5711 the required size. The pattern is not allowed to fail, so the existence
5712 of the pattern indicates support is present. */
5713 if (can_compare_and_swap_p (mode, true))
5714 return boolean_true_node;
5715 else
5716 return boolean_false_node;
5717 }
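
/* For example, on a typical LP64 target

     _Alignas (8) long long counter;
     ... __atomic_always_lock_free (sizeof counter, &counter) ...

   folds to true here when DImode has a (never-failing) compare_and_swap
   pattern and the object's alignment is at least GET_MODE_ALIGNMENT of
   DImode; passing a constant null pointer as the second argument asks the
   same question for the mode's typical alignment instead.  */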
5718
5719 /* Return one if the parameters to call EXP represent an object which will
5720    always generate lock free instructions, and zero otherwise.  The first
5721    argument represents the size of the object, and the second parameter is
5722    a pointer to the object itself.  If NULL is passed for the object, then
5723    the result is based on typical alignment for an object of the specified
5724    size.  */
5725
5726 static rtx
5727 expand_builtin_atomic_always_lock_free (tree exp)
5728 {
5729 tree size;
5730 tree arg0 = CALL_EXPR_ARG (exp, 0);
5731 tree arg1 = CALL_EXPR_ARG (exp, 1);
5732
5733 if (TREE_CODE (arg0) != INTEGER_CST)
5734 {
5735 error ("non-constant argument 1 to __atomic_always_lock_free");
5736 return const0_rtx;
5737 }
5738
5739 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
5740 if (size == boolean_true_node)
5741 return const1_rtx;
5742 return const0_rtx;
5743 }
5744
5745 /* Return one or zero if it can be determined that the object ARG1 of size
5746    ARG0 is lock free on this architecture.  */
5747
5748 static tree
5749 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
5750 {
5751 if (!flag_inline_atomics)
5752 return NULL_TREE;
5753
5754 /* If it isn't always lock free, don't generate a result. */
5755 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
5756 return boolean_true_node;
5757
5758 return NULL_TREE;
5759 }
5760
5761 /* Return one if it can be determined at compile time that the object
5762    described by call EXP is lock free on this architecture, where the first
5763    argument is the size of the object and the second is a pointer to the
5764    object itself; if NULL is passed for the object, the result is based on
5765    typical alignment for an object of the specified size.  Otherwise return
5766    NULL.  */
5767
5768 static rtx
5769 expand_builtin_atomic_is_lock_free (tree exp)
5770 {
5771 tree size;
5772 tree arg0 = CALL_EXPR_ARG (exp, 0);
5773 tree arg1 = CALL_EXPR_ARG (exp, 1);
5774
5775 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5776 {
5777 error ("non-integer argument 1 to __atomic_is_lock_free");
5778 return NULL_RTX;
5779 }
5780
5781 if (!flag_inline_atomics)
5782 return NULL_RTX;
5783
5784 /* If the value is known at compile time, return the RTX for it. */
5785 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
5786 if (size == boolean_true_node)
5787 return const1_rtx;
5788
5789 return NULL_RTX;
5790 }
5791
5792 /* Expand the __atomic_thread_fence intrinsic:
5793 void __atomic_thread_fence (enum memmodel)
5794 EXP is the CALL_EXPR. */
5795
5796 static void
5797 expand_builtin_atomic_thread_fence (tree exp)
5798 {
5799 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5800 expand_mem_thread_fence (model);
5801 }
5802
5803 /* Expand the __atomic_signal_fence intrinsic:
5804 void __atomic_signal_fence (enum memmodel)
5805 EXP is the CALL_EXPR. */
5806
5807 static void
5808 expand_builtin_atomic_signal_fence (tree exp)
5809 {
5810 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5811 expand_mem_signal_fence (model);
5812 }
5813
5814 /* Expand the __sync_synchronize intrinsic. */
5815
5816 static void
5817 expand_builtin_sync_synchronize (void)
5818 {
5819 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
5820 }
5821
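/* Expand a call to __builtin_thread_pointer.  EXP is the CALL_EXPR.
   TARGET is an optional place for the result.  */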
5822 static rtx
5823 expand_builtin_thread_pointer (tree exp, rtx target)
5824 {
5825 enum insn_code icode;
5826 if (!validate_arglist (exp, VOID_TYPE))
5827 return const0_rtx;
5828 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
5829 if (icode != CODE_FOR_nothing)
5830 {
5831 struct expand_operand op;
5832       /* If the target is not suitable then create a new target.  */
5833 if (target == NULL_RTX
5834 || !REG_P (target)
5835 || GET_MODE (target) != Pmode)
5836 target = gen_reg_rtx (Pmode);
5837 create_output_operand (&op, target, Pmode);
5838 expand_insn (icode, 1, &op);
5839 return target;
5840 }
5841 error ("__builtin_thread_pointer is not supported on this target");
5842 return const0_rtx;
5843 }
5844
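/* Expand a call to __builtin_set_thread_pointer.  EXP is the CALL_EXPR.  */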
5845 static void
5846 expand_builtin_set_thread_pointer (tree exp)
5847 {
5848 enum insn_code icode;
5849 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5850 return;
5851 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
5852 if (icode != CODE_FOR_nothing)
5853 {
5854 struct expand_operand op;
5855 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
5856 Pmode, EXPAND_NORMAL);
5857 create_input_operand (&op, val, Pmode);
5858 expand_insn (icode, 1, &op);
5859 return;
5860 }
5861 error ("__builtin_set_thread_pointer is not supported on this target");
5862 }
5863
5864 \f
5865 /* Emit code to restore the current value of the stack pointer.  */
5866
5867 static void
5868 expand_stack_restore (tree var)
5869 {
5870 rtx_insn *prev;
5871 rtx sa = expand_normal (var);
5872
5873 sa = convert_memory_address (Pmode, sa);
5874
5875 prev = get_last_insn ();
5876 emit_stack_restore (SAVE_BLOCK, sa);
5877
5878 record_new_stack_level ();
5879
5880 fixup_args_size_notes (prev, get_last_insn (), 0);
5881 }
5882
5883 /* Emit code to save the current value of the stack pointer.  */
5884
5885 static rtx
5886 expand_stack_save (void)
5887 {
5888 rtx ret = NULL_RTX;
5889
5890 emit_stack_save (SAVE_BLOCK, &ret);
5891 return ret;
5892 }
5893
5894
5895 /* Expand OpenACC acc_on_device.
5896
5897 This has to happen late (that is, not in early folding; expand_builtin_*,
5898 rather than fold_builtin_*), as we have to act differently for host and
5899 acceleration device (ACCEL_COMPILER conditional). */
5900
5901 static rtx
5902 expand_builtin_acc_on_device (tree exp ATTRIBUTE_UNUSED,
5903 rtx target ATTRIBUTE_UNUSED)
5904 {
5905 #ifdef ACCEL_COMPILER
5906 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5907 return NULL_RTX;
5908
5909 tree arg = CALL_EXPR_ARG (exp, 0);
5910
5911 /* Return (arg == v1 || arg == v2) ? 1 : 0. */
5912 machine_mode v_mode = TYPE_MODE (TREE_TYPE (arg));
5913 rtx v = expand_normal (arg), v1, v2;
5914 v1 = GEN_INT (GOMP_DEVICE_NOT_HOST);
5915 v2 = GEN_INT (ACCEL_COMPILER_acc_device);
5916 machine_mode target_mode = TYPE_MODE (integer_type_node);
5917 if (!target || !register_operand (target, target_mode))
5918 target = gen_reg_rtx (target_mode);
5919 emit_move_insn (target, const1_rtx);
5920 rtx_code_label *done_label = gen_label_rtx ();
5921 do_compare_rtx_and_jump (v, v1, EQ, false, v_mode, NULL_RTX,
5922 NULL, done_label, PROB_EVEN);
5923 do_compare_rtx_and_jump (v, v2, EQ, false, v_mode, NULL_RTX,
5924 NULL, done_label, PROB_EVEN);
5925 emit_move_insn (target, const0_rtx);
5926 emit_label (done_label);
5927
5928 return target;
5929 #else
5930 return NULL;
5931 #endif
5932 }
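
/* For reference, a typical OpenACC use of this builtin looks like

     #include <openacc.h>

     if (acc_on_device (acc_device_nvidia))
       ... device-specific code ...

   In a host compilation the expander returns NULL and a normal library
   call results; in an ACCEL_COMPILER build the code above reduces the
   test to two compare-and-jump insns against GOMP_DEVICE_NOT_HOST and the
   compiler's own device type.  */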
5933
5934
5935 /* Expand an expression EXP that calls a built-in function,
5936 with result going to TARGET if that's convenient
5937 (and in mode MODE if that's convenient).
5938 SUBTARGET may be used as the target for computing one of EXP's operands.
5939 IGNORE is nonzero if the value is to be ignored. */
5940
5941 rtx
5942 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
5943 int ignore)
5944 {
5945 tree fndecl = get_callee_fndecl (exp);
5946 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5947 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5948 int flags;
5949
5950 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5951 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5952
5953   /* When ASan is enabled, we don't want to expand some memory/string
5954      builtins and rely on libsanitizer's hooks.  This allows us to avoid
5955      redundant checks and be sure that a possible overflow will be detected
5956      by ASan.  */
5957
5958 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
5959 return expand_call (exp, target, ignore);
5960
5961 /* When not optimizing, generate calls to library functions for a certain
5962 set of builtins. */
5963 if (!optimize
5964 && !called_as_built_in (fndecl)
5965 && fcode != BUILT_IN_FORK
5966 && fcode != BUILT_IN_EXECL
5967 && fcode != BUILT_IN_EXECV
5968 && fcode != BUILT_IN_EXECLP
5969 && fcode != BUILT_IN_EXECLE
5970 && fcode != BUILT_IN_EXECVP
5971 && fcode != BUILT_IN_EXECVE
5972 && fcode != BUILT_IN_ALLOCA
5973 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
5974 && fcode != BUILT_IN_FREE
5975 && fcode != BUILT_IN_CHKP_SET_PTR_BOUNDS
5976 && fcode != BUILT_IN_CHKP_INIT_PTR_BOUNDS
5977 && fcode != BUILT_IN_CHKP_NULL_PTR_BOUNDS
5978 && fcode != BUILT_IN_CHKP_COPY_PTR_BOUNDS
5979 && fcode != BUILT_IN_CHKP_NARROW_PTR_BOUNDS
5980 && fcode != BUILT_IN_CHKP_STORE_PTR_BOUNDS
5981 && fcode != BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
5982 && fcode != BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
5983 && fcode != BUILT_IN_CHKP_CHECK_PTR_BOUNDS
5984 && fcode != BUILT_IN_CHKP_GET_PTR_LBOUND
5985 && fcode != BUILT_IN_CHKP_GET_PTR_UBOUND
5986 && fcode != BUILT_IN_CHKP_BNDRET)
5987 return expand_call (exp, target, ignore);
5988
5989 /* The built-in function expanders test for target == const0_rtx
5990 to determine whether the function's result will be ignored. */
5991 if (ignore)
5992 target = const0_rtx;
5993
5994 /* If the result of a pure or const built-in function is ignored, and
5995 none of its arguments are volatile, we can avoid expanding the
5996 built-in call and just evaluate the arguments for side-effects. */
5997 if (target == const0_rtx
5998 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
5999 && !(flags & ECF_LOOPING_CONST_OR_PURE))
6000 {
6001 bool volatilep = false;
6002 tree arg;
6003 call_expr_arg_iterator iter;
6004
6005 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6006 if (TREE_THIS_VOLATILE (arg))
6007 {
6008 volatilep = true;
6009 break;
6010 }
6011
6012 if (! volatilep)
6013 {
6014 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6015 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
6016 return const0_rtx;
6017 }
6018 }
6019
6020 /* expand_builtin_with_bounds is supposed to be used for
6021 instrumented builtin calls. */
6022 gcc_assert (!CALL_WITH_BOUNDS_P (exp));
6023
6024 switch (fcode)
6025 {
6026 CASE_FLT_FN (BUILT_IN_FABS):
6027 case BUILT_IN_FABSD32:
6028 case BUILT_IN_FABSD64:
6029 case BUILT_IN_FABSD128:
6030 target = expand_builtin_fabs (exp, target, subtarget);
6031 if (target)
6032 return target;
6033 break;
6034
6035 CASE_FLT_FN (BUILT_IN_COPYSIGN):
6036 target = expand_builtin_copysign (exp, target, subtarget);
6037 if (target)
6038 return target;
6039 break;
6040
6041 /* Just do a normal library call if we were unable to fold
6042 the values. */
6043 CASE_FLT_FN (BUILT_IN_CABS):
6044 break;
6045
6046 CASE_FLT_FN (BUILT_IN_EXP):
6047 CASE_FLT_FN (BUILT_IN_EXP10):
6048 CASE_FLT_FN (BUILT_IN_POW10):
6049 CASE_FLT_FN (BUILT_IN_EXP2):
6050 CASE_FLT_FN (BUILT_IN_EXPM1):
6051 CASE_FLT_FN (BUILT_IN_LOGB):
6052 CASE_FLT_FN (BUILT_IN_LOG):
6053 CASE_FLT_FN (BUILT_IN_LOG10):
6054 CASE_FLT_FN (BUILT_IN_LOG2):
6055 CASE_FLT_FN (BUILT_IN_LOG1P):
6056 CASE_FLT_FN (BUILT_IN_TAN):
6057 CASE_FLT_FN (BUILT_IN_ASIN):
6058 CASE_FLT_FN (BUILT_IN_ACOS):
6059 CASE_FLT_FN (BUILT_IN_ATAN):
6060 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
6061 /* Treat these like sqrt only if unsafe math optimizations are allowed,
6062 because of possible accuracy problems. */
6063 if (! flag_unsafe_math_optimizations)
6064 break;
6065 CASE_FLT_FN (BUILT_IN_SQRT):
6066 CASE_FLT_FN (BUILT_IN_FLOOR):
6067 CASE_FLT_FN (BUILT_IN_CEIL):
6068 CASE_FLT_FN (BUILT_IN_TRUNC):
6069 CASE_FLT_FN (BUILT_IN_ROUND):
6070 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6071 CASE_FLT_FN (BUILT_IN_RINT):
6072 target = expand_builtin_mathfn (exp, target, subtarget);
6073 if (target)
6074 return target;
6075 break;
6076
6077 CASE_FLT_FN (BUILT_IN_FMA):
6078 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
6079 if (target)
6080 return target;
6081 break;
6082
6083 CASE_FLT_FN (BUILT_IN_ILOGB):
6084 if (! flag_unsafe_math_optimizations)
6085 break;
6086 CASE_FLT_FN (BUILT_IN_ISINF):
6087 CASE_FLT_FN (BUILT_IN_FINITE):
6088 case BUILT_IN_ISFINITE:
6089 case BUILT_IN_ISNORMAL:
6090 target = expand_builtin_interclass_mathfn (exp, target);
6091 if (target)
6092 return target;
6093 break;
6094
6095 CASE_FLT_FN (BUILT_IN_ICEIL):
6096 CASE_FLT_FN (BUILT_IN_LCEIL):
6097 CASE_FLT_FN (BUILT_IN_LLCEIL):
6098 CASE_FLT_FN (BUILT_IN_LFLOOR):
6099 CASE_FLT_FN (BUILT_IN_IFLOOR):
6100 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6101 target = expand_builtin_int_roundingfn (exp, target);
6102 if (target)
6103 return target;
6104 break;
6105
6106 CASE_FLT_FN (BUILT_IN_IRINT):
6107 CASE_FLT_FN (BUILT_IN_LRINT):
6108 CASE_FLT_FN (BUILT_IN_LLRINT):
6109 CASE_FLT_FN (BUILT_IN_IROUND):
6110 CASE_FLT_FN (BUILT_IN_LROUND):
6111 CASE_FLT_FN (BUILT_IN_LLROUND):
6112 target = expand_builtin_int_roundingfn_2 (exp, target);
6113 if (target)
6114 return target;
6115 break;
6116
6117 CASE_FLT_FN (BUILT_IN_POWI):
6118 target = expand_builtin_powi (exp, target);
6119 if (target)
6120 return target;
6121 break;
6122
6123 CASE_FLT_FN (BUILT_IN_ATAN2):
6124 CASE_FLT_FN (BUILT_IN_LDEXP):
6125 CASE_FLT_FN (BUILT_IN_SCALB):
6126 CASE_FLT_FN (BUILT_IN_SCALBN):
6127 CASE_FLT_FN (BUILT_IN_SCALBLN):
6128 if (! flag_unsafe_math_optimizations)
6129 break;
6130
6131 CASE_FLT_FN (BUILT_IN_FMOD):
6132 CASE_FLT_FN (BUILT_IN_REMAINDER):
6133 CASE_FLT_FN (BUILT_IN_DREM):
6134 CASE_FLT_FN (BUILT_IN_POW):
6135 target = expand_builtin_mathfn_2 (exp, target, subtarget);
6136 if (target)
6137 return target;
6138 break;
6139
6140 CASE_FLT_FN (BUILT_IN_CEXPI):
6141 target = expand_builtin_cexpi (exp, target);
6142 gcc_assert (target);
6143 return target;
6144
6145 CASE_FLT_FN (BUILT_IN_SIN):
6146 CASE_FLT_FN (BUILT_IN_COS):
6147 if (! flag_unsafe_math_optimizations)
6148 break;
6149 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6150 if (target)
6151 return target;
6152 break;
6153
6154 CASE_FLT_FN (BUILT_IN_SINCOS):
6155 if (! flag_unsafe_math_optimizations)
6156 break;
6157 target = expand_builtin_sincos (exp);
6158 if (target)
6159 return target;
6160 break;
6161
6162 case BUILT_IN_APPLY_ARGS:
6163 return expand_builtin_apply_args ();
6164
6165 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6166 FUNCTION with a copy of the parameters described by
6167 ARGUMENTS, and ARGSIZE. It returns a block of memory
6168 allocated on the stack into which is stored all the registers
6169 that might possibly be used for returning the result of a
6170 function. ARGUMENTS is the value returned by
6171 __builtin_apply_args. ARGSIZE is the number of bytes of
6172 arguments that must be copied. ??? How should this value be
6173 computed? We'll also need a safe worst case value for varargs
6174 functions. */
6175 case BUILT_IN_APPLY:
6176 if (!validate_arglist (exp, POINTER_TYPE,
6177 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6178 && !validate_arglist (exp, REFERENCE_TYPE,
6179 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6180 return const0_rtx;
6181 else
6182 {
6183 rtx ops[3];
6184
6185 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6186 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6187 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6188
6189 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6190 }
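
    /* For reference, these builtins are typically combined into a
       forwarding wrapper along these lines; this is only a sketch, and
       target_fn as well as the worst-case argument-block size of 64 are
       illustrative:

         void
         forward (void)
         {
           void *args = __builtin_apply_args ();
           void *ret = __builtin_apply ((void (*) ()) target_fn, args, 64);
           __builtin_return (ret);
         }
       */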
6191
6192 /* __builtin_return (RESULT) causes the function to return the
6193 value described by RESULT. RESULT is address of the block of
6194 memory returned by __builtin_apply. */
6195 case BUILT_IN_RETURN:
6196 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6197 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6198 return const0_rtx;
6199
6200 case BUILT_IN_SAVEREGS:
6201 return expand_builtin_saveregs ();
6202
6203 case BUILT_IN_VA_ARG_PACK:
6204 /* All valid uses of __builtin_va_arg_pack () are removed during
6205 inlining. */
6206 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6207 return const0_rtx;
6208
6209 case BUILT_IN_VA_ARG_PACK_LEN:
6210 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6211 inlining. */
6212 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6213 return const0_rtx;
6214
6215 /* Return the address of the first anonymous stack arg. */
6216 case BUILT_IN_NEXT_ARG:
6217 if (fold_builtin_next_arg (exp, false))
6218 return const0_rtx;
6219 return expand_builtin_next_arg ();
6220
6221 case BUILT_IN_CLEAR_CACHE:
6222 target = expand_builtin___clear_cache (exp);
6223 if (target)
6224 return target;
6225 break;
6226
6227 case BUILT_IN_CLASSIFY_TYPE:
6228 return expand_builtin_classify_type (exp);
6229
6230 case BUILT_IN_CONSTANT_P:
6231 return const0_rtx;
6232
6233 case BUILT_IN_FRAME_ADDRESS:
6234 case BUILT_IN_RETURN_ADDRESS:
6235 return expand_builtin_frame_address (fndecl, exp);
6236
6237 /* Returns the address of the area where the structure is returned.
6238 0 otherwise. */
6239 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6240 if (call_expr_nargs (exp) != 0
6241 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6242 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6243 return const0_rtx;
6244 else
6245 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6246
6247 case BUILT_IN_ALLOCA:
6248 case BUILT_IN_ALLOCA_WITH_ALIGN:
6249 /* If the allocation stems from the declaration of a variable-sized
6250 object, it cannot accumulate. */
6251 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
6252 if (target)
6253 return target;
6254 break;
6255
6256 case BUILT_IN_STACK_SAVE:
6257 return expand_stack_save ();
6258
6259 case BUILT_IN_STACK_RESTORE:
6260 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6261 return const0_rtx;
6262
6263 case BUILT_IN_BSWAP16:
6264 case BUILT_IN_BSWAP32:
6265 case BUILT_IN_BSWAP64:
6266 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
6267 if (target)
6268 return target;
6269 break;
6270
6271 CASE_INT_FN (BUILT_IN_FFS):
6272 target = expand_builtin_unop (target_mode, exp, target,
6273 subtarget, ffs_optab);
6274 if (target)
6275 return target;
6276 break;
6277
6278 CASE_INT_FN (BUILT_IN_CLZ):
6279 target = expand_builtin_unop (target_mode, exp, target,
6280 subtarget, clz_optab);
6281 if (target)
6282 return target;
6283 break;
6284
6285 CASE_INT_FN (BUILT_IN_CTZ):
6286 target = expand_builtin_unop (target_mode, exp, target,
6287 subtarget, ctz_optab);
6288 if (target)
6289 return target;
6290 break;
6291
6292 CASE_INT_FN (BUILT_IN_CLRSB):
6293 target = expand_builtin_unop (target_mode, exp, target,
6294 subtarget, clrsb_optab);
6295 if (target)
6296 return target;
6297 break;
6298
6299 CASE_INT_FN (BUILT_IN_POPCOUNT):
6300 target = expand_builtin_unop (target_mode, exp, target,
6301 subtarget, popcount_optab);
6302 if (target)
6303 return target;
6304 break;
6305
6306 CASE_INT_FN (BUILT_IN_PARITY):
6307 target = expand_builtin_unop (target_mode, exp, target,
6308 subtarget, parity_optab);
6309 if (target)
6310 return target;
6311 break;
6312
6313 case BUILT_IN_STRLEN:
6314 target = expand_builtin_strlen (exp, target, target_mode);
6315 if (target)
6316 return target;
6317 break;
6318
6319 case BUILT_IN_STRCPY:
6320 target = expand_builtin_strcpy (exp, target);
6321 if (target)
6322 return target;
6323 break;
6324
6325 case BUILT_IN_STRNCPY:
6326 target = expand_builtin_strncpy (exp, target);
6327 if (target)
6328 return target;
6329 break;
6330
6331 case BUILT_IN_STPCPY:
6332 target = expand_builtin_stpcpy (exp, target, mode);
6333 if (target)
6334 return target;
6335 break;
6336
6337 case BUILT_IN_MEMCPY:
6338 target = expand_builtin_memcpy (exp, target);
6339 if (target)
6340 return target;
6341 break;
6342
6343 case BUILT_IN_MEMPCPY:
6344 target = expand_builtin_mempcpy (exp, target, mode);
6345 if (target)
6346 return target;
6347 break;
6348
6349 case BUILT_IN_MEMSET:
6350 target = expand_builtin_memset (exp, target, mode);
6351 if (target)
6352 return target;
6353 break;
6354
6355 case BUILT_IN_BZERO:
6356 target = expand_builtin_bzero (exp);
6357 if (target)
6358 return target;
6359 break;
6360
6361 case BUILT_IN_STRCMP:
6362 target = expand_builtin_strcmp (exp, target);
6363 if (target)
6364 return target;
6365 break;
6366
6367 case BUILT_IN_STRNCMP:
6368 target = expand_builtin_strncmp (exp, target, mode);
6369 if (target)
6370 return target;
6371 break;
6372
6373 case BUILT_IN_BCMP:
6374 case BUILT_IN_MEMCMP:
6375 target = expand_builtin_memcmp (exp, target, mode);
6376 if (target)
6377 return target;
6378 break;
6379
6380 case BUILT_IN_SETJMP:
6381 /* This should have been lowered to the builtins below. */
6382 gcc_unreachable ();
6383
6384 case BUILT_IN_SETJMP_SETUP:
6385 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6386 and the receiver label. */
6387 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6388 {
6389 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6390 VOIDmode, EXPAND_NORMAL);
6391 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6392 rtx_insn *label_r = label_rtx (label);
6393
6394 /* This is copied from the handling of non-local gotos. */
6395 expand_builtin_setjmp_setup (buf_addr, label_r);
6396 nonlocal_goto_handler_labels
6397 = gen_rtx_INSN_LIST (VOIDmode, label_r,
6398 nonlocal_goto_handler_labels);
6399 /* ??? Do not let expand_label treat us as such since we would
6400 not want to be both on the list of non-local labels and on
6401 the list of forced labels. */
6402 FORCED_LABEL (label) = 0;
6403 return const0_rtx;
6404 }
6405 break;
6406
6407 case BUILT_IN_SETJMP_RECEIVER:
6408 /* __builtin_setjmp_receiver is passed the receiver label. */
6409 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6410 {
6411 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6412 rtx_insn *label_r = label_rtx (label);
6413
6414 expand_builtin_setjmp_receiver (label_r);
6415 return const0_rtx;
6416 }
6417 break;
6418
6419 /* __builtin_longjmp is passed a pointer to an array of five words.
6420 It's similar to the C library longjmp function but works with
6421 __builtin_setjmp above. */
6422 case BUILT_IN_LONGJMP:
6423 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6424 {
6425 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6426 VOIDmode, EXPAND_NORMAL);
6427 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6428
6429 if (value != const1_rtx)
6430 {
6431 error ("%<__builtin_longjmp%> second argument must be 1");
6432 return const0_rtx;
6433 }
6434
6435 expand_builtin_longjmp (buf_addr, value);
6436 return const0_rtx;
6437 }
6438 break;
6439
6440 case BUILT_IN_NONLOCAL_GOTO:
6441 target = expand_builtin_nonlocal_goto (exp);
6442 if (target)
6443 return target;
6444 break;
6445
6446 /* This updates the setjmp buffer that is its argument with the value
6447 of the current stack pointer. */
6448 case BUILT_IN_UPDATE_SETJMP_BUF:
6449 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6450 {
6451 rtx buf_addr
6452 = expand_normal (CALL_EXPR_ARG (exp, 0));
6453
6454 expand_builtin_update_setjmp_buf (buf_addr);
6455 return const0_rtx;
6456 }
6457 break;
6458
6459 case BUILT_IN_TRAP:
6460 expand_builtin_trap ();
6461 return const0_rtx;
6462
6463 case BUILT_IN_UNREACHABLE:
6464 expand_builtin_unreachable ();
6465 return const0_rtx;
6466
6467 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6468 case BUILT_IN_SIGNBITD32:
6469 case BUILT_IN_SIGNBITD64:
6470 case BUILT_IN_SIGNBITD128:
6471 target = expand_builtin_signbit (exp, target);
6472 if (target)
6473 return target;
6474 break;
6475
6476 /* Various hooks for the DWARF 2 __throw routine. */
6477 case BUILT_IN_UNWIND_INIT:
6478 expand_builtin_unwind_init ();
6479 return const0_rtx;
6480 case BUILT_IN_DWARF_CFA:
6481 return virtual_cfa_rtx;
6482 #ifdef DWARF2_UNWIND_INFO
6483 case BUILT_IN_DWARF_SP_COLUMN:
6484 return expand_builtin_dwarf_sp_column ();
6485 case BUILT_IN_INIT_DWARF_REG_SIZES:
6486 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6487 return const0_rtx;
6488 #endif
6489 case BUILT_IN_FROB_RETURN_ADDR:
6490 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6491 case BUILT_IN_EXTRACT_RETURN_ADDR:
6492 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6493 case BUILT_IN_EH_RETURN:
6494 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6495 CALL_EXPR_ARG (exp, 1));
6496 return const0_rtx;
6497 case BUILT_IN_EH_RETURN_DATA_REGNO:
6498 return expand_builtin_eh_return_data_regno (exp);
6499 case BUILT_IN_EXTEND_POINTER:
6500 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6501 case BUILT_IN_EH_POINTER:
6502 return expand_builtin_eh_pointer (exp);
6503 case BUILT_IN_EH_FILTER:
6504 return expand_builtin_eh_filter (exp);
6505 case BUILT_IN_EH_COPY_VALUES:
6506 return expand_builtin_eh_copy_values (exp);
6507
6508 case BUILT_IN_VA_START:
6509 return expand_builtin_va_start (exp);
6510 case BUILT_IN_VA_END:
6511 return expand_builtin_va_end (exp);
6512 case BUILT_IN_VA_COPY:
6513 return expand_builtin_va_copy (exp);
6514 case BUILT_IN_EXPECT:
6515 return expand_builtin_expect (exp, target);
6516 case BUILT_IN_ASSUME_ALIGNED:
6517 return expand_builtin_assume_aligned (exp, target);
6518 case BUILT_IN_PREFETCH:
6519 expand_builtin_prefetch (exp);
6520 return const0_rtx;
6521
6522 case BUILT_IN_INIT_TRAMPOLINE:
6523 return expand_builtin_init_trampoline (exp, true);
6524 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
6525 return expand_builtin_init_trampoline (exp, false);
6526 case BUILT_IN_ADJUST_TRAMPOLINE:
6527 return expand_builtin_adjust_trampoline (exp);
6528
6529 case BUILT_IN_FORK:
6530 case BUILT_IN_EXECL:
6531 case BUILT_IN_EXECV:
6532 case BUILT_IN_EXECLP:
6533 case BUILT_IN_EXECLE:
6534 case BUILT_IN_EXECVP:
6535 case BUILT_IN_EXECVE:
6536 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6537 if (target)
6538 return target;
6539 break;
6540
6541 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
6542 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
6543 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
6544 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
6545 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
6546 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
6547 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
6548 if (target)
6549 return target;
6550 break;
6551
6552 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
6553 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
6554 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
6555 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
6556 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
6557 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
6558 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
6559 if (target)
6560 return target;
6561 break;
6562
6563 case BUILT_IN_SYNC_FETCH_AND_OR_1:
6564 case BUILT_IN_SYNC_FETCH_AND_OR_2:
6565 case BUILT_IN_SYNC_FETCH_AND_OR_4:
6566 case BUILT_IN_SYNC_FETCH_AND_OR_8:
6567 case BUILT_IN_SYNC_FETCH_AND_OR_16:
6568 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
6569 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
6570 if (target)
6571 return target;
6572 break;
6573
6574 case BUILT_IN_SYNC_FETCH_AND_AND_1:
6575 case BUILT_IN_SYNC_FETCH_AND_AND_2:
6576 case BUILT_IN_SYNC_FETCH_AND_AND_4:
6577 case BUILT_IN_SYNC_FETCH_AND_AND_8:
6578 case BUILT_IN_SYNC_FETCH_AND_AND_16:
6579 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
6580 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
6581 if (target)
6582 return target;
6583 break;
6584
6585 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
6586 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
6587 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
6588 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
6589 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
6590 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
6591 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
6592 if (target)
6593 return target;
6594 break;
6595
6596 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6597 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6598 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6599 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6600 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6601 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
6602 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
6603 if (target)
6604 return target;
6605 break;
6606
6607 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
6608 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
6609 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
6610 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
6611 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
6612 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
6613 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
6614 if (target)
6615 return target;
6616 break;
6617
6618 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
6619 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
6620 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
6621 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
6622 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
6623 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
6624 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
6625 if (target)
6626 return target;
6627 break;
6628
6629 case BUILT_IN_SYNC_OR_AND_FETCH_1:
6630 case BUILT_IN_SYNC_OR_AND_FETCH_2:
6631 case BUILT_IN_SYNC_OR_AND_FETCH_4:
6632 case BUILT_IN_SYNC_OR_AND_FETCH_8:
6633 case BUILT_IN_SYNC_OR_AND_FETCH_16:
6634 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
6635 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
6636 if (target)
6637 return target;
6638 break;
6639
6640 case BUILT_IN_SYNC_AND_AND_FETCH_1:
6641 case BUILT_IN_SYNC_AND_AND_FETCH_2:
6642 case BUILT_IN_SYNC_AND_AND_FETCH_4:
6643 case BUILT_IN_SYNC_AND_AND_FETCH_8:
6644 case BUILT_IN_SYNC_AND_AND_FETCH_16:
6645 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
6646 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
6647 if (target)
6648 return target;
6649 break;
6650
6651 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
6652 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
6653 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
6654 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
6655 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
6656 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
6657 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
6658 if (target)
6659 return target;
6660 break;
6661
6662 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6663 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6664 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6665 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6666 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6667 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
6668 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
6669 if (target)
6670 return target;
6671 break;
6672
6673 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
6674 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
6675 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
6676 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
6677 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
6678 if (mode == VOIDmode)
6679 mode = TYPE_MODE (boolean_type_node);
6680 if (!target || !register_operand (target, mode))
6681 target = gen_reg_rtx (mode);
6682
6683 mode = get_builtin_sync_mode
6684 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
6685 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6686 if (target)
6687 return target;
6688 break;
6689
6690 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
6691 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
6692 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
6693 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
6694 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
6695 mode = get_builtin_sync_mode
6696 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
6697 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6698 if (target)
6699 return target;
6700 break;
6701
6702 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
6703 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
6704 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
6705 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
6706 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
6707 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
6708 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
6709 if (target)
6710 return target;
6711 break;
6712
6713 case BUILT_IN_SYNC_LOCK_RELEASE_1:
6714 case BUILT_IN_SYNC_LOCK_RELEASE_2:
6715 case BUILT_IN_SYNC_LOCK_RELEASE_4:
6716 case BUILT_IN_SYNC_LOCK_RELEASE_8:
6717 case BUILT_IN_SYNC_LOCK_RELEASE_16:
6718 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
6719 expand_builtin_sync_lock_release (mode, exp);
6720 return const0_rtx;
6721
6722 case BUILT_IN_SYNC_SYNCHRONIZE:
6723 expand_builtin_sync_synchronize ();
6724 return const0_rtx;
6725
6726 case BUILT_IN_ATOMIC_EXCHANGE_1:
6727 case BUILT_IN_ATOMIC_EXCHANGE_2:
6728 case BUILT_IN_ATOMIC_EXCHANGE_4:
6729 case BUILT_IN_ATOMIC_EXCHANGE_8:
6730 case BUILT_IN_ATOMIC_EXCHANGE_16:
6731 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
6732 target = expand_builtin_atomic_exchange (mode, exp, target);
6733 if (target)
6734 return target;
6735 break;
6736
6737 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
6738 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
6739 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
6740 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
6741 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
6742 {
6743 unsigned int nargs, z;
6744 vec<tree, va_gc> *vec;
6745
6746 mode =
6747 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
6748 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
6749 if (target)
6750 return target;
6751
6752 /* If this is turned into an external library call, the weak parameter
6753 must be dropped to match the expected parameter list. */
6754 nargs = call_expr_nargs (exp);
6755 vec_alloc (vec, nargs - 1);
6756 for (z = 0; z < 3; z++)
6757 vec->quick_push (CALL_EXPR_ARG (exp, z));
6758 /* Skip the boolean weak parameter. */
6759 for (z = 4; z < 6; z++)
6760 vec->quick_push (CALL_EXPR_ARG (exp, z));
6761 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
6762 break;
6763 }
6764
6765 case BUILT_IN_ATOMIC_LOAD_1:
6766 case BUILT_IN_ATOMIC_LOAD_2:
6767 case BUILT_IN_ATOMIC_LOAD_4:
6768 case BUILT_IN_ATOMIC_LOAD_8:
6769 case BUILT_IN_ATOMIC_LOAD_16:
6770 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
6771 target = expand_builtin_atomic_load (mode, exp, target);
6772 if (target)
6773 return target;
6774 break;
6775
6776 case BUILT_IN_ATOMIC_STORE_1:
6777 case BUILT_IN_ATOMIC_STORE_2:
6778 case BUILT_IN_ATOMIC_STORE_4:
6779 case BUILT_IN_ATOMIC_STORE_8:
6780 case BUILT_IN_ATOMIC_STORE_16:
6781 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
6782 target = expand_builtin_atomic_store (mode, exp);
6783 if (target)
6784 return const0_rtx;
6785 break;
6786
6787 case BUILT_IN_ATOMIC_ADD_FETCH_1:
6788 case BUILT_IN_ATOMIC_ADD_FETCH_2:
6789 case BUILT_IN_ATOMIC_ADD_FETCH_4:
6790 case BUILT_IN_ATOMIC_ADD_FETCH_8:
6791 case BUILT_IN_ATOMIC_ADD_FETCH_16:
6792 {
6793 enum built_in_function lib;
6794 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
6795 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
6796 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
6797 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
6798 ignore, lib);
6799 if (target)
6800 return target;
6801 break;
6802 }
6803 case BUILT_IN_ATOMIC_SUB_FETCH_1:
6804 case BUILT_IN_ATOMIC_SUB_FETCH_2:
6805 case BUILT_IN_ATOMIC_SUB_FETCH_4:
6806 case BUILT_IN_ATOMIC_SUB_FETCH_8:
6807 case BUILT_IN_ATOMIC_SUB_FETCH_16:
6808 {
6809 enum built_in_function lib;
6810 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
6811 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
6812 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
6813 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
6814 ignore, lib);
6815 if (target)
6816 return target;
6817 break;
6818 }
6819 case BUILT_IN_ATOMIC_AND_FETCH_1:
6820 case BUILT_IN_ATOMIC_AND_FETCH_2:
6821 case BUILT_IN_ATOMIC_AND_FETCH_4:
6822 case BUILT_IN_ATOMIC_AND_FETCH_8:
6823 case BUILT_IN_ATOMIC_AND_FETCH_16:
6824 {
6825 enum built_in_function lib;
6826 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
6827 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
6828 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
6829 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
6830 ignore, lib);
6831 if (target)
6832 return target;
6833 break;
6834 }
6835 case BUILT_IN_ATOMIC_NAND_FETCH_1:
6836 case BUILT_IN_ATOMIC_NAND_FETCH_2:
6837 case BUILT_IN_ATOMIC_NAND_FETCH_4:
6838 case BUILT_IN_ATOMIC_NAND_FETCH_8:
6839 case BUILT_IN_ATOMIC_NAND_FETCH_16:
6840 {
6841 enum built_in_function lib;
6842 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
6843 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
6844 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
6845 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
6846 ignore, lib);
6847 if (target)
6848 return target;
6849 break;
6850 }
6851 case BUILT_IN_ATOMIC_XOR_FETCH_1:
6852 case BUILT_IN_ATOMIC_XOR_FETCH_2:
6853 case BUILT_IN_ATOMIC_XOR_FETCH_4:
6854 case BUILT_IN_ATOMIC_XOR_FETCH_8:
6855 case BUILT_IN_ATOMIC_XOR_FETCH_16:
6856 {
6857 enum built_in_function lib;
6858 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
6859 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
6860 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
6861 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
6862 ignore, lib);
6863 if (target)
6864 return target;
6865 break;
6866 }
6867 case BUILT_IN_ATOMIC_OR_FETCH_1:
6868 case BUILT_IN_ATOMIC_OR_FETCH_2:
6869 case BUILT_IN_ATOMIC_OR_FETCH_4:
6870 case BUILT_IN_ATOMIC_OR_FETCH_8:
6871 case BUILT_IN_ATOMIC_OR_FETCH_16:
6872 {
6873 enum built_in_function lib;
6874 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
6875 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
6876 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
6877 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
6878 ignore, lib);
6879 if (target)
6880 return target;
6881 break;
6882 }
6883 case BUILT_IN_ATOMIC_FETCH_ADD_1:
6884 case BUILT_IN_ATOMIC_FETCH_ADD_2:
6885 case BUILT_IN_ATOMIC_FETCH_ADD_4:
6886 case BUILT_IN_ATOMIC_FETCH_ADD_8:
6887 case BUILT_IN_ATOMIC_FETCH_ADD_16:
6888 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
6889 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
6890 ignore, BUILT_IN_NONE);
6891 if (target)
6892 return target;
6893 break;
6894
6895 case BUILT_IN_ATOMIC_FETCH_SUB_1:
6896 case BUILT_IN_ATOMIC_FETCH_SUB_2:
6897 case BUILT_IN_ATOMIC_FETCH_SUB_4:
6898 case BUILT_IN_ATOMIC_FETCH_SUB_8:
6899 case BUILT_IN_ATOMIC_FETCH_SUB_16:
6900 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
6901 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
6902 ignore, BUILT_IN_NONE);
6903 if (target)
6904 return target;
6905 break;
6906
6907 case BUILT_IN_ATOMIC_FETCH_AND_1:
6908 case BUILT_IN_ATOMIC_FETCH_AND_2:
6909 case BUILT_IN_ATOMIC_FETCH_AND_4:
6910 case BUILT_IN_ATOMIC_FETCH_AND_8:
6911 case BUILT_IN_ATOMIC_FETCH_AND_16:
6912 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
6913 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
6914 ignore, BUILT_IN_NONE);
6915 if (target)
6916 return target;
6917 break;
6918
6919 case BUILT_IN_ATOMIC_FETCH_NAND_1:
6920 case BUILT_IN_ATOMIC_FETCH_NAND_2:
6921 case BUILT_IN_ATOMIC_FETCH_NAND_4:
6922 case BUILT_IN_ATOMIC_FETCH_NAND_8:
6923 case BUILT_IN_ATOMIC_FETCH_NAND_16:
6924 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
6925 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
6926 ignore, BUILT_IN_NONE);
6927 if (target)
6928 return target;
6929 break;
6930
6931 case BUILT_IN_ATOMIC_FETCH_XOR_1:
6932 case BUILT_IN_ATOMIC_FETCH_XOR_2:
6933 case BUILT_IN_ATOMIC_FETCH_XOR_4:
6934 case BUILT_IN_ATOMIC_FETCH_XOR_8:
6935 case BUILT_IN_ATOMIC_FETCH_XOR_16:
6936 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
6937 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
6938 ignore, BUILT_IN_NONE);
6939 if (target)
6940 return target;
6941 break;
6942
6943 case BUILT_IN_ATOMIC_FETCH_OR_1:
6944 case BUILT_IN_ATOMIC_FETCH_OR_2:
6945 case BUILT_IN_ATOMIC_FETCH_OR_4:
6946 case BUILT_IN_ATOMIC_FETCH_OR_8:
6947 case BUILT_IN_ATOMIC_FETCH_OR_16:
6948 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
6949 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
6950 ignore, BUILT_IN_NONE);
6951 if (target)
6952 return target;
6953 break;
6954
6955 case BUILT_IN_ATOMIC_TEST_AND_SET:
6956 return expand_builtin_atomic_test_and_set (exp, target);
6957
6958 case BUILT_IN_ATOMIC_CLEAR:
6959 return expand_builtin_atomic_clear (exp);
6960
6961 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
6962 return expand_builtin_atomic_always_lock_free (exp);
6963
6964 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
6965 target = expand_builtin_atomic_is_lock_free (exp);
6966 if (target)
6967 return target;
6968 break;
6969
6970 case BUILT_IN_ATOMIC_THREAD_FENCE:
6971 expand_builtin_atomic_thread_fence (exp);
6972 return const0_rtx;
6973
6974 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
6975 expand_builtin_atomic_signal_fence (exp);
6976 return const0_rtx;
6977
6978 case BUILT_IN_OBJECT_SIZE:
6979 return expand_builtin_object_size (exp);
6980
6981 case BUILT_IN_MEMCPY_CHK:
6982 case BUILT_IN_MEMPCPY_CHK:
6983 case BUILT_IN_MEMMOVE_CHK:
6984 case BUILT_IN_MEMSET_CHK:
6985 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6986 if (target)
6987 return target;
6988 break;
6989
6990 case BUILT_IN_STRCPY_CHK:
6991 case BUILT_IN_STPCPY_CHK:
6992 case BUILT_IN_STRNCPY_CHK:
6993 case BUILT_IN_STPNCPY_CHK:
6994 case BUILT_IN_STRCAT_CHK:
6995 case BUILT_IN_STRNCAT_CHK:
6996 case BUILT_IN_SNPRINTF_CHK:
6997 case BUILT_IN_VSNPRINTF_CHK:
6998 maybe_emit_chk_warning (exp, fcode);
6999 break;
7000
7001 case BUILT_IN_SPRINTF_CHK:
7002 case BUILT_IN_VSPRINTF_CHK:
7003 maybe_emit_sprintf_chk_warning (exp, fcode);
7004 break;
7005
7006 case BUILT_IN_FREE:
7007 if (warn_free_nonheap_object)
7008 maybe_emit_free_warning (exp);
7009 break;
7010
7011 case BUILT_IN_THREAD_POINTER:
7012 return expand_builtin_thread_pointer (exp, target);
7013
7014 case BUILT_IN_SET_THREAD_POINTER:
7015 expand_builtin_set_thread_pointer (exp);
7016 return const0_rtx;
7017
7018 case BUILT_IN_CILK_DETACH:
7019 expand_builtin_cilk_detach (exp);
7020 return const0_rtx;
7021
7022 case BUILT_IN_CILK_POP_FRAME:
7023 expand_builtin_cilk_pop_frame (exp);
7024 return const0_rtx;
7025
7026 case BUILT_IN_CHKP_INIT_PTR_BOUNDS:
7027 case BUILT_IN_CHKP_NULL_PTR_BOUNDS:
7028 case BUILT_IN_CHKP_COPY_PTR_BOUNDS:
7029 case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS:
7030 case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS:
7031 case BUILT_IN_CHKP_CHECK_PTR_BOUNDS:
7032 case BUILT_IN_CHKP_SET_PTR_BOUNDS:
7033 case BUILT_IN_CHKP_NARROW_PTR_BOUNDS:
7034 case BUILT_IN_CHKP_STORE_PTR_BOUNDS:
7035 case BUILT_IN_CHKP_GET_PTR_LBOUND:
7036 case BUILT_IN_CHKP_GET_PTR_UBOUND:
7037 /* We allow user CHKP builtins even if the Pointer Bounds
7038 Checker is off, expanding them to trivial results below. */
7039 if (!chkp_function_instrumented_p (current_function_decl))
7040 {
7041 if (fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS
7042 || fcode == BUILT_IN_CHKP_NARROW_PTR_BOUNDS
7043 || fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
7044 || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
7045 || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
7046 return expand_normal (CALL_EXPR_ARG (exp, 0));
7047 else if (fcode == BUILT_IN_CHKP_GET_PTR_LBOUND)
7048 return expand_normal (size_zero_node);
7049 else if (fcode == BUILT_IN_CHKP_GET_PTR_UBOUND)
7050 return expand_normal (size_int (-1));
7051 else
7052 return const0_rtx;
7053 }
7054 /* FALLTHROUGH */
7055
7056 case BUILT_IN_CHKP_BNDMK:
7057 case BUILT_IN_CHKP_BNDSTX:
7058 case BUILT_IN_CHKP_BNDCL:
7059 case BUILT_IN_CHKP_BNDCU:
7060 case BUILT_IN_CHKP_BNDLDX:
7061 case BUILT_IN_CHKP_BNDRET:
7062 case BUILT_IN_CHKP_INTERSECT:
7063 case BUILT_IN_CHKP_NARROW:
7064 case BUILT_IN_CHKP_EXTRACT_LOWER:
7065 case BUILT_IN_CHKP_EXTRACT_UPPER:
7066 /* A software implementation of the Pointer Bounds Checker is not yet
7067 implemented; target support is required. */
7068 error ("your target platform does not support %<-fcheck-pointer-bounds%>");
7069 break;
7070
7071 case BUILT_IN_ACC_ON_DEVICE:
7072 target = expand_builtin_acc_on_device (exp, target);
7073 if (target)
7074 return target;
7075 break;
7076
7077 default: /* Just do a library call if the builtin is unknown. */
7078 break;
7079 }
7080
7081 /* The switch statement above can drop through to cause the function
7082 to be called normally. */
7083 return expand_call (exp, target, ignore);
7084 }
7085
7086 /* Similar to expand_builtin, but used for instrumented calls. */
7087
7088 rtx
7089 expand_builtin_with_bounds (tree exp, rtx target,
7090 rtx subtarget ATTRIBUTE_UNUSED,
7091 machine_mode mode, int ignore)
7092 {
7093 tree fndecl = get_callee_fndecl (exp);
7094 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7095
7096 gcc_assert (CALL_WITH_BOUNDS_P (exp));
7097
7098 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7099 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7100
7101 gcc_assert (fcode > BEGIN_CHKP_BUILTINS
7102 && fcode < END_CHKP_BUILTINS);
7103
7104 switch (fcode)
7105 {
7106 case BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP:
7107 target = expand_builtin_memcpy_with_bounds (exp, target);
7108 if (target)
7109 return target;
7110 break;
7111
7112 case BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP:
7113 target = expand_builtin_mempcpy_with_bounds (exp, target, mode);
7114 if (target)
7115 return target;
7116 break;
7117
7118 case BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP:
7119 target = expand_builtin_memset_with_bounds (exp, target, mode);
7120 if (target)
7121 return target;
7122 break;
7123
7124 default:
7125 break;
7126 }
7127
7128 /* The switch statement above can drop through to cause the function
7129 to be called normally. */
7130 return expand_call (exp, target, ignore);
7131 }
7132
7133 /* Determine whether a tree node represents a call to a built-in
7134 function. If the tree T is a call to a built-in function with
7135 the right number of arguments of the appropriate types, return
7136 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7137 Otherwise the return value is END_BUILTINS. */
7138
7139 enum built_in_function
7140 builtin_mathfn_code (const_tree t)
7141 {
7142 const_tree fndecl, arg, parmlist;
7143 const_tree argtype, parmtype;
7144 const_call_expr_arg_iterator iter;
7145
7146 if (TREE_CODE (t) != CALL_EXPR
7147 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
7148 return END_BUILTINS;
7149
7150 fndecl = get_callee_fndecl (t);
7151 if (fndecl == NULL_TREE
7152 || TREE_CODE (fndecl) != FUNCTION_DECL
7153 || ! DECL_BUILT_IN (fndecl)
7154 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7155 return END_BUILTINS;
7156
7157 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
7158 init_const_call_expr_arg_iterator (t, &iter);
7159 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
7160 {
7161 /* If a function doesn't take a variable number of arguments,
7162 the last element in the list will have type `void'. */
7163 parmtype = TREE_VALUE (parmlist);
7164 if (VOID_TYPE_P (parmtype))
7165 {
7166 if (more_const_call_expr_args_p (&iter))
7167 return END_BUILTINS;
7168 return DECL_FUNCTION_CODE (fndecl);
7169 }
7170
7171 if (! more_const_call_expr_args_p (&iter))
7172 return END_BUILTINS;
7173
7174 arg = next_const_call_expr_arg (&iter);
7175 argtype = TREE_TYPE (arg);
7176
7177 if (SCALAR_FLOAT_TYPE_P (parmtype))
7178 {
7179 if (! SCALAR_FLOAT_TYPE_P (argtype))
7180 return END_BUILTINS;
7181 }
7182 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7183 {
7184 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7185 return END_BUILTINS;
7186 }
7187 else if (POINTER_TYPE_P (parmtype))
7188 {
7189 if (! POINTER_TYPE_P (argtype))
7190 return END_BUILTINS;
7191 }
7192 else if (INTEGRAL_TYPE_P (parmtype))
7193 {
7194 if (! INTEGRAL_TYPE_P (argtype))
7195 return END_BUILTINS;
7196 }
7197 else
7198 return END_BUILTINS;
7199 }
7200
7201 /* Variable-length argument list. */
7202 return DECL_FUNCTION_CODE (fndecl);
7203 }
7204
7205 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7206 evaluate to a constant. */
7207
7208 static tree
7209 fold_builtin_constant_p (tree arg)
7210 {
7211 /* We return 1 for a numeric type that's known to be a constant
7212 value at compile-time or for an aggregate type that's a
7213 literal constant. */
7214 STRIP_NOPS (arg);
7215
7216 /* If we know this is a constant, return the constant one. */
7217 if (CONSTANT_CLASS_P (arg)
7218 || (TREE_CODE (arg) == CONSTRUCTOR
7219 && TREE_CONSTANT (arg)))
7220 return integer_one_node;
7221 if (TREE_CODE (arg) == ADDR_EXPR)
7222 {
7223 tree op = TREE_OPERAND (arg, 0);
7224 if (TREE_CODE (op) == STRING_CST
7225 || (TREE_CODE (op) == ARRAY_REF
7226 && integer_zerop (TREE_OPERAND (op, 1))
7227 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7228 return integer_one_node;
7229 }
7230
7231 /* If this expression has side effects, show we don't know it to be a
7232 constant. Likewise if it's a pointer or aggregate type, since in
7233 those cases we only want literals: those are only optimized
7234 when generating RTL, not later.
7235 And finally, if we are compiling an initializer, not code, we
7236 need to return a definite result now; there's not going to be any
7237 more optimization done. */
7238 if (TREE_SIDE_EFFECTS (arg)
7239 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7240 || POINTER_TYPE_P (TREE_TYPE (arg))
7241 || cfun == 0
7242 || folding_initializer
7243 || force_folding_builtin_constant_p)
7244 return integer_zero_node;
7245
7246 return NULL_TREE;
7247 }
7248
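/* Illustrative results of the folding above:

     __builtin_constant_p (42)     -> 1
     __builtin_constant_p ("abc")  -> 1   (address of a string literal)
     __builtin_constant_p (i + j)  -> 0   when folding an initializer,
					  where nothing further can run

   When the argument is not yet provably constant inside a function
   body, NULL_TREE is returned so that later optimization may still
   resolve it.  */
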
7249 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7250 return it as a truthvalue. */
7251
7252 static tree
7253 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
7254 tree predictor)
7255 {
7256 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7257
7258 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
7259 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7260 ret_type = TREE_TYPE (TREE_TYPE (fn));
7261 pred_type = TREE_VALUE (arg_types);
7262 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7263
7264 pred = fold_convert_loc (loc, pred_type, pred);
7265 expected = fold_convert_loc (loc, expected_type, expected);
7266 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
7267 predictor);
7268
7269 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7270 build_int_cst (ret_type, 0));
7271 }
7272
7273 /* Fold a call to builtin_expect with arguments ARG0, ARG1 and ARG2
7274 (an optional predictor). Return NULL_TREE if no simplification is possible. */
7275
7276 tree
7277 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
7278 {
7279 tree inner, fndecl, inner_arg0;
7280 enum tree_code code;
7281
7282 /* Distribute the expected value over short-circuiting operators.
7283 See through the cast from truthvalue_type_node to long. */
7284 inner_arg0 = arg0;
7285 while (CONVERT_EXPR_P (inner_arg0)
7286 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
7287 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
7288 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
7289
7290 /* If this is a builtin_expect within a builtin_expect, keep the
7291 inner one. See through a comparison against a constant. It
7292 might have been added to create a truthvalue. */
7293 inner = inner_arg0;
7294
7295 if (COMPARISON_CLASS_P (inner)
7296 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7297 inner = TREE_OPERAND (inner, 0);
7298
7299 if (TREE_CODE (inner) == CALL_EXPR
7300 && (fndecl = get_callee_fndecl (inner))
7301 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7302 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7303 return arg0;
7304
7305 inner = inner_arg0;
7306 code = TREE_CODE (inner);
7307 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7308 {
7309 tree op0 = TREE_OPERAND (inner, 0);
7310 tree op1 = TREE_OPERAND (inner, 1);
7311
7312 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
7313 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
7314 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7315
7316 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
7317 }
7318
7319 /* If the argument isn't invariant then there's nothing else we can do. */
7320 if (!TREE_CONSTANT (inner_arg0))
7321 return NULL_TREE;
7322
7323 /* If we expect that a comparison against the argument will fold to
7324 a constant, return the constant. In practice, this means a true
7325 constant or the address of a non-weak symbol. */
7326 inner = inner_arg0;
7327 STRIP_NOPS (inner);
7328 if (TREE_CODE (inner) == ADDR_EXPR)
7329 {
7330 do
7331 {
7332 inner = TREE_OPERAND (inner, 0);
7333 }
7334 while (TREE_CODE (inner) == COMPONENT_REF
7335 || TREE_CODE (inner) == ARRAY_REF);
7336 if ((TREE_CODE (inner) == VAR_DECL
7337 || TREE_CODE (inner) == FUNCTION_DECL)
7338 && DECL_WEAK (inner))
7339 return NULL_TREE;
7340 }
7341
7342 /* Otherwise, ARG0 already has the proper type for the return value. */
7343 return arg0;
7344 }
7345
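/* For instance, the distribution above rewrites

     __builtin_expect (a && b, 1)

   roughly as

     (__builtin_expect (a, 1) != 0) && (__builtin_expect (b, 1) != 0)

   (converted back to the original type), so that each arm of the
   short-circuit carries its own prediction.  */
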
7346 /* Fold a call to __builtin_classify_type with argument ARG. */
7347
7348 static tree
7349 fold_builtin_classify_type (tree arg)
7350 {
7351 if (arg == 0)
7352 return build_int_cst (integer_type_node, no_type_class);
7353
7354 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
7355 }
7356
7357 /* Fold a call to __builtin_strlen with argument ARG. */
7358
7359 static tree
7360 fold_builtin_strlen (location_t loc, tree type, tree arg)
7361 {
7362 if (!validate_arg (arg, POINTER_TYPE))
7363 return NULL_TREE;
7364 else
7365 {
7366 tree len = c_strlen (arg, 0);
7367
7368 if (len)
7369 return fold_convert_loc (loc, type, len);
7370
7371 return NULL_TREE;
7372 }
7373 }
7374
7375 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7376
7377 static tree
7378 fold_builtin_inf (location_t loc, tree type, int warn)
7379 {
7380 REAL_VALUE_TYPE real;
7381
7382 /* __builtin_inff is intended to be usable to define INFINITY on all
7383 targets. If an infinity is not available, INFINITY expands "to a
7384 positive constant of type float that overflows at translation
7385 time", footnote "In this case, using INFINITY will violate the
7386 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7387 Thus we pedwarn to ensure this constraint violation is
7388 diagnosed. */
7389 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7390 pedwarn (loc, 0, "target format does not support infinity");
7391
7392 real_inf (&real);
7393 return build_real (type, real);
7394 }
7395
7396 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7397
7398 static tree
7399 fold_builtin_nan (tree arg, tree type, int quiet)
7400 {
7401 REAL_VALUE_TYPE real;
7402 const char *str;
7403
7404 if (!validate_arg (arg, POINTER_TYPE))
7405 return NULL_TREE;
7406 str = c_getstr (arg);
7407 if (!str)
7408 return NULL_TREE;
7409
7410 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7411 return NULL_TREE;
7412
7413 return build_real (type, real);
7414 }
7415
7416 /* Return true if the floating point expression T has an integer value.
7417 We also allow +Inf, -Inf and NaN to be considered integer values. */
7418
7419 static bool
7420 integer_valued_real_p (tree t)
7421 {
7422 switch (TREE_CODE (t))
7423 {
7424 case FLOAT_EXPR:
7425 return true;
7426
7427 case ABS_EXPR:
7428 case SAVE_EXPR:
7429 return integer_valued_real_p (TREE_OPERAND (t, 0));
7430
7431 case COMPOUND_EXPR:
7432 case MODIFY_EXPR:
7433 case BIND_EXPR:
7434 return integer_valued_real_p (TREE_OPERAND (t, 1));
7435
7436 case PLUS_EXPR:
7437 case MINUS_EXPR:
7438 case MULT_EXPR:
7439 case MIN_EXPR:
7440 case MAX_EXPR:
7441 return integer_valued_real_p (TREE_OPERAND (t, 0))
7442 && integer_valued_real_p (TREE_OPERAND (t, 1));
7443
7444 case COND_EXPR:
7445 return integer_valued_real_p (TREE_OPERAND (t, 1))
7446 && integer_valued_real_p (TREE_OPERAND (t, 2));
7447
7448 case REAL_CST:
7449 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
7450
7451 CASE_CONVERT:
7452 {
7453 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7454 if (TREE_CODE (type) == INTEGER_TYPE)
7455 return true;
7456 if (TREE_CODE (type) == REAL_TYPE)
7457 return integer_valued_real_p (TREE_OPERAND (t, 0));
7458 break;
7459 }
7460
7461 case CALL_EXPR:
7462 switch (builtin_mathfn_code (t))
7463 {
7464 CASE_FLT_FN (BUILT_IN_CEIL):
7465 CASE_FLT_FN (BUILT_IN_FLOOR):
7466 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7467 CASE_FLT_FN (BUILT_IN_RINT):
7468 CASE_FLT_FN (BUILT_IN_ROUND):
7469 CASE_FLT_FN (BUILT_IN_TRUNC):
7470 return true;
7471
7472 CASE_FLT_FN (BUILT_IN_FMIN):
7473 CASE_FLT_FN (BUILT_IN_FMAX):
7474 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7475 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
7476
7477 default:
7478 break;
7479 }
7480 break;
7481
7482 default:
7483 break;
7484 }
7485 return false;
7486 }
7487
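/* For example, this predicate holds for (double) i with integral I,
   for floor (x) or trunc (x), and for fmin (a, b) when it holds for
   both A and B, but not for an arbitrary REAL_CST such as 0.5.  */
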
7488 /* FNDECL is assumed to be a builtin where truncation can be propagated
7489 across (for instance floor((double)f) == (double)floorf (f)).
7490 Do the transformation for a call with argument ARG. */
7491
7492 static tree
7493 fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
7494 {
7495 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7496
7497 if (!validate_arg (arg, REAL_TYPE))
7498 return NULL_TREE;
7499
7500 /* Integer rounding functions are idempotent. */
7501 if (fcode == builtin_mathfn_code (arg))
7502 return arg;
7503
7504 /* If argument is already integer valued, and we don't need to worry
7505 about setting errno, there's no need to perform rounding. */
7506 if (! flag_errno_math && integer_valued_real_p (arg))
7507 return arg;
7508
7509 if (optimize)
7510 {
7511 tree arg0 = strip_float_extensions (arg);
7512 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7513 tree newtype = TREE_TYPE (arg0);
7514 tree decl;
7515
7516 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7517 && (decl = mathfn_built_in (newtype, fcode)))
7518 return fold_convert_loc (loc, ftype,
7519 build_call_expr_loc (loc, decl, 1,
7520 fold_convert_loc (loc,
7521 newtype,
7522 arg0)));
7523 }
7524 return NULL_TREE;
7525 }
7526
7527 /* FNDECL is assumed to be a builtin which can narrow the FP type of
7528 the argument, for instance lround((double)f) -> lroundf (f).
7529 Do the transformation for a call with argument ARG. */
7530
7531 static tree
7532 fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
7533 {
7534 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7535
7536 if (!validate_arg (arg, REAL_TYPE))
7537 return NULL_TREE;
7538
7539 /* If argument is already integer valued, and we don't need to worry
7540 about setting errno, there's no need to perform rounding. */
7541 if (! flag_errno_math && integer_valued_real_p (arg))
7542 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7543 TREE_TYPE (TREE_TYPE (fndecl)), arg);
7544
7545 if (optimize)
7546 {
7547 tree ftype = TREE_TYPE (arg);
7548 tree arg0 = strip_float_extensions (arg);
7549 tree newtype = TREE_TYPE (arg0);
7550 tree decl;
7551
7552 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7553 && (decl = mathfn_built_in (newtype, fcode)))
7554 return build_call_expr_loc (loc, decl, 1,
7555 fold_convert_loc (loc, newtype, arg0));
7556 }
7557
7558 /* Canonicalize iround (x) to lround (x) on ILP32 targets where
7559 sizeof (int) == sizeof (long). */
7560 if (TYPE_PRECISION (integer_type_node)
7561 == TYPE_PRECISION (long_integer_type_node))
7562 {
7563 tree newfn = NULL_TREE;
7564 switch (fcode)
7565 {
7566 CASE_FLT_FN (BUILT_IN_ICEIL):
7567 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7568 break;
7569
7570 CASE_FLT_FN (BUILT_IN_IFLOOR):
7571 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7572 break;
7573
7574 CASE_FLT_FN (BUILT_IN_IROUND):
7575 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7576 break;
7577
7578 CASE_FLT_FN (BUILT_IN_IRINT):
7579 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7580 break;
7581
7582 default:
7583 break;
7584 }
7585
7586 if (newfn)
7587 {
7588 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7589 return fold_convert_loc (loc,
7590 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7591 }
7592 }
7593
7594 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7595 sizeof (long long) == sizeof (long). */
7596 if (TYPE_PRECISION (long_long_integer_type_node)
7597 == TYPE_PRECISION (long_integer_type_node))
7598 {
7599 tree newfn = NULL_TREE;
7600 switch (fcode)
7601 {
7602 CASE_FLT_FN (BUILT_IN_LLCEIL):
7603 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7604 break;
7605
7606 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7607 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7608 break;
7609
7610 CASE_FLT_FN (BUILT_IN_LLROUND):
7611 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7612 break;
7613
7614 CASE_FLT_FN (BUILT_IN_LLRINT):
7615 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7616 break;
7617
7618 default:
7619 break;
7620 }
7621
7622 if (newfn)
7623 {
7624 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7625 return fold_convert_loc (loc,
7626 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7627 }
7628 }
7629
7630 return NULL_TREE;
7631 }
7632
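/* For example, on LP64 targets llround (x) is canonicalized above to
   lround (x), and on ILP32 targets iround (x) to lround (x), since
   the two integer return types have the same precision there.  */
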
7633 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7634 return type. Return NULL_TREE if no simplification can be made. */
7635
7636 static tree
7637 fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
7638 {
7639 tree res;
7640
7641 if (!validate_arg (arg, COMPLEX_TYPE)
7642 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7643 return NULL_TREE;
7644
7645 /* Calculate the result when the argument is a constant. */
7646 if (TREE_CODE (arg) == COMPLEX_CST
7647 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7648 type, mpfr_hypot)))
7649 return res;
7650
7651 if (TREE_CODE (arg) == COMPLEX_EXPR)
7652 {
7653 tree real = TREE_OPERAND (arg, 0);
7654 tree imag = TREE_OPERAND (arg, 1);
7655
7656 /* If either part is zero, cabs is fabs of the other. */
7657 if (real_zerop (real))
7658 return fold_build1_loc (loc, ABS_EXPR, type, imag);
7659 if (real_zerop (imag))
7660 return fold_build1_loc (loc, ABS_EXPR, type, real);
7661
7662 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7663 if (flag_unsafe_math_optimizations
7664 && operand_equal_p (real, imag, OEP_PURE_SAME))
7665 {
7666 const REAL_VALUE_TYPE sqrt2_trunc
7667 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7668 STRIP_NOPS (real);
7669 return fold_build2_loc (loc, MULT_EXPR, type,
7670 fold_build1_loc (loc, ABS_EXPR, type, real),
7671 build_real (type, sqrt2_trunc));
7672 }
7673 }
7674
7675 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7676 if (TREE_CODE (arg) == NEGATE_EXPR
7677 || TREE_CODE (arg) == CONJ_EXPR)
7678 return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));
7679
7680 /* Don't do this when optimizing for size. */
7681 if (flag_unsafe_math_optimizations
7682 && optimize && optimize_function_for_speed_p (cfun))
7683 {
7684 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7685
7686 if (sqrtfn != NULL_TREE)
7687 {
7688 tree rpart, ipart, result;
7689
7690 arg = builtin_save_expr (arg);
7691
7692 rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
7693 ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);
7694
7695 rpart = builtin_save_expr (rpart);
7696 ipart = builtin_save_expr (ipart);
7697
7698 result = fold_build2_loc (loc, PLUS_EXPR, type,
7699 fold_build2_loc (loc, MULT_EXPR, type,
7700 rpart, rpart),
7701 fold_build2_loc (loc, MULT_EXPR, type,
7702 ipart, ipart));
7703
7704 return build_call_expr_loc (loc, sqrtfn, 1, result);
7705 }
7706 }
7707
7708 return NULL_TREE;
7709 }
7710
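/* As a sketch, when the expansion above applies (unsafe math, a sqrt
   builtin available, optimizing for speed), cabs (z) becomes roughly

     sqrt (creal (z) * creal (z) + cimag (z) * cimag (z))

   with the real and imaginary parts wrapped in SAVE_EXPRs so each is
   evaluated only once.  */
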
7711 /* Build a complex (inf +- 0i) for the result of cproj. TYPE is the
7712 complex tree type of the result. If NEG is true, the imaginary
7713 zero is negative. */
7714
7715 static tree
7716 build_complex_cproj (tree type, bool neg)
7717 {
7718 REAL_VALUE_TYPE rinf, rzero = dconst0;
7719
7720 real_inf (&rinf);
7721 rzero.sign = neg;
7722 return build_complex (type, build_real (TREE_TYPE (type), rinf),
7723 build_real (TREE_TYPE (type), rzero));
7724 }
7725
7726 /* Fold call to builtin cproj, cprojf or cprojl with argument ARG. TYPE is the
7727 return type. Return NULL_TREE if no simplification can be made. */
7728
7729 static tree
7730 fold_builtin_cproj (location_t loc, tree arg, tree type)
7731 {
7732 if (!validate_arg (arg, COMPLEX_TYPE)
7733 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7734 return NULL_TREE;
7735
7736 /* If there are no infinities, return arg. */
7737 if (! HONOR_INFINITIES (type))
7738 return non_lvalue_loc (loc, arg);
7739
7740 /* Calculate the result when the argument is a constant. */
7741 if (TREE_CODE (arg) == COMPLEX_CST)
7742 {
7743 const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
7744 const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
7745
7746 if (real_isinf (real) || real_isinf (imag))
7747 return build_complex_cproj (type, imag->sign);
7748 else
7749 return arg;
7750 }
7751 else if (TREE_CODE (arg) == COMPLEX_EXPR)
7752 {
7753 tree real = TREE_OPERAND (arg, 0);
7754 tree imag = TREE_OPERAND (arg, 1);
7755
7756 STRIP_NOPS (real);
7757 STRIP_NOPS (imag);
7758
7759 /* If the real part is inf and the imag part is known to be
7760 nonnegative, return (inf + 0i). Remember side-effects are
7761 possible in the imag part. */
7762 if (TREE_CODE (real) == REAL_CST
7763 && real_isinf (TREE_REAL_CST_PTR (real))
7764 && tree_expr_nonnegative_p (imag))
7765 return omit_one_operand_loc (loc, type,
7766 build_complex_cproj (type, false),
7767 arg);
7768
7769 /* If the imag part is inf, return (inf+I*copysign(0,imag)).
7770 Remember side-effects are possible in the real part. */
7771 if (TREE_CODE (imag) == REAL_CST
7772 && real_isinf (TREE_REAL_CST_PTR (imag)))
7773 return
7774 omit_one_operand_loc (loc, type,
7775 build_complex_cproj (type, TREE_REAL_CST_PTR
7776 (imag)->sign), arg);
7777 }
7778
7779 return NULL_TREE;
7780 }
7781
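/* Examples of the cproj folds above:

     cproj (1.0 + 2.0i)            -> 1.0 + 2.0i   (finite, unchanged)
     cproj (INFINITY + 2.0i)       -> INFINITY + 0.0i
     cproj (1.0 - INFINITY * 1.0i) -> INFINITY - 0.0i

   i.e. any infinite input projects to positive real infinity, keeping
   only the sign of the imaginary zero.  */
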
7782 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7783 Return NULL_TREE if no simplification can be made. */
7784
7785 static tree
7786 fold_builtin_sqrt (location_t loc, tree arg, tree type)
7787 {
7789 enum built_in_function fcode;
7790 tree res;
7791
7792 if (!validate_arg (arg, REAL_TYPE))
7793 return NULL_TREE;
7794
7795 /* Calculate the result when the argument is a constant. */
7796 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7797 return res;
7798
7799 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7800 fcode = builtin_mathfn_code (arg);
7801 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7802 {
7803 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7804 arg = fold_build2_loc (loc, MULT_EXPR, type,
7805 CALL_EXPR_ARG (arg, 0),
7806 build_real (type, dconsthalf));
7807 return build_call_expr_loc (loc, expfn, 1, arg);
7808 }
7809
7810 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7811 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7812 {
7813 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7814
7815 if (powfn)
7816 {
7817 tree arg0 = CALL_EXPR_ARG (arg, 0);
7818 tree tree_root;
7819 /* The inner root was either sqrt or cbrt. */
7820 /* This was a conditional expression but it triggered a bug
7821 in Sun C 5.5. */
7822 REAL_VALUE_TYPE dconstroot;
7823 if (BUILTIN_SQRT_P (fcode))
7824 dconstroot = dconsthalf;
7825 else
7826 dconstroot = dconst_third ();
7827
7828 /* Adjust for the outer root. */
7829 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7830 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7831 tree_root = build_real (type, dconstroot);
7832 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7833 }
7834 }
7835
7836 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7837 if (flag_unsafe_math_optimizations
7838 && (fcode == BUILT_IN_POW
7839 || fcode == BUILT_IN_POWF
7840 || fcode == BUILT_IN_POWL))
7841 {
7842 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7843 tree arg0 = CALL_EXPR_ARG (arg, 0);
7844 tree arg1 = CALL_EXPR_ARG (arg, 1);
7845 tree narg1;
7846 if (!tree_expr_nonnegative_p (arg0))
7847 arg0 = build1 (ABS_EXPR, type, arg0);
7848 narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7849 build_real (type, dconsthalf));
7850 return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
7851 }
7852
7853 return NULL_TREE;
7854 }
7855
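/* The unsafe-math identities used above, written out:

     sqrt (expN (x))   == expN (x * 0.5)
     sqrt (sqrt (x))   == pow (x, 1/4),  sqrt (cbrt (x)) == pow (x, 1/6)
     sqrt (pow (x, y)) == pow (fabs (x), y * 0.5)

   all guarded by -funsafe-math-optimizations, since they can change
   rounding and the treatment of negative arguments.  */
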
7856 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7857 Return NULL_TREE if no simplification can be made. */
7858
7859 static tree
7860 fold_builtin_cbrt (location_t loc, tree arg, tree type)
7861 {
7862 const enum built_in_function fcode = builtin_mathfn_code (arg);
7863 tree res;
7864
7865 if (!validate_arg (arg, REAL_TYPE))
7866 return NULL_TREE;
7867
7868 /* Calculate the result when the argument is a constant. */
7869 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7870 return res;
7871
7872 if (flag_unsafe_math_optimizations)
7873 {
7874 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7875 if (BUILTIN_EXPONENT_P (fcode))
7876 {
7877 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7878 const REAL_VALUE_TYPE third_trunc =
7879 real_value_truncate (TYPE_MODE (type), dconst_third ());
7880 arg = fold_build2_loc (loc, MULT_EXPR, type,
7881 CALL_EXPR_ARG (arg, 0),
7882 build_real (type, third_trunc));
7883 return build_call_expr_loc (loc, expfn, 1, arg);
7884 }
7885
7886 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7887 if (BUILTIN_SQRT_P (fcode))
7888 {
7889 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7890
7891 if (powfn)
7892 {
7893 tree arg0 = CALL_EXPR_ARG (arg, 0);
7894 tree tree_root;
7895 REAL_VALUE_TYPE dconstroot = dconst_third ();
7896
7897 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7898 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7899 tree_root = build_real (type, dconstroot);
7900 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7901 }
7902 }
7903
7904 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7905 if (BUILTIN_CBRT_P (fcode))
7906 {
7907 tree arg0 = CALL_EXPR_ARG (arg, 0);
7908 if (tree_expr_nonnegative_p (arg0))
7909 {
7910 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7911
7912 if (powfn)
7913 {
7914 tree tree_root;
7915 REAL_VALUE_TYPE dconstroot;
7916
7917 real_arithmetic (&dconstroot, MULT_EXPR,
7918 dconst_third_ptr (), dconst_third_ptr ());
7919 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7920 tree_root = build_real (type, dconstroot);
7921 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7922 }
7923 }
7924 }
7925
7926 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7927 if (fcode == BUILT_IN_POW
7928 || fcode == BUILT_IN_POWF
7929 || fcode == BUILT_IN_POWL)
7930 {
7931 tree arg00 = CALL_EXPR_ARG (arg, 0);
7932 tree arg01 = CALL_EXPR_ARG (arg, 1);
7933 if (tree_expr_nonnegative_p (arg00))
7934 {
7935 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7936 const REAL_VALUE_TYPE dconstroot
7937 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7938 tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
7939 build_real (type, dconstroot));
7940 return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
7941 }
7942 }
7943 }
7944 return NULL_TREE;
7945 }
7946
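/* Likewise for cbrt under -funsafe-math-optimizations:

     cbrt (expN (x))   == expN (x / 3)
     cbrt (sqrt (x))   == pow (x, 1/6)
     cbrt (cbrt (x))   == pow (x, 1/9)    iff x is nonnegative
     cbrt (pow (x, y)) == pow (x, y / 3)  iff x is nonnegative  */
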
7947 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7948 TYPE is the type of the return value. Return NULL_TREE if no
7949 simplification can be made. */
7950
7951 static tree
7952 fold_builtin_cos (location_t loc,
7953 tree arg, tree type, tree fndecl)
7954 {
7955 tree res, narg;
7956
7957 if (!validate_arg (arg, REAL_TYPE))
7958 return NULL_TREE;
7959
7960 /* Calculate the result when the argument is a constant. */
7961 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7962 return res;
7963
7964 /* Optimize cos(-x) into cos (x). */
7965 if ((narg = fold_strip_sign_ops (arg)))
7966 return build_call_expr_loc (loc, fndecl, 1, narg);
7967
7968 return NULL_TREE;
7969 }
7970
7971 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7972 Return NULL_TREE if no simplification can be made. */
7973
7974 static tree
7975 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
7976 {
7977 if (validate_arg (arg, REAL_TYPE))
7978 {
7979 tree res, narg;
7980
7981 /* Calculate the result when the argument is a constant. */
7982 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7983 return res;
7984
7985 /* Optimize cosh(-x) into cosh (x). */
7986 if ((narg = fold_strip_sign_ops (arg)))
7987 return build_call_expr_loc (loc, fndecl, 1, narg);
7988 }
7989
7990 return NULL_TREE;
7991 }
7992
7993 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7994 argument ARG. TYPE is the type of the return value. Return
7995 NULL_TREE if no simplification can be made. */
7996
7997 static tree
7998 fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
7999 bool hyper)
8000 {
8001 if (validate_arg (arg, COMPLEX_TYPE)
8002 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
8003 {
8004 tree tmp;
8005
8006 /* Calculate the result when the argument is a constant. */
8007 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
8008 return tmp;
8009
8010 /* Optimize fn(-x) into fn(x). */
8011 if ((tmp = fold_strip_sign_ops (arg)))
8012 return build_call_expr_loc (loc, fndecl, 1, tmp);
8013 }
8014
8015 return NULL_TREE;
8016 }
8017
8018 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
8019 Return NULL_TREE if no simplification can be made. */
8020
8021 static tree
8022 fold_builtin_tan (tree arg, tree type)
8023 {
8024 enum built_in_function fcode;
8025 tree res;
8026
8027 if (!validate_arg (arg, REAL_TYPE))
8028 return NULL_TREE;
8029
8030 /* Calculate the result when the argument is a constant. */
8031 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
8032 return res;
8033
8034 /* Optimize tan(atan(x)) = x. */
8035 fcode = builtin_mathfn_code (arg);
8036 if (flag_unsafe_math_optimizations
8037 && (fcode == BUILT_IN_ATAN
8038 || fcode == BUILT_IN_ATANF
8039 || fcode == BUILT_IN_ATANL))
8040 return CALL_EXPR_ARG (arg, 0);
8041
8042 return NULL_TREE;
8043 }
8044
8045 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
8046 NULL_TREE if no simplification can be made. */
8047
8048 static tree
8049 fold_builtin_sincos (location_t loc,
8050 tree arg0, tree arg1, tree arg2)
8051 {
8052 tree type;
8053 tree res, fn, call;
8054
8055 if (!validate_arg (arg0, REAL_TYPE)
8056 || !validate_arg (arg1, POINTER_TYPE)
8057 || !validate_arg (arg2, POINTER_TYPE))
8058 return NULL_TREE;
8059
8060 type = TREE_TYPE (arg0);
8061
8062 /* Calculate the result when the argument is a constant. */
8063 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
8064 return res;
8065
8066 /* Canonicalize sincos to cexpi. */
8067 if (!targetm.libc_has_function (function_c99_math_complex))
8068 return NULL_TREE;
8069 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
8070 if (!fn)
8071 return NULL_TREE;
8072
8073 call = build_call_expr_loc (loc, fn, 1, arg0);
8074 call = builtin_save_expr (call);
8075
8076 return build2 (COMPOUND_EXPR, void_type_node,
8077 build2 (MODIFY_EXPR, void_type_node,
8078 build_fold_indirect_ref_loc (loc, arg1),
8079 build1 (IMAGPART_EXPR, type, call)),
8080 build2 (MODIFY_EXPR, void_type_node,
8081 build_fold_indirect_ref_loc (loc, arg2),
8082 build1 (REALPART_EXPR, type, call)));
8083 }
8084
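/* A sketch of the canonicalization above: when C99 complex math is
   available,

     sincos (x, &s, &c);

   is rewritten roughly as

     t = cexpi (x);      t holds cos (x) + I * sin (x)
     s = __imag__ t;
     c = __real__ t;

   using the internal cexpi builtin, whose expansion later picks
   whatever sincos/cexp support the target actually provides.  */
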
8085 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
8086 NULL_TREE if no simplification can be made. */
8087
8088 static tree
8089 fold_builtin_cexp (location_t loc, tree arg0, tree type)
8090 {
8091 tree rtype;
8092 tree realp, imagp, ifn;
8093 tree res;
8094
8095 if (!validate_arg (arg0, COMPLEX_TYPE)
8096 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
8097 return NULL_TREE;
8098
8099 /* Calculate the result when the argument is a constant. */
8100 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
8101 return res;
8102
8103 rtype = TREE_TYPE (TREE_TYPE (arg0));
8104
8105 /* If we can determine the real part of ARG0 and it is constant zero,
8106 fold to cexpi of the imaginary part. */
8107 if (!targetm.libc_has_function (function_c99_math_complex))
8108 return NULL_TREE;
8109 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
8110 if (!ifn)
8111 return NULL_TREE;
8112
8113 if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
8114 && real_zerop (realp))
8115 {
8116 tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
8117 return build_call_expr_loc (loc, ifn, 1, narg);
8118 }
8119
8120 /* If we can easily decompose the real and imaginary parts, split
8121 cexp (r + i*I) into exp (r) * cexpi (i). */
8122 if (flag_unsafe_math_optimizations
8123 && realp)
8124 {
8125 tree rfn, rcall, icall;
8126
8127 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
8128 if (!rfn)
8129 return NULL_TREE;
8130
8131 imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
8132 if (!imagp)
8133 return NULL_TREE;
8134
8135 icall = build_call_expr_loc (loc, ifn, 1, imagp);
8136 icall = builtin_save_expr (icall);
8137 rcall = build_call_expr_loc (loc, rfn, 1, realp);
8138 rcall = builtin_save_expr (rcall);
8139 return fold_build2_loc (loc, COMPLEX_EXPR, type,
8140 fold_build2_loc (loc, MULT_EXPR, rtype,
8141 rcall,
8142 fold_build1_loc (loc, REALPART_EXPR,
8143 rtype, icall)),
8144 fold_build2_loc (loc, MULT_EXPR, rtype,
8145 rcall,
8146 fold_build1_loc (loc, IMAGPART_EXPR,
8147 rtype, icall)));
8148 }
8149
8150 return NULL_TREE;
8151 }
8152
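/* For instance, with -funsafe-math-optimizations the split above
   turns cexp (r + y * I) into exp (r) * cexpi (y): the real
   exponential is computed once and scales both parts of the
   unit-circle value cexpi (y).  */
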
8153 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
8154 Return NULL_TREE if no simplification can be made. */
8155
8156 static tree
8157 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
8158 {
8159 if (!validate_arg (arg, REAL_TYPE))
8160 return NULL_TREE;
8161
8162 /* Optimize trunc of constant value. */
8163 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8164 {
8165 REAL_VALUE_TYPE r, x;
8166 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8167
8168 x = TREE_REAL_CST (arg);
8169 real_trunc (&r, TYPE_MODE (type), &x);
8170 return build_real (type, r);
8171 }
8172
8173 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8174 }
8175
8176 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
8177 Return NULL_TREE if no simplification can be made. */
8178
8179 static tree
8180 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
8181 {
8182 if (!validate_arg (arg, REAL_TYPE))
8183 return NULL_TREE;
8184
8185 /* Optimize floor of constant value. */
8186 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8187 {
8188 REAL_VALUE_TYPE x;
8189
8190 x = TREE_REAL_CST (arg);
8191 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8192 {
8193 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8194 REAL_VALUE_TYPE r;
8195
8196 real_floor (&r, TYPE_MODE (type), &x);
8197 return build_real (type, r);
8198 }
8199 }
8200
8201 /* Fold floor (x) where x is nonnegative to trunc (x). */
8202 if (tree_expr_nonnegative_p (arg))
8203 {
8204 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
8205 if (truncfn)
8206 return build_call_expr_loc (loc, truncfn, 1, arg);
8207 }
8208
8209 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8210 }
8211
8212 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
8213 Return NULL_TREE if no simplification can be made. */
8214
8215 static tree
8216 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
8217 {
8218 if (!validate_arg (arg, REAL_TYPE))
8219 return NULL_TREE;
8220
8221 /* Optimize ceil of constant value. */
8222 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8223 {
8224 REAL_VALUE_TYPE x;
8225
8226 x = TREE_REAL_CST (arg);
8227 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8228 {
8229 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8230 REAL_VALUE_TYPE r;
8231
8232 real_ceil (&r, TYPE_MODE (type), &x);
8233 return build_real (type, r);
8234 }
8235 }
8236
8237 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8238 }
8239
8240 /* Fold function call to builtin round, roundf or roundl with argument ARG.
8241 Return NULL_TREE if no simplification can be made. */
8242
8243 static tree
8244 fold_builtin_round (location_t loc, tree fndecl, tree arg)
8245 {
8246 if (!validate_arg (arg, REAL_TYPE))
8247 return NULL_TREE;
8248
8249 /* Optimize round of constant value. */
8250 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8251 {
8252 REAL_VALUE_TYPE x;
8253
8254 x = TREE_REAL_CST (arg);
8255 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8256 {
8257 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8258 REAL_VALUE_TYPE r;
8259
8260 real_round (&r, TYPE_MODE (type), &x);
8261 return build_real (type, r);
8262 }
8263 }
8264
8265 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8266 }
8267
8268 /* Fold function call to builtin lround, lroundf or lroundl (or the
8269 corresponding long long versions) and other rounding functions. ARG
8270 is the argument to the call. Return NULL_TREE if no simplification
8271 can be made. */
8272
8273 static tree
8274 fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
8275 {
8276 if (!validate_arg (arg, REAL_TYPE))
8277 return NULL_TREE;
8278
8279 /* Optimize lround of constant value. */
8280 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8281 {
8282 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
8283
8284 if (real_isfinite (&x))
8285 {
8286 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
8287 tree ftype = TREE_TYPE (arg);
8288 REAL_VALUE_TYPE r;
8289 bool fail = false;
8290
8291 switch (DECL_FUNCTION_CODE (fndecl))
8292 {
8293 CASE_FLT_FN (BUILT_IN_IFLOOR):
8294 CASE_FLT_FN (BUILT_IN_LFLOOR):
8295 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8296 real_floor (&r, TYPE_MODE (ftype), &x);
8297 break;
8298
8299 CASE_FLT_FN (BUILT_IN_ICEIL):
8300 CASE_FLT_FN (BUILT_IN_LCEIL):
8301 CASE_FLT_FN (BUILT_IN_LLCEIL):
8302 real_ceil (&r, TYPE_MODE (ftype), &x);
8303 break;
8304
8305 CASE_FLT_FN (BUILT_IN_IROUND):
8306 CASE_FLT_FN (BUILT_IN_LROUND):
8307 CASE_FLT_FN (BUILT_IN_LLROUND):
8308 real_round (&r, TYPE_MODE (ftype), &x);
8309 break;
8310
8311 default:
8312 gcc_unreachable ();
8313 }
8314
8315 wide_int val = real_to_integer (&r, &fail, TYPE_PRECISION (itype));
8316 if (!fail)
8317 return wide_int_to_tree (itype, val);
8318 }
8319 }
8320
8321 switch (DECL_FUNCTION_CODE (fndecl))
8322 {
8323 CASE_FLT_FN (BUILT_IN_LFLOOR):
8324 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8325 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
8326 if (tree_expr_nonnegative_p (arg))
8327 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
8328 TREE_TYPE (TREE_TYPE (fndecl)), arg);
8329 break;
8330 default:;
8331 }
8332
8333 return fold_fixed_mathfn (loc, fndecl, arg);
8334 }
8335
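/* Examples of the folds above:

     lround (2.5)  -> 3             (real_round ties away from zero)
     lfloor (x)    -> (long) x      when X is known nonnegative

   with iround/llround and the remaining cases handled analogously or
   deferred to fold_fixed_mathfn.  */
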
8336 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
8337 and their long and long long variants (i.e. ffsl and ffsll). ARG is
8338 the argument to the call. Return NULL_TREE if no simplification can
8339 be made. */
8340
8341 static tree
8342 fold_builtin_bitop (tree fndecl, tree arg)
8343 {
8344 if (!validate_arg (arg, INTEGER_TYPE))
8345 return NULL_TREE;
8346
8347 /* Optimize for constant argument. */
8348 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8349 {
8350 tree type = TREE_TYPE (arg);
8351 int result;
8352
8353 switch (DECL_FUNCTION_CODE (fndecl))
8354 {
8355 CASE_INT_FN (BUILT_IN_FFS):
8356 result = wi::ffs (arg);
8357 break;
8358
8359 CASE_INT_FN (BUILT_IN_CLZ):
8360 if (wi::ne_p (arg, 0))
8361 result = wi::clz (arg);
8362 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8363 result = TYPE_PRECISION (type);
8364 break;
8365
8366 CASE_INT_FN (BUILT_IN_CTZ):
8367 if (wi::ne_p (arg, 0))
8368 result = wi::ctz (arg);
8369 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8370 result = TYPE_PRECISION (type);
8371 break;
8372
8373 CASE_INT_FN (BUILT_IN_CLRSB):
8374 result = wi::clrsb (arg);
8375 break;
8376
8377 CASE_INT_FN (BUILT_IN_POPCOUNT):
8378 result = wi::popcount (arg);
8379 break;
8380
8381 CASE_INT_FN (BUILT_IN_PARITY):
8382 result = wi::parity (arg);
8383 break;
8384
8385 default:
8386 gcc_unreachable ();
8387 }
8388
8389 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
8390 }
8391
8392 return NULL_TREE;
8393 }
8394
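/* Constant-folding examples for the bit builtins above, with 32-bit
   int arguments:

     __builtin_ffs (0)         -> 0
     __builtin_ffs (8)         -> 4
     __builtin_clz (1)         -> 31
     __builtin_ctz (16)        -> 4
     __builtin_popcount (0xff) -> 8
     __builtin_parity (7)      -> 1  */
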
8395 /* Fold function call to builtin_bswap and the short, long and long long
8396 variants. Return NULL_TREE if no simplification can be made. */
8397 static tree
8398 fold_builtin_bswap (tree fndecl, tree arg)
8399 {
8400 if (! validate_arg (arg, INTEGER_TYPE))
8401 return NULL_TREE;
8402
8403 /* Optimize constant value. */
8404 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8405 {
8406 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8407
8408 switch (DECL_FUNCTION_CODE (fndecl))
8409 {
8410 case BUILT_IN_BSWAP16:
8411 case BUILT_IN_BSWAP32:
8412 case BUILT_IN_BSWAP64:
8413 {
8414 signop sgn = TYPE_SIGN (type);
8415 tree result =
8416 wide_int_to_tree (type,
8417 wide_int::from (arg, TYPE_PRECISION (type),
8418 sgn).bswap ());
8419 return result;
8420 }
8421 default:
8422 gcc_unreachable ();
8423 }
8424 }
8425
8426 return NULL_TREE;
8427 }
8428
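/* For instance, __builtin_bswap32 (0x12345678) folds above to
   0x78563412 and __builtin_bswap16 (0x1234) to 0x3412.  */
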
8429 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8430 NULL_TREE if no simplification can be made. */
8431
8432 static tree
8433 fold_builtin_hypot (location_t loc, tree fndecl,
8434 tree arg0, tree arg1, tree type)
8435 {
8436 tree res, narg0, narg1;
8437
8438 if (!validate_arg (arg0, REAL_TYPE)
8439 || !validate_arg (arg1, REAL_TYPE))
8440 return NULL_TREE;
8441
8442 /* Calculate the result when the argument is a constant. */
8443 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8444 return res;
8445
8446 /* If either argument to hypot has a negate or abs, strip that off.
8447 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8448 narg0 = fold_strip_sign_ops (arg0);
8449 narg1 = fold_strip_sign_ops (arg1);
8450 if (narg0 || narg1)
8451 {
8452 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
8453 narg1 ? narg1 : arg1);
8454 }
8455
8456 /* If either argument is zero, hypot is fabs of the other. */
8457 if (real_zerop (arg0))
8458 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
8459 else if (real_zerop (arg1))
8460 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
8461
8462 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8463 if (flag_unsafe_math_optimizations
8464 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8465 {
8466 const REAL_VALUE_TYPE sqrt2_trunc
8467 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
8468 return fold_build2_loc (loc, MULT_EXPR, type,
8469 fold_build1_loc (loc, ABS_EXPR, type, arg0),
8470 build_real (type, sqrt2_trunc));
8471 }
8472
8473 return NULL_TREE;
8474 }
8475
8476
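/* Summary of the hypot folds above:

     hypot (-x, y) -> hypot (x, y)        (sign operations stripped)
     hypot (x, 0)  -> fabs (x)
     hypot (x, x)  -> fabs (x) * sqrt (2)  with -funsafe-math-optimizations  */
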
8477 /* Fold a builtin function call to pow, powf, or powl. Return
8478 NULL_TREE if no simplification can be made. */
8479 static tree
8480 fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
8481 {
8482 tree res;
8483
8484 if (!validate_arg (arg0, REAL_TYPE)
8485 || !validate_arg (arg1, REAL_TYPE))
8486 return NULL_TREE;
8487
8488 /* Calculate the result when the argument is a constant. */
8489 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8490 return res;
8491
8492 /* Optimize pow(1.0,y) = 1.0. */
8493 if (real_onep (arg0))
8494 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8495
8496 if (TREE_CODE (arg1) == REAL_CST
8497 && !TREE_OVERFLOW (arg1))
8498 {
8499 REAL_VALUE_TYPE cint;
8500 REAL_VALUE_TYPE c;
8501 HOST_WIDE_INT n;
8502
8503 c = TREE_REAL_CST (arg1);
8504
8505 /* Optimize pow(x,0.0) = 1.0. */
8506 if (REAL_VALUES_EQUAL (c, dconst0))
8507 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8508 arg0);
8509
8510 /* Optimize pow(x,1.0) = x. */
8511 if (REAL_VALUES_EQUAL (c, dconst1))
8512 return arg0;
8513
8514 /* Optimize pow(x,-1.0) = 1.0/x. */
8515 if (REAL_VALUES_EQUAL (c, dconstm1))
8516 return fold_build2_loc (loc, RDIV_EXPR, type,
8517 build_real (type, dconst1), arg0);
8518
8519 /* Optimize pow(x,0.5) = sqrt(x). */
8520 if (flag_unsafe_math_optimizations
8521 && REAL_VALUES_EQUAL (c, dconsthalf))
8522 {
8523 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8524
8525 if (sqrtfn != NULL_TREE)
8526 return build_call_expr_loc (loc, sqrtfn, 1, arg0);
8527 }
8528
8529 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8530 if (flag_unsafe_math_optimizations)
8531 {
8532 const REAL_VALUE_TYPE dconstroot
8533 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8534
8535 if (REAL_VALUES_EQUAL (c, dconstroot))
8536 {
8537 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8538 if (cbrtfn != NULL_TREE)
8539 return build_call_expr_loc (loc, cbrtfn, 1, arg0);
8540 }
8541 }
8542
8543 /* Check for an integer exponent. */
8544 n = real_to_integer (&c);
8545 real_from_integer (&cint, VOIDmode, n, SIGNED);
8546 if (real_identical (&c, &cint))
8547 {
8548 /* Attempt to evaluate pow at compile-time, unless this should
8549 raise an exception. */
8550 if (TREE_CODE (arg0) == REAL_CST
8551 && !TREE_OVERFLOW (arg0)
8552 && (n > 0
8553 || (!flag_trapping_math && !flag_errno_math)
8554 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
8555 {
8556 REAL_VALUE_TYPE x;
8557 bool inexact;
8558
8559 x = TREE_REAL_CST (arg0);
8560 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8561 if (flag_unsafe_math_optimizations || !inexact)
8562 return build_real (type, x);
8563 }
8564
8565 /* Strip sign ops from even integer powers. */
8566 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8567 {
8568 tree narg0 = fold_strip_sign_ops (arg0);
8569 if (narg0)
8570 return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
8571 }
8572 }
8573 }
8574
8575 if (flag_unsafe_math_optimizations)
8576 {
8577 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8578
8579 /* Optimize pow(expN(x),y) = expN(x*y). */
8580 if (BUILTIN_EXPONENT_P (fcode))
8581 {
8582 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8583 tree arg = CALL_EXPR_ARG (arg0, 0);
8584 arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
8585 return build_call_expr_loc (loc, expfn, 1, arg);
8586 }
8587
8588 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8589 if (BUILTIN_SQRT_P (fcode))
8590 {
8591 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8592 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8593 build_real (type, dconsthalf));
8594 return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
8595 }
8596
8597 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8598 if (BUILTIN_CBRT_P (fcode))
8599 {
8600 tree arg = CALL_EXPR_ARG (arg0, 0);
8601 if (tree_expr_nonnegative_p (arg))
8602 {
8603 const REAL_VALUE_TYPE dconstroot
8604 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8605 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8606 build_real (type, dconstroot));
8607 return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
8608 }
8609 }
8610
8611 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
8612 if (fcode == BUILT_IN_POW
8613 || fcode == BUILT_IN_POWF
8614 || fcode == BUILT_IN_POWL)
8615 {
8616 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8617 if (tree_expr_nonnegative_p (arg00))
8618 {
8619 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8620 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
8621 return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
8622 }
8623 }
8624 }
8625
8626 return NULL_TREE;
8627 }
8628
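/* A few of the pow folds above as source-level identities:

     pow (x, 0.0)    -> 1.0
     pow (x, 1.0)    -> x
     pow (x, -1.0)   -> 1.0 / x
     pow (x, 0.5)    -> sqrt (x)    with -funsafe-math-optimizations
     pow (2.0, 10.0) -> 1024.0      evaluated at compile time  */
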
8629 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8630 Return NULL_TREE if no simplification can be made. */
8631 static tree
8632 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
8633 tree arg0, tree arg1, tree type)
8634 {
8635 if (!validate_arg (arg0, REAL_TYPE)
8636 || !validate_arg (arg1, INTEGER_TYPE))
8637 return NULL_TREE;
8638
8639 /* Optimize powi(1.0,y) = 1.0. */
8640 if (real_onep (arg0))
8641 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8642
8643 if (tree_fits_shwi_p (arg1))
8644 {
8645 HOST_WIDE_INT c = tree_to_shwi (arg1);
8646
8647 /* Evaluate powi at compile-time. */
8648 if (TREE_CODE (arg0) == REAL_CST
8649 && !TREE_OVERFLOW (arg0))
8650 {
8651 REAL_VALUE_TYPE x;
8652 x = TREE_REAL_CST (arg0);
8653 real_powi (&x, TYPE_MODE (type), &x, c);
8654 return build_real (type, x);
8655 }
8656
8657 /* Optimize pow(x,0) = 1.0. */
8658 if (c == 0)
8659 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8660 arg0);
8661
8662 /* Optimize pow(x,1) = x. */
8663 if (c == 1)
8664 return arg0;
8665
8666 /* Optimize pow(x,-1) = 1.0/x. */
8667 if (c == -1)
8668 return fold_build2_loc (loc, RDIV_EXPR, type,
8669 build_real (type, dconst1), arg0);
8670 }
8671
8672 return NULL_TREE;
8673 }
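
/* A sketch of the powi folds above ("->" marks the rewritten form):

     __builtin_powi (1.0, n)  ->  1.0
     __builtin_powi (x, 0)    ->  1.0
     __builtin_powi (x, 1)    ->  x
     __builtin_powi (x, -1)   ->  1.0 / x

   A REAL_CST base with a known integer exponent is evaluated outright
   through real_powi.  */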
8674
8675 /* A subroutine of fold_builtin to fold the various exponent
8676 functions. Return NULL_TREE if no simplification can be made.
8677 FUNC is the corresponding MPFR exponent function. */
8678
8679 static tree
8680 fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
8681 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8682 {
8683 if (validate_arg (arg, REAL_TYPE))
8684 {
8685 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8686 tree res;
8687
8688 /* Calculate the result when the argument is a constant. */
8689 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8690 return res;
8691
8692 /* Optimize expN(logN(x)) = x. */
8693 if (flag_unsafe_math_optimizations)
8694 {
8695 const enum built_in_function fcode = builtin_mathfn_code (arg);
8696
8697 if ((func == mpfr_exp
8698 && (fcode == BUILT_IN_LOG
8699 || fcode == BUILT_IN_LOGF
8700 || fcode == BUILT_IN_LOGL))
8701 || (func == mpfr_exp2
8702 && (fcode == BUILT_IN_LOG2
8703 || fcode == BUILT_IN_LOG2F
8704 || fcode == BUILT_IN_LOG2L))
8705 || (func == mpfr_exp10
8706 && (fcode == BUILT_IN_LOG10
8707 || fcode == BUILT_IN_LOG10F
8708 || fcode == BUILT_IN_LOG10L)))
8709 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8710 }
8711 }
8712
8713 return NULL_TREE;
8714 }
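
/* A sketch of the inverse-function fold above, applied only under
   -funsafe-math-optimizations (it is wrong for x <= 0, where logN
   yields NaN or -Inf):

     exp (log (x))      ->  x
     exp2 (log2 (x))    ->  x
     exp10 (log10 (x))  ->  x

   Constant arguments are instead evaluated directly with MPFR via
   do_mpfr_arg1.  */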
8715
8716 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8717 arguments to the call, and TYPE is its return type.
8718 Return NULL_TREE if no simplification can be made. */
8719
8720 static tree
8721 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
8722 {
8723 if (!validate_arg (arg1, POINTER_TYPE)
8724 || !validate_arg (arg2, INTEGER_TYPE)
8725 || !validate_arg (len, INTEGER_TYPE))
8726 return NULL_TREE;
8727 else
8728 {
8729 const char *p1;
8730
8731 if (TREE_CODE (arg2) != INTEGER_CST
8732 || !tree_fits_uhwi_p (len))
8733 return NULL_TREE;
8734
8735 p1 = c_getstr (arg1);
8736 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
8737 {
8738 char c;
8739 const char *r;
8740 tree tem;
8741
8742 if (target_char_cast (arg2, &c))
8743 return NULL_TREE;
8744
8745 r = (const char *) memchr (p1, c, tree_to_uhwi (len));
8746
8747 if (r == NULL)
8748 return build_int_cst (TREE_TYPE (arg1), 0);
8749
8750 tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
8751 return fold_convert_loc (loc, type, tem);
8752 }
8753 return NULL_TREE;
8754 }
8755 }
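
/* A sketch of the constant fold above: when the haystack is a string
   literal, the character and length are constants, and LEN does not
   exceed the literal's size, the call is evaluated at compile time:

     memchr ("hello", 'l', 6)  ->  "hello" + 2
     memchr ("hello", 'z', 6)  ->  (void *) 0  */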
8756
8757 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8758 Return NULL_TREE if no simplification can be made. */
8759
8760 static tree
8761 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8762 {
8763 const char *p1, *p2;
8764
8765 if (!validate_arg (arg1, POINTER_TYPE)
8766 || !validate_arg (arg2, POINTER_TYPE)
8767 || !validate_arg (len, INTEGER_TYPE))
8768 return NULL_TREE;
8769
8770 /* If the LEN parameter is zero, return zero. */
8771 if (integer_zerop (len))
8772 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8773 arg1, arg2);
8774
8775 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8776 if (operand_equal_p (arg1, arg2, 0))
8777 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8778
8779 p1 = c_getstr (arg1);
8780 p2 = c_getstr (arg2);
8781
8782 /* If all arguments are constant, and the value of len is not greater
8783 than the lengths of arg1 and arg2, evaluate at compile-time. */
8784 if (tree_fits_uhwi_p (len) && p1 && p2
8785 && compare_tree_int (len, strlen (p1) + 1) <= 0
8786 && compare_tree_int (len, strlen (p2) + 1) <= 0)
8787 {
8788 const int r = memcmp (p1, p2, tree_to_uhwi (len));
8789
8790 if (r > 0)
8791 return integer_one_node;
8792 else if (r < 0)
8793 return integer_minus_one_node;
8794 else
8795 return integer_zero_node;
8796 }
8797
8798 /* If len parameter is one, return an expression corresponding to
8799 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8800 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8801 {
8802 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8803 tree cst_uchar_ptr_node
8804 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8805
8806 tree ind1
8807 = fold_convert_loc (loc, integer_type_node,
8808 build1 (INDIRECT_REF, cst_uchar_node,
8809 fold_convert_loc (loc,
8810 cst_uchar_ptr_node,
8811 arg1)));
8812 tree ind2
8813 = fold_convert_loc (loc, integer_type_node,
8814 build1 (INDIRECT_REF, cst_uchar_node,
8815 fold_convert_loc (loc,
8816 cst_uchar_ptr_node,
8817 arg2)));
8818 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8819 }
8820
8821 return NULL_TREE;
8822 }
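
/* A sketch of the memcmp folds above:

     memcmp (p, q, 0)       ->  0    (p and q still evaluated)
     memcmp (p, p, n)       ->  0
     memcmp ("ab", "ac", 2) ->  -1   (evaluated at compile time)
     memcmp (p, q, 1)       ->  *(const unsigned char *) p
                                - *(const unsigned char *) q  */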
8823
8824 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
8825 Return NULL_TREE if no simplification can be made. */
8826
8827 static tree
8828 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
8829 {
8830 const char *p1, *p2;
8831
8832 if (!validate_arg (arg1, POINTER_TYPE)
8833 || !validate_arg (arg2, POINTER_TYPE))
8834 return NULL_TREE;
8835
8836 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8837 if (operand_equal_p (arg1, arg2, 0))
8838 return integer_zero_node;
8839
8840 p1 = c_getstr (arg1);
8841 p2 = c_getstr (arg2);
8842
8843 if (p1 && p2)
8844 {
8845 const int i = strcmp (p1, p2);
8846 if (i < 0)
8847 return integer_minus_one_node;
8848 else if (i > 0)
8849 return integer_one_node;
8850 else
8851 return integer_zero_node;
8852 }
8853
8854 /* If the second arg is "", return *(const unsigned char*)arg1. */
8855 if (p2 && *p2 == '\0')
8856 {
8857 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8858 tree cst_uchar_ptr_node
8859 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8860
8861 return fold_convert_loc (loc, integer_type_node,
8862 build1 (INDIRECT_REF, cst_uchar_node,
8863 fold_convert_loc (loc,
8864 cst_uchar_ptr_node,
8865 arg1)));
8866 }
8867
8868 /* If the first arg is "", return -*(const unsigned char*)arg2. */
8869 if (p1 && *p1 == '\0')
8870 {
8871 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8872 tree cst_uchar_ptr_node
8873 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8874
8875 tree temp
8876 = fold_convert_loc (loc, integer_type_node,
8877 build1 (INDIRECT_REF, cst_uchar_node,
8878 fold_convert_loc (loc,
8879 cst_uchar_ptr_node,
8880 arg2)));
8881 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8882 }
8883
8884 return NULL_TREE;
8885 }
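
/* A sketch of the strcmp folds above:

     strcmp (p, p)        ->  0
     strcmp ("ab", "ac")  ->  -1    (evaluated at compile time)
     strcmp (p, "")       ->  *(const unsigned char *) p
     strcmp ("", q)       ->  -*(const unsigned char *) q  */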
8886
8887 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
8888 Return NULL_TREE if no simplification can be made. */
8889
8890 static tree
8891 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
8892 {
8893 const char *p1, *p2;
8894
8895 if (!validate_arg (arg1, POINTER_TYPE)
8896 || !validate_arg (arg2, POINTER_TYPE)
8897 || !validate_arg (len, INTEGER_TYPE))
8898 return NULL_TREE;
8899
8900 /* If the LEN parameter is zero, return zero. */
8901 if (integer_zerop (len))
8902 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8903 arg1, arg2);
8904
8905 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8906 if (operand_equal_p (arg1, arg2, 0))
8907 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8908
8909 p1 = c_getstr (arg1);
8910 p2 = c_getstr (arg2);
8911
8912 if (tree_fits_uhwi_p (len) && p1 && p2)
8913 {
8914 const int i = strncmp (p1, p2, tree_to_uhwi (len));
8915 if (i > 0)
8916 return integer_one_node;
8917 else if (i < 0)
8918 return integer_minus_one_node;
8919 else
8920 return integer_zero_node;
8921 }
8922
8923 /* If the second arg is "", and the length is greater than zero,
8924 return *(const unsigned char*)arg1. */
8925 if (p2 && *p2 == '\0'
8926 && TREE_CODE (len) == INTEGER_CST
8927 && tree_int_cst_sgn (len) == 1)
8928 {
8929 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8930 tree cst_uchar_ptr_node
8931 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8932
8933 return fold_convert_loc (loc, integer_type_node,
8934 build1 (INDIRECT_REF, cst_uchar_node,
8935 fold_convert_loc (loc,
8936 cst_uchar_ptr_node,
8937 arg1)));
8938 }
8939
8940 /* If the first arg is "", and the length is greater than zero,
8941 return -*(const unsigned char*)arg2. */
8942 if (p1 && *p1 == '\0'
8943 && TREE_CODE (len) == INTEGER_CST
8944 && tree_int_cst_sgn (len) == 1)
8945 {
8946 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8947 tree cst_uchar_ptr_node
8948 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8949
8950 tree temp = fold_convert_loc (loc, integer_type_node,
8951 build1 (INDIRECT_REF, cst_uchar_node,
8952 fold_convert_loc (loc,
8953 cst_uchar_ptr_node,
8954 arg2)));
8955 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8956 }
8957
8958 /* If len parameter is one, return an expression corresponding to
8959 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8960 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8961 {
8962 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8963 tree cst_uchar_ptr_node
8964 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8965
8966 tree ind1 = fold_convert_loc (loc, integer_type_node,
8967 build1 (INDIRECT_REF, cst_uchar_node,
8968 fold_convert_loc (loc,
8969 cst_uchar_ptr_node,
8970 arg1)));
8971 tree ind2 = fold_convert_loc (loc, integer_type_node,
8972 build1 (INDIRECT_REF, cst_uchar_node,
8973 fold_convert_loc (loc,
8974 cst_uchar_ptr_node,
8975 arg2)));
8976 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8977 }
8978
8979 return NULL_TREE;
8980 }
8981
8982 /* Fold function call to builtin signbit, signbitf or signbitl with argument
8983 ARG. Return NULL_TREE if no simplification can be made. */
8984
8985 static tree
8986 fold_builtin_signbit (location_t loc, tree arg, tree type)
8987 {
8988 if (!validate_arg (arg, REAL_TYPE))
8989 return NULL_TREE;
8990
8991 /* If ARG is a compile-time constant, determine the result. */
8992 if (TREE_CODE (arg) == REAL_CST
8993 && !TREE_OVERFLOW (arg))
8994 {
8995 REAL_VALUE_TYPE c;
8996
8997 c = TREE_REAL_CST (arg);
8998 return (REAL_VALUE_NEGATIVE (c)
8999 ? build_one_cst (type)
9000 : build_zero_cst (type));
9001 }
9002
9003 /* If ARG is non-negative, the result is always zero. */
9004 if (tree_expr_nonnegative_p (arg))
9005 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9006
9007 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9008 if (!HONOR_SIGNED_ZEROS (arg))
9009 return fold_convert (type,
9010 fold_build2_loc (loc, LT_EXPR, boolean_type_node, arg,
9011 build_real (TREE_TYPE (arg), dconst0)));
9012
9013 return NULL_TREE;
9014 }
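
/* A sketch of the signbit folds above:

     signbit (-3.0)  ->  1
     signbit (3.0)   ->  0
     signbit (x)     ->  0          when x is known nonnegative
     signbit (x)     ->  x < 0.0    when the format has no signed zeros  */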
9015
9016 /* Fold function call to builtin copysign, copysignf or copysignl with
9017 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9018 be made. */
9019
9020 static tree
9021 fold_builtin_copysign (location_t loc, tree fndecl,
9022 tree arg1, tree arg2, tree type)
9023 {
9024 tree tem;
9025
9026 if (!validate_arg (arg1, REAL_TYPE)
9027 || !validate_arg (arg2, REAL_TYPE))
9028 return NULL_TREE;
9029
9030 /* copysign(X,X) is X. */
9031 if (operand_equal_p (arg1, arg2, 0))
9032 return fold_convert_loc (loc, type, arg1);
9033
9034 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9035 if (TREE_CODE (arg1) == REAL_CST
9036 && TREE_CODE (arg2) == REAL_CST
9037 && !TREE_OVERFLOW (arg1)
9038 && !TREE_OVERFLOW (arg2))
9039 {
9040 REAL_VALUE_TYPE c1, c2;
9041
9042 c1 = TREE_REAL_CST (arg1);
9043 c2 = TREE_REAL_CST (arg2);
9044 /* c1.sign := c2.sign. */
9045 real_copysign (&c1, &c2);
9046 return build_real (type, c1);
9047 }
9048
9049 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9050 Remember to evaluate Y for side-effects. */
9051 if (tree_expr_nonnegative_p (arg2))
9052 return omit_one_operand_loc (loc, type,
9053 fold_build1_loc (loc, ABS_EXPR, type, arg1),
9054 arg2);
9055
9056 /* Strip sign changing operations for the first argument. */
9057 tem = fold_strip_sign_ops (arg1);
9058 if (tem)
9059 return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
9060
9061 return NULL_TREE;
9062 }
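
/* A sketch of the copysign folds above:

     copysign (x, x)       ->  x
     copysign (-2.0, 3.0)  ->  2.0        (constants folded outright)
     copysign (x, y)       ->  fabs (x)   when y is known nonnegative
     copysign (-x, y)      ->  copysign (x, y)  via fold_strip_sign_ops  */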
9063
9064 /* Fold a call to builtin isascii with argument ARG. */
9065
9066 static tree
9067 fold_builtin_isascii (location_t loc, tree arg)
9068 {
9069 if (!validate_arg (arg, INTEGER_TYPE))
9070 return NULL_TREE;
9071 else
9072 {
9073 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9074 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9075 build_int_cst (integer_type_node,
9076 ~ (unsigned HOST_WIDE_INT) 0x7f));
9077 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
9078 arg, integer_zero_node);
9079 }
9080 }
9081
9082 /* Fold a call to builtin toascii with argument ARG. */
9083
9084 static tree
9085 fold_builtin_toascii (location_t loc, tree arg)
9086 {
9087 if (!validate_arg (arg, INTEGER_TYPE))
9088 return NULL_TREE;
9089
9090 /* Transform toascii(c) -> (c & 0x7f). */
9091 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
9092 build_int_cst (integer_type_node, 0x7f));
9093 }
9094
9095 /* Fold a call to builtin isdigit with argument ARG. */
9096
9097 static tree
9098 fold_builtin_isdigit (location_t loc, tree arg)
9099 {
9100 if (!validate_arg (arg, INTEGER_TYPE))
9101 return NULL_TREE;
9102 else
9103 {
9104 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9105 /* According to the C standard, isdigit is unaffected by locale.
9106 However, it definitely is affected by the target character set. */
9107 unsigned HOST_WIDE_INT target_digit0
9108 = lang_hooks.to_target_charset ('0');
9109
9110 if (target_digit0 == 0)
9111 return NULL_TREE;
9112
9113 arg = fold_convert_loc (loc, unsigned_type_node, arg);
9114 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
9115 build_int_cst (unsigned_type_node, target_digit0));
9116 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
9117 build_int_cst (unsigned_type_node, 9));
9118 }
9119 }
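
/* A sketch of the <ctype.h> folds above, which avoid the library call
   entirely:

     isascii (c)  ->  (c & ~0x7f) == 0
     toascii (c)  ->  c & 0x7f
     isdigit (c)  ->  (unsigned) c - '0' <= 9

   where '0' is the digit zero in the target character set, obtained
   through lang_hooks.to_target_charset.  */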
9120
9121 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9122
9123 static tree
9124 fold_builtin_fabs (location_t loc, tree arg, tree type)
9125 {
9126 if (!validate_arg (arg, REAL_TYPE))
9127 return NULL_TREE;
9128
9129 arg = fold_convert_loc (loc, type, arg);
9130 if (TREE_CODE (arg) == REAL_CST)
9131 return fold_abs_const (arg, type);
9132 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9133 }
9134
9135 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9136
9137 static tree
9138 fold_builtin_abs (location_t loc, tree arg, tree type)
9139 {
9140 if (!validate_arg (arg, INTEGER_TYPE))
9141 return NULL_TREE;
9142
9143 arg = fold_convert_loc (loc, type, arg);
9144 if (TREE_CODE (arg) == INTEGER_CST)
9145 return fold_abs_const (arg, type);
9146 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9147 }
9148
9149 /* Fold a fma operation with arguments ARG[012]. */
9150
9151 tree
9152 fold_fma (location_t loc ATTRIBUTE_UNUSED,
9153 tree type, tree arg0, tree arg1, tree arg2)
9154 {
9155 if (TREE_CODE (arg0) == REAL_CST
9156 && TREE_CODE (arg1) == REAL_CST
9157 && TREE_CODE (arg2) == REAL_CST)
9158 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
9159
9160 return NULL_TREE;
9161 }
9162
9163 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
9164
9165 static tree
9166 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
9167 {
9168 if (validate_arg (arg0, REAL_TYPE)
9169 && validate_arg (arg1, REAL_TYPE)
9170 && validate_arg (arg2, REAL_TYPE))
9171 {
9172 tree tem = fold_fma (loc, type, arg0, arg1, arg2);
9173 if (tem)
9174 return tem;
9175
9176 /* ??? Only expand to FMA_EXPR if it's directly supported. */
9177 if (optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
9178 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
9179 }
9180 return NULL_TREE;
9181 }
9182
9183 /* Fold a call to builtin fmin or fmax. */
9184
9185 static tree
9186 fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
9187 tree type, bool max)
9188 {
9189 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9190 {
9191 /* Calculate the result when the argument is a constant. */
9192 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9193
9194 if (res)
9195 return res;
9196
9197 /* If either argument is NaN, return the other one. Avoid the
9198 transformation if we get (and honor) a signalling NaN. Using
9199 omit_one_operand() ensures we create a non-lvalue. */
9200 if (TREE_CODE (arg0) == REAL_CST
9201 && real_isnan (&TREE_REAL_CST (arg0))
9202 && (! HONOR_SNANS (arg0)
9203 || ! TREE_REAL_CST (arg0).signalling))
9204 return omit_one_operand_loc (loc, type, arg1, arg0);
9205 if (TREE_CODE (arg1) == REAL_CST
9206 && real_isnan (&TREE_REAL_CST (arg1))
9207 && (! HONOR_SNANS (arg1)
9208 || ! TREE_REAL_CST (arg1).signalling))
9209 return omit_one_operand_loc (loc, type, arg0, arg1);
9210
9211 /* Transform fmin/fmax(x,x) -> x. */
9212 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9213 return omit_one_operand_loc (loc, type, arg0, arg1);
9214
9215 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9216 functions to return the numeric arg if the other one is NaN.
9217 These tree codes don't honor that, so only transform if
9218 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9219 handled, so we don't have to worry about it either. */
9220 if (flag_finite_math_only)
9221 return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
9222 fold_convert_loc (loc, type, arg0),
9223 fold_convert_loc (loc, type, arg1));
9224 }
9225 return NULL_TREE;
9226 }
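
/* A sketch of the fmin/fmax folds above:

     fmax (x, x)    ->  x
     fmax (x, NaN)  ->  x    (quiet NaN; C99 returns the numeric arg)
     fmax (NaN, y)  ->  y
     fmax (x, y)    ->  MAX_EXPR <x, y>   under -ffinite-math-only

   and symmetrically for fmin with MIN_EXPR.  */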
9227
9228 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9229
9230 static tree
9231 fold_builtin_carg (location_t loc, tree arg, tree type)
9232 {
9233 if (validate_arg (arg, COMPLEX_TYPE)
9234 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9235 {
9236 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9237
9238 if (atan2_fn)
9239 {
9240 tree new_arg = builtin_save_expr (arg);
9241 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9242 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9243 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9244 }
9245 }
9246
9247 return NULL_TREE;
9248 }
9249
9250 /* Fold a call to builtin logb/ilogb. */
9251
9252 static tree
9253 fold_builtin_logb (location_t loc, tree arg, tree rettype)
9254 {
9255 if (! validate_arg (arg, REAL_TYPE))
9256 return NULL_TREE;
9257
9258 STRIP_NOPS (arg);
9259
9260 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9261 {
9262 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9263
9264 switch (value->cl)
9265 {
9266 case rvc_nan:
9267 case rvc_inf:
9268 /* If arg is Inf or NaN and we're logb, return it. */
9269 if (TREE_CODE (rettype) == REAL_TYPE)
9270 {
9271 /* For logb(-Inf) we have to return +Inf. */
9272 if (real_isinf (value) && real_isneg (value))
9273 {
9274 REAL_VALUE_TYPE tem;
9275 real_inf (&tem);
9276 return build_real (rettype, tem);
9277 }
9278 return fold_convert_loc (loc, rettype, arg);
9279 }
9280 /* Fall through... */
9281 case rvc_zero:
9282 /* Zero may set errno and/or raise an exception for logb; and
9283 for ilogb we don't know the target's FP_ILOGB0 value. */
9284 return NULL_TREE;
9285 case rvc_normal:
9286 /* For normal numbers, proceed iff radix == 2. In GCC,
9287 normalized significands are in the range [0.5, 1.0). We
9288 want the exponent as if they were [1.0, 2.0) so get the
9289 exponent and subtract 1. */
9290 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9291 return fold_convert_loc (loc, rettype,
9292 build_int_cst (integer_type_node,
9293 REAL_EXP (value)-1));
9294 break;
9295 }
9296 }
9297
9298 return NULL_TREE;
9299 }
9300
9301 /* Fold a call to builtin significand, if radix == 2. */
9302
9303 static tree
9304 fold_builtin_significand (location_t loc, tree arg, tree rettype)
9305 {
9306 if (! validate_arg (arg, REAL_TYPE))
9307 return NULL_TREE;
9308
9309 STRIP_NOPS (arg);
9310
9311 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9312 {
9313 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9314
9315 switch (value->cl)
9316 {
9317 case rvc_zero:
9318 case rvc_nan:
9319 case rvc_inf:
9320 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9321 return fold_convert_loc (loc, rettype, arg);
9322 case rvc_normal:
9323 /* For normal numbers, proceed iff radix == 2. */
9324 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9325 {
9326 REAL_VALUE_TYPE result = *value;
9327 /* In GCC, normalized significands are in the range [0.5,
9328 1.0). We want them to be [1.0, 2.0) so set the
9329 exponent to 1. */
9330 SET_REAL_EXP (&result, 1);
9331 return build_real (rettype, result);
9332 }
9333 break;
9334 }
9335 }
9336
9337 return NULL_TREE;
9338 }
9339
9340 /* Fold a call to builtin frexp, we can assume the base is 2. */
9341
9342 static tree
9343 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
9344 {
9345 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9346 return NULL_TREE;
9347
9348 STRIP_NOPS (arg0);
9349
9350 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9351 return NULL_TREE;
9352
9353 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9354
9355 /* Proceed if a valid pointer type was passed in. */
9356 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9357 {
9358 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9359 tree frac, exp;
9360
9361 switch (value->cl)
9362 {
9363 case rvc_zero:
9364 /* For +-0, return (*exp = 0, +-0). */
9365 exp = integer_zero_node;
9366 frac = arg0;
9367 break;
9368 case rvc_nan:
9369 case rvc_inf:
9370 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9371 return omit_one_operand_loc (loc, rettype, arg0, arg1);
9372 case rvc_normal:
9373 {
9374 /* Since the frexp function always expects base 2, and in
9375 GCC normalized significands are already in the range
9376 [0.5, 1.0), we have exactly what frexp wants. */
9377 REAL_VALUE_TYPE frac_rvt = *value;
9378 SET_REAL_EXP (&frac_rvt, 0);
9379 frac = build_real (rettype, frac_rvt);
9380 exp = build_int_cst (integer_type_node, REAL_EXP (value));
9381 }
9382 break;
9383 default:
9384 gcc_unreachable ();
9385 }
9386
9387 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
9388 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
9389 TREE_SIDE_EFFECTS (arg1) = 1;
9390 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
9391 }
9392
9393 return NULL_TREE;
9394 }
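
/* A sketch of the frexp constant fold above, e.g. for

     double f = frexp (24.0, &e);

   24.0 is 0.75 * 2**5, so the call folds to the compound expression
   (*&e = 5, 0.75): f becomes 0.75 and e becomes 5.  Zero folds to
   (*&e = 0, 0.0), while Inf and NaN are returned unchanged with *e
   left unspecified.  */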
9395
9396 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9397 then we can assume the base is two. If it's false, then we have to
9398 check the mode of the TYPE parameter in certain cases. */
9399
9400 static tree
9401 fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
9402 tree type, bool ldexp)
9403 {
9404 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9405 {
9406 STRIP_NOPS (arg0);
9407 STRIP_NOPS (arg1);
9408
9409 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9410 if (real_zerop (arg0) || integer_zerop (arg1)
9411 || (TREE_CODE (arg0) == REAL_CST
9412 && !real_isfinite (&TREE_REAL_CST (arg0))))
9413 return omit_one_operand_loc (loc, type, arg0, arg1);
9414
9415 /* If both arguments are constant, then try to evaluate it. */
9416 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9417 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9418 && tree_fits_shwi_p (arg1))
9419 {
9420 /* Bound the maximum adjustment to twice the range of the
9421 mode's valid exponents. Use abs to ensure the range is
9422 positive as a sanity check. */
9423 const long max_exp_adj = 2 *
9424 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9425 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9426
9427 /* Get the user-requested adjustment. */
9428 const HOST_WIDE_INT req_exp_adj = tree_to_shwi (arg1);
9429
9430 /* The requested adjustment must be inside this range. This
9431 is a preliminary cap to avoid things like overflow, we
9432 may still fail to compute the result for other reasons. */
9433 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9434 {
9435 REAL_VALUE_TYPE initial_result;
9436
9437 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9438
9439 /* Ensure we didn't overflow. */
9440 if (! real_isinf (&initial_result))
9441 {
9442 const REAL_VALUE_TYPE trunc_result
9443 = real_value_truncate (TYPE_MODE (type), initial_result);
9444
9445 /* Only proceed if the target mode can hold the
9446 resulting value. */
9447 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9448 return build_real (type, trunc_result);
9449 }
9450 }
9451 }
9452 }
9453
9454 return NULL_TREE;
9455 }
9456
9457 /* Fold a call to builtin modf. */
9458
9459 static tree
9460 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9461 {
9462 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9463 return NULL_TREE;
9464
9465 STRIP_NOPS (arg0);
9466
9467 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9468 return NULL_TREE;
9469
9470 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9471
9472 /* Proceed if a valid pointer type was passed in. */
9473 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9474 {
9475 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9476 REAL_VALUE_TYPE trunc, frac;
9477
9478 switch (value->cl)
9479 {
9480 case rvc_nan:
9481 case rvc_zero:
9482 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9483 trunc = frac = *value;
9484 break;
9485 case rvc_inf:
9486 /* For +-Inf, return (*arg1 = arg0, +-0). */
9487 frac = dconst0;
9488 frac.sign = value->sign;
9489 trunc = *value;
9490 break;
9491 case rvc_normal:
9492 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9493 real_trunc (&trunc, VOIDmode, value);
9494 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9495 /* If the original number was negative and already
9496 integral, then the fractional part is -0.0. */
9497 if (value->sign && frac.cl == rvc_zero)
9498 frac.sign = value->sign;
9499 break;
9500 }
9501
9502 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9503 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9504 build_real (rettype, trunc));
9505 TREE_SIDE_EFFECTS (arg1) = 1;
9506 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9507 build_real (rettype, frac));
9508 }
9509
9510 return NULL_TREE;
9511 }
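
/* A sketch of the modf constant fold above, e.g. for

     double f = modf (3.25, &i);

   the call folds to (*&i = 3.0, 0.25).  For +-Inf the result is
   (*&i = +-Inf, +-0.0), and a negative integral argument such as -2.0
   yields a fractional part of -0.0 to preserve the sign.  */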
9512
9513 /* Given a location LOC, an interclass builtin function decl FNDECL
9514 and its single argument ARG, return a folded expression computing
9515 the same, or NULL_TREE if we either couldn't or didn't want to fold
9516 (the latter happens if there's an RTL instruction available). */
9517
9518 static tree
9519 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9520 {
9521 machine_mode mode;
9522
9523 if (!validate_arg (arg, REAL_TYPE))
9524 return NULL_TREE;
9525
9526 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9527 return NULL_TREE;
9528
9529 mode = TYPE_MODE (TREE_TYPE (arg));
9530
9531 /* If there is no optab, try generic code. */
9532 switch (DECL_FUNCTION_CODE (fndecl))
9533 {
9534 tree result;
9535
9536 CASE_FLT_FN (BUILT_IN_ISINF):
9537 {
9538 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9539 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9540 tree const type = TREE_TYPE (arg);
9541 REAL_VALUE_TYPE r;
9542 char buf[128];
9543
9544 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9545 real_from_string (&r, buf);
9546 result = build_call_expr (isgr_fn, 2,
9547 fold_build1_loc (loc, ABS_EXPR, type, arg),
9548 build_real (type, r));
9549 return result;
9550 }
9551 CASE_FLT_FN (BUILT_IN_FINITE):
9552 case BUILT_IN_ISFINITE:
9553 {
9554 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9555 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9556 tree const type = TREE_TYPE (arg);
9557 REAL_VALUE_TYPE r;
9558 char buf[128];
9559
9560 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9561 real_from_string (&r, buf);
9562 result = build_call_expr (isle_fn, 2,
9563 fold_build1_loc (loc, ABS_EXPR, type, arg),
9564 build_real (type, r));
9565 /*result = fold_build2_loc (loc, UNGT_EXPR,
9566 TREE_TYPE (TREE_TYPE (fndecl)),
9567 fold_build1_loc (loc, ABS_EXPR, type, arg),
9568 build_real (type, r));
9569 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9570 TREE_TYPE (TREE_TYPE (fndecl)),
9571 result);*/
9572 return result;
9573 }
9574 case BUILT_IN_ISNORMAL:
9575 {
9576 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9577 islessequal(fabs(x),DBL_MAX). */
9578 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9579 tree const isge_fn = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
9580 tree const type = TREE_TYPE (arg);
9581 REAL_VALUE_TYPE rmax, rmin;
9582 char buf[128];
9583
9584 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9585 real_from_string (&rmax, buf);
9586 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9587 real_from_string (&rmin, buf);
9588 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9589 result = build_call_expr (isle_fn, 2, arg,
9590 build_real (type, rmax));
9591 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
9592 build_call_expr (isge_fn, 2, arg,
9593 build_real (type, rmin)));
9594 return result;
9595 }
9596 default:
9597 break;
9598 }
9599
9600 return NULL_TREE;
9601 }
9602
9603 /* Fold a call to __builtin_isnan, __builtin_isinf or __builtin_finite.
9604 ARG is the argument and BUILTIN_INDEX selects the classification. */
9605
9606 static tree
9607 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
9608 {
9609 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9610 REAL_VALUE_TYPE r;
9611
9612 if (!validate_arg (arg, REAL_TYPE))
9613 return NULL_TREE;
9614
9615 switch (builtin_index)
9616 {
9617 case BUILT_IN_ISINF:
9618 if (!HONOR_INFINITIES (arg))
9619 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9620
9621 if (TREE_CODE (arg) == REAL_CST)
9622 {
9623 r = TREE_REAL_CST (arg);
9624 if (real_isinf (&r))
9625 return real_compare (GT_EXPR, &r, &dconst0)
9626 ? integer_one_node : integer_minus_one_node;
9627 else
9628 return integer_zero_node;
9629 }
9630
9631 return NULL_TREE;
9632
9633 case BUILT_IN_ISINF_SIGN:
9634 {
9635 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9636 /* In a boolean context, GCC will fold the inner COND_EXPR to
9637 1. So e.g. "if (isinf_sign(x))" would be folded to just
9638 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9639 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
9640 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
9641 tree tmp = NULL_TREE;
9642
9643 arg = builtin_save_expr (arg);
9644
9645 if (signbit_fn && isinf_fn)
9646 {
9647 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9648 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
9649
9650 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9651 signbit_call, integer_zero_node);
9652 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9653 isinf_call, integer_zero_node);
9654
9655 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
9656 integer_minus_one_node, integer_one_node);
9657 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9658 isinf_call, tmp,
9659 integer_zero_node);
9660 }
9661
9662 return tmp;
9663 }
9664
9665 case BUILT_IN_ISFINITE:
9666 if (!HONOR_NANS (arg)
9667 && !HONOR_INFINITIES (arg))
9668 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9669
9670 if (TREE_CODE (arg) == REAL_CST)
9671 {
9672 r = TREE_REAL_CST (arg);
9673 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
9674 }
9675
9676 return NULL_TREE;
9677
9678 case BUILT_IN_ISNAN:
9679 if (!HONOR_NANS (arg))
9680 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9681
9682 if (TREE_CODE (arg) == REAL_CST)
9683 {
9684 r = TREE_REAL_CST (arg);
9685 return real_isnan (&r) ? integer_one_node : integer_zero_node;
9686 }
9687
9688 arg = builtin_save_expr (arg);
9689 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
9690
9691 default:
9692 gcc_unreachable ();
9693 }
9694 }
9695
9696 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9697 This builtin will generate code to return the appropriate floating
9698 point classification depending on the value of the floating point
9699 number passed in. The possible return values must be supplied as
9700 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9701 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis stands for exactly
9702 one floating point argument, which is "type generic". */
9703
9704 static tree
9705 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
9706 {
9707 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9708 arg, type, res, tmp;
9709 machine_mode mode;
9710 REAL_VALUE_TYPE r;
9711 char buf[128];
9712
9713 /* Verify the required arguments in the original call. */
9714 if (nargs != 6
9715 || !validate_arg (args[0], INTEGER_TYPE)
9716 || !validate_arg (args[1], INTEGER_TYPE)
9717 || !validate_arg (args[2], INTEGER_TYPE)
9718 || !validate_arg (args[3], INTEGER_TYPE)
9719 || !validate_arg (args[4], INTEGER_TYPE)
9720 || !validate_arg (args[5], REAL_TYPE))
9721 return NULL_TREE;
9722
9723 fp_nan = args[0];
9724 fp_infinite = args[1];
9725 fp_normal = args[2];
9726 fp_subnormal = args[3];
9727 fp_zero = args[4];
9728 arg = args[5];
9729 type = TREE_TYPE (arg);
9730 mode = TYPE_MODE (type);
9731 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9732
9733 /* fpclassify(x) ->
9734 isnan(x) ? FP_NAN :
9735 (fabs(x) == Inf ? FP_INFINITE :
9736 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9737 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9738
9739 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9740 build_real (type, dconst0));
9741 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9742 tmp, fp_zero, fp_subnormal);
9743
9744 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9745 real_from_string (&r, buf);
9746 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9747 arg, build_real (type, r));
9748 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
9749
9750 if (HONOR_INFINITIES (mode))
9751 {
9752 real_inf (&r);
9753 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9754 build_real (type, r));
9755 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9756 fp_infinite, res);
9757 }
9758
9759 if (HONOR_NANS (mode))
9760 {
9761 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9762 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
9763 }
9764
9765 return res;
9766 }
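
/* A rough sketch of the expansion above for a double argument x,
   writing T for the saved fabs (x):

     fpclassify (FP_NAN, FP_INFINITE, FP_NORMAL, FP_SUBNORMAL, FP_ZERO, x)
       ->  T == T
             ? (T == __builtin_inf () ? FP_INFINITE
                : T >= DBL_MIN ? FP_NORMAL
                : T == 0.0 ? FP_ZERO : FP_SUBNORMAL)
             : FP_NAN

   where DBL_MIN stands for the 0x1p-1022 constant built from the
   mode's minimum exponent.  */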
9767
9768 /* Fold a call to an unordered comparison function such as
9769 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9770 being called and ARG0 and ARG1 are the arguments for the call.
9771 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9772 the opposite of the desired result. UNORDERED_CODE is used
9773 for modes that can hold NaNs and ORDERED_CODE is used for
9774 the rest. */
9775
9776 static tree
9777 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9778 enum tree_code unordered_code,
9779 enum tree_code ordered_code)
9780 {
9781 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9782 enum tree_code code;
9783 tree type0, type1;
9784 enum tree_code code0, code1;
9785 tree cmp_type = NULL_TREE;
9786
9787 type0 = TREE_TYPE (arg0);
9788 type1 = TREE_TYPE (arg1);
9789
9790 code0 = TREE_CODE (type0);
9791 code1 = TREE_CODE (type1);
9792
9793 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9794 /* Choose the wider of two real types. */
9795 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9796 ? type0 : type1;
9797 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9798 cmp_type = type0;
9799 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9800 cmp_type = type1;
9801
9802 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9803 arg1 = fold_convert_loc (loc, cmp_type, arg1);
9804
9805 if (unordered_code == UNORDERED_EXPR)
9806 {
9807 if (!HONOR_NANS (arg0))
9808 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9809 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
9810 }
9811
9812 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
9813 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9814 fold_build2_loc (loc, code, type, arg0, arg1));
9815 }
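
/* A sketch of the folds above: each classification macro is built as
   the logical negation of the opposite comparison, so a NaN operand
   yields 0 without raising an invalid-operation exception:

     isgreater (x, y)    ->  !(x UNLE y)
     islessequal (x, y)  ->  !(x UNGT y)
     isunordered (x, y)  ->  x UNORDERED y  (or 0 if NaNs are not honored)

   When the operands cannot be NaN, the plain ordered codes (LE_EXPR,
   GT_EXPR, ...) are negated instead.  */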
9816
9817 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
9818 arithmetic if it can never overflow, or into internal functions that
9819 return both the result of the arithmetic and an overflow flag in
9820 a complex integer result, or some other check for overflow. */
9821
9822 static tree
9823 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
9824 tree arg0, tree arg1, tree arg2)
9825 {
9826 enum internal_fn ifn = IFN_LAST;
9827 tree type = TREE_TYPE (TREE_TYPE (arg2));
9828 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
9829 switch (fcode)
9830 {
9831 case BUILT_IN_ADD_OVERFLOW:
9832 case BUILT_IN_SADD_OVERFLOW:
9833 case BUILT_IN_SADDL_OVERFLOW:
9834 case BUILT_IN_SADDLL_OVERFLOW:
9835 case BUILT_IN_UADD_OVERFLOW:
9836 case BUILT_IN_UADDL_OVERFLOW:
9837 case BUILT_IN_UADDLL_OVERFLOW:
9838 ifn = IFN_ADD_OVERFLOW;
9839 break;
9840 case BUILT_IN_SUB_OVERFLOW:
9841 case BUILT_IN_SSUB_OVERFLOW:
9842 case BUILT_IN_SSUBL_OVERFLOW:
9843 case BUILT_IN_SSUBLL_OVERFLOW:
9844 case BUILT_IN_USUB_OVERFLOW:
9845 case BUILT_IN_USUBL_OVERFLOW:
9846 case BUILT_IN_USUBLL_OVERFLOW:
9847 ifn = IFN_SUB_OVERFLOW;
9848 break;
9849 case BUILT_IN_MUL_OVERFLOW:
9850 case BUILT_IN_SMUL_OVERFLOW:
9851 case BUILT_IN_SMULL_OVERFLOW:
9852 case BUILT_IN_SMULLL_OVERFLOW:
9853 case BUILT_IN_UMUL_OVERFLOW:
9854 case BUILT_IN_UMULL_OVERFLOW:
9855 case BUILT_IN_UMULLL_OVERFLOW:
9856 ifn = IFN_MUL_OVERFLOW;
9857 break;
9858 default:
9859 gcc_unreachable ();
9860 }
9861 tree ctype = build_complex_type (type);
9862 tree call = build_call_expr_internal_loc (loc, ifn, ctype,
9863 2, arg0, arg1);
9864 tree tgt = save_expr (call);
9865 tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
9866 tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
9867 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
9868 tree store
9869 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
9870 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
9871 }
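
/* A sketch of the rewrite above for, e.g.,

     __builtin_add_overflow (a, b, &r)

   which becomes (with tgt a save_expr of the internal call returning
   a complex integer):

     tgt = IFN_ADD_OVERFLOW (a, b);
     r = REALPART_EXPR <tgt>, (bool) IMAGPART_EXPR <tgt>

   i.e. the real part carries the wrapped result and the imaginary
   part the overflow flag.  */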
9872
9873 /* Fold a call to built-in function FNDECL with 0 arguments.
9874 This function returns NULL_TREE if no simplification was possible. */
9875
9876 static tree
9877 fold_builtin_0 (location_t loc, tree fndecl)
9878 {
9879 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9880 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9881 switch (fcode)
9882 {
9883 CASE_FLT_FN (BUILT_IN_INF):
9884 case BUILT_IN_INFD32:
9885 case BUILT_IN_INFD64:
9886 case BUILT_IN_INFD128:
9887 return fold_builtin_inf (loc, type, true);
9888
9889 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9890 return fold_builtin_inf (loc, type, false);
9891
9892 case BUILT_IN_CLASSIFY_TYPE:
9893 return fold_builtin_classify_type (NULL_TREE);
9894
9895 default:
9896 break;
9897 }
9898 return NULL_TREE;
9899 }
9900
9901 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9902 This function returns NULL_TREE if no simplification was possible. */
9903
9904 static tree
9905 fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
9906 {
9907 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9908 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9909 switch (fcode)
9910 {
9911 case BUILT_IN_CONSTANT_P:
9912 {
9913 tree val = fold_builtin_constant_p (arg0);
9914
9915 /* Gimplification will pull the CALL_EXPR for the builtin out of
9916 an if condition. When not optimizing, we'll not CSE it back.
9917 To avoid link error types of regressions, return false now. */
9918 if (!val && !optimize)
9919 val = integer_zero_node;
9920
9921 return val;
9922 }
9923
9924 case BUILT_IN_CLASSIFY_TYPE:
9925 return fold_builtin_classify_type (arg0);
9926
9927 case BUILT_IN_STRLEN:
9928 return fold_builtin_strlen (loc, type, arg0);
9929
9930 CASE_FLT_FN (BUILT_IN_FABS):
9931 case BUILT_IN_FABSD32:
9932 case BUILT_IN_FABSD64:
9933 case BUILT_IN_FABSD128:
9934 return fold_builtin_fabs (loc, arg0, type);
9935
9936 case BUILT_IN_ABS:
9937 case BUILT_IN_LABS:
9938 case BUILT_IN_LLABS:
9939 case BUILT_IN_IMAXABS:
9940 return fold_builtin_abs (loc, arg0, type);
9941
9942 CASE_FLT_FN (BUILT_IN_CONJ):
9943 if (validate_arg (arg0, COMPLEX_TYPE)
9944 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9945 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9946 break;
9947
9948 CASE_FLT_FN (BUILT_IN_CREAL):
9949 if (validate_arg (arg0, COMPLEX_TYPE)
9950 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9951 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
9952 break;
9953
9954 CASE_FLT_FN (BUILT_IN_CIMAG):
9955 if (validate_arg (arg0, COMPLEX_TYPE)
9956 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9957 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9958 break;
9959
9960 CASE_FLT_FN (BUILT_IN_CCOS):
9961 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ false);
9962
9963 CASE_FLT_FN (BUILT_IN_CCOSH):
9964 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ true);
9965
9966 CASE_FLT_FN (BUILT_IN_CPROJ):
9967 return fold_builtin_cproj (loc, arg0, type);
9968
9969 CASE_FLT_FN (BUILT_IN_CSIN):
9970 if (validate_arg (arg0, COMPLEX_TYPE)
9971 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9972 return do_mpc_arg1 (arg0, type, mpc_sin);
9973 break;
9974
9975 CASE_FLT_FN (BUILT_IN_CSINH):
9976 if (validate_arg (arg0, COMPLEX_TYPE)
9977 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9978 return do_mpc_arg1 (arg0, type, mpc_sinh);
9979 break;
9980
9981 CASE_FLT_FN (BUILT_IN_CTAN):
9982 if (validate_arg (arg0, COMPLEX_TYPE)
9983 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9984 return do_mpc_arg1 (arg0, type, mpc_tan);
9985 break;
9986
9987 CASE_FLT_FN (BUILT_IN_CTANH):
9988 if (validate_arg (arg0, COMPLEX_TYPE)
9989 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9990 return do_mpc_arg1 (arg0, type, mpc_tanh);
9991 break;
9992
9993 CASE_FLT_FN (BUILT_IN_CLOG):
9994 if (validate_arg (arg0, COMPLEX_TYPE)
9995 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9996 return do_mpc_arg1 (arg0, type, mpc_log);
9997 break;
9998
9999 CASE_FLT_FN (BUILT_IN_CSQRT):
10000 if (validate_arg (arg0, COMPLEX_TYPE)
10001 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10002 return do_mpc_arg1 (arg0, type, mpc_sqrt);
10003 break;
10004
10005 CASE_FLT_FN (BUILT_IN_CASIN):
10006 if (validate_arg (arg0, COMPLEX_TYPE)
10007 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10008 return do_mpc_arg1 (arg0, type, mpc_asin);
10009 break;
10010
10011 CASE_FLT_FN (BUILT_IN_CACOS):
10012 if (validate_arg (arg0, COMPLEX_TYPE)
10013 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10014 return do_mpc_arg1 (arg0, type, mpc_acos);
10015 break;
10016
10017 CASE_FLT_FN (BUILT_IN_CATAN):
10018 if (validate_arg (arg0, COMPLEX_TYPE)
10019 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10020 return do_mpc_arg1 (arg0, type, mpc_atan);
10021 break;
10022
10023 CASE_FLT_FN (BUILT_IN_CASINH):
10024 if (validate_arg (arg0, COMPLEX_TYPE)
10025 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10026 return do_mpc_arg1 (arg0, type, mpc_asinh);
10027 break;
10028
10029 CASE_FLT_FN (BUILT_IN_CACOSH):
10030 if (validate_arg (arg0, COMPLEX_TYPE)
10031 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10032 return do_mpc_arg1 (arg0, type, mpc_acosh);
10033 break;
10034
10035 CASE_FLT_FN (BUILT_IN_CATANH):
10036 if (validate_arg (arg0, COMPLEX_TYPE)
10037 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10038 return do_mpc_arg1 (arg0, type, mpc_atanh);
10039 break;
10040
10041 CASE_FLT_FN (BUILT_IN_CABS):
10042 return fold_builtin_cabs (loc, arg0, type, fndecl);
10043
10044 CASE_FLT_FN (BUILT_IN_CARG):
10045 return fold_builtin_carg (loc, arg0, type);
10046
10047 CASE_FLT_FN (BUILT_IN_SQRT):
10048 return fold_builtin_sqrt (loc, arg0, type);
10049
10050 CASE_FLT_FN (BUILT_IN_CBRT):
10051 return fold_builtin_cbrt (loc, arg0, type);
10052
10053 CASE_FLT_FN (BUILT_IN_ASIN):
10054 if (validate_arg (arg0, REAL_TYPE))
10055 return do_mpfr_arg1 (arg0, type, mpfr_asin,
10056 &dconstm1, &dconst1, true);
10057 break;
10058
10059 CASE_FLT_FN (BUILT_IN_ACOS):
10060 if (validate_arg (arg0, REAL_TYPE))
10061 return do_mpfr_arg1 (arg0, type, mpfr_acos,
10062 &dconstm1, &dconst1, true);
10063 break;
10064
10065 CASE_FLT_FN (BUILT_IN_ATAN):
10066 if (validate_arg (arg0, REAL_TYPE))
10067 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
10068 break;
10069
10070 CASE_FLT_FN (BUILT_IN_ASINH):
10071 if (validate_arg (arg0, REAL_TYPE))
10072 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
10073 break;
10074
10075 CASE_FLT_FN (BUILT_IN_ACOSH):
10076 if (validate_arg (arg0, REAL_TYPE))
10077 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
10078 &dconst1, NULL, true);
10079 break;
10080
10081 CASE_FLT_FN (BUILT_IN_ATANH):
10082 if (validate_arg (arg0, REAL_TYPE))
10083 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
10084 &dconstm1, &dconst1, false);
10085 break;
10086
10087 CASE_FLT_FN (BUILT_IN_SIN):
10088 if (validate_arg (arg0, REAL_TYPE))
10089 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
10090 break;
10091
10092 CASE_FLT_FN (BUILT_IN_COS):
10093 return fold_builtin_cos (loc, arg0, type, fndecl);
10094
10095 CASE_FLT_FN (BUILT_IN_TAN):
10096 return fold_builtin_tan (arg0, type);
10097
10098 CASE_FLT_FN (BUILT_IN_CEXP):
10099 return fold_builtin_cexp (loc, arg0, type);
10100
10101 CASE_FLT_FN (BUILT_IN_CEXPI):
10102 if (validate_arg (arg0, REAL_TYPE))
10103 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
10104 break;
10105
10106 CASE_FLT_FN (BUILT_IN_SINH):
10107 if (validate_arg (arg0, REAL_TYPE))
10108 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
10109 break;
10110
10111 CASE_FLT_FN (BUILT_IN_COSH):
10112 return fold_builtin_cosh (loc, arg0, type, fndecl);
10113
10114 CASE_FLT_FN (BUILT_IN_TANH):
10115 if (validate_arg (arg0, REAL_TYPE))
10116 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
10117 break;
10118
10119 CASE_FLT_FN (BUILT_IN_ERF):
10120 if (validate_arg (arg0, REAL_TYPE))
10121 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
10122 break;
10123
10124 CASE_FLT_FN (BUILT_IN_ERFC):
10125 if (validate_arg (arg0, REAL_TYPE))
10126 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
10127 break;
10128
10129 CASE_FLT_FN (BUILT_IN_TGAMMA):
10130 if (validate_arg (arg0, REAL_TYPE))
10131 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
10132 break;
10133
10134 CASE_FLT_FN (BUILT_IN_EXP):
10135 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
10136
10137 CASE_FLT_FN (BUILT_IN_EXP2):
10138 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
10139
10140 CASE_FLT_FN (BUILT_IN_EXP10):
10141 CASE_FLT_FN (BUILT_IN_POW10):
10142 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
10143
10144 CASE_FLT_FN (BUILT_IN_EXPM1):
10145 if (validate_arg (arg0, REAL_TYPE))
10146 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
10147 break;
10148
10149 CASE_FLT_FN (BUILT_IN_LOG):
10150 if (validate_arg (arg0, REAL_TYPE))
10151 return do_mpfr_arg1 (arg0, type, mpfr_log, &dconst0, NULL, false);
10152 break;
10153
10154 CASE_FLT_FN (BUILT_IN_LOG2):
10155 if (validate_arg (arg0, REAL_TYPE))
10156 return do_mpfr_arg1 (arg0, type, mpfr_log2, &dconst0, NULL, false);
10157 break;
10158
10159 CASE_FLT_FN (BUILT_IN_LOG10):
10160 if (validate_arg (arg0, REAL_TYPE))
10161 return do_mpfr_arg1 (arg0, type, mpfr_log10, &dconst0, NULL, false);
10162 break;
10163
10164 CASE_FLT_FN (BUILT_IN_LOG1P):
10165 if (validate_arg (arg0, REAL_TYPE))
10166 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
10167 &dconstm1, NULL, false);
10168 break;
10169
10170 CASE_FLT_FN (BUILT_IN_J0):
10171 if (validate_arg (arg0, REAL_TYPE))
10172 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10173 NULL, NULL, 0);
10174 break;
10175
10176 CASE_FLT_FN (BUILT_IN_J1):
10177 if (validate_arg (arg0, REAL_TYPE))
10178 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10179 NULL, NULL, 0);
10180 break;
10181
10182 CASE_FLT_FN (BUILT_IN_Y0):
10183 if (validate_arg (arg0, REAL_TYPE))
10184 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10185 &dconst0, NULL, false);
10186 break;
10187
10188 CASE_FLT_FN (BUILT_IN_Y1):
10189 if (validate_arg (arg0, REAL_TYPE))
10190 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10191 &dconst0, NULL, false);
10192 break;
10193
10194 CASE_FLT_FN (BUILT_IN_NAN):
10195 case BUILT_IN_NAND32:
10196 case BUILT_IN_NAND64:
10197 case BUILT_IN_NAND128:
10198 return fold_builtin_nan (arg0, type, true);
10199
10200 CASE_FLT_FN (BUILT_IN_NANS):
10201 return fold_builtin_nan (arg0, type, false);
10202
10203 CASE_FLT_FN (BUILT_IN_FLOOR):
10204 return fold_builtin_floor (loc, fndecl, arg0);
10205
10206 CASE_FLT_FN (BUILT_IN_CEIL):
10207 return fold_builtin_ceil (loc, fndecl, arg0);
10208
10209 CASE_FLT_FN (BUILT_IN_TRUNC):
10210 return fold_builtin_trunc (loc, fndecl, arg0);
10211
10212 CASE_FLT_FN (BUILT_IN_ROUND):
10213 return fold_builtin_round (loc, fndecl, arg0);
10214
10215 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10216 CASE_FLT_FN (BUILT_IN_RINT):
10217 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
10218
10219 CASE_FLT_FN (BUILT_IN_ICEIL):
10220 CASE_FLT_FN (BUILT_IN_LCEIL):
10221 CASE_FLT_FN (BUILT_IN_LLCEIL):
10222 CASE_FLT_FN (BUILT_IN_LFLOOR):
10223 CASE_FLT_FN (BUILT_IN_IFLOOR):
10224 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10225 CASE_FLT_FN (BUILT_IN_IROUND):
10226 CASE_FLT_FN (BUILT_IN_LROUND):
10227 CASE_FLT_FN (BUILT_IN_LLROUND):
10228 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
10229
10230 CASE_FLT_FN (BUILT_IN_IRINT):
10231 CASE_FLT_FN (BUILT_IN_LRINT):
10232 CASE_FLT_FN (BUILT_IN_LLRINT):
10233 return fold_fixed_mathfn (loc, fndecl, arg0);
10234
10235 case BUILT_IN_BSWAP16:
10236 case BUILT_IN_BSWAP32:
10237 case BUILT_IN_BSWAP64:
10238 return fold_builtin_bswap (fndecl, arg0);
10239
10240 CASE_INT_FN (BUILT_IN_FFS):
10241 CASE_INT_FN (BUILT_IN_CLZ):
10242 CASE_INT_FN (BUILT_IN_CTZ):
10243 CASE_INT_FN (BUILT_IN_CLRSB):
10244 CASE_INT_FN (BUILT_IN_POPCOUNT):
10245 CASE_INT_FN (BUILT_IN_PARITY):
10246 return fold_builtin_bitop (fndecl, arg0);
10247
10248 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10249 return fold_builtin_signbit (loc, arg0, type);
10250
10251 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10252 return fold_builtin_significand (loc, arg0, type);
10253
10254 CASE_FLT_FN (BUILT_IN_ILOGB):
10255 CASE_FLT_FN (BUILT_IN_LOGB):
10256 return fold_builtin_logb (loc, arg0, type);
10257
10258 case BUILT_IN_ISASCII:
10259 return fold_builtin_isascii (loc, arg0);
10260
10261 case BUILT_IN_TOASCII:
10262 return fold_builtin_toascii (loc, arg0);
10263
10264 case BUILT_IN_ISDIGIT:
10265 return fold_builtin_isdigit (loc, arg0);
10266
10267 CASE_FLT_FN (BUILT_IN_FINITE):
10268 case BUILT_IN_FINITED32:
10269 case BUILT_IN_FINITED64:
10270 case BUILT_IN_FINITED128:
10271 case BUILT_IN_ISFINITE:
10272 {
10273 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10274 if (ret)
10275 return ret;
10276 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10277 }
10278
10279 CASE_FLT_FN (BUILT_IN_ISINF):
10280 case BUILT_IN_ISINFD32:
10281 case BUILT_IN_ISINFD64:
10282 case BUILT_IN_ISINFD128:
10283 {
10284 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10285 if (ret)
10286 return ret;
10287 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10288 }
10289
10290 case BUILT_IN_ISNORMAL:
10291 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10292
10293 case BUILT_IN_ISINF_SIGN:
10294 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10295
10296 CASE_FLT_FN (BUILT_IN_ISNAN):
10297 case BUILT_IN_ISNAND32:
10298 case BUILT_IN_ISNAND64:
10299 case BUILT_IN_ISNAND128:
10300 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10301
10302 case BUILT_IN_FREE:
10303 if (integer_zerop (arg0))
10304 return build_empty_stmt (loc);
10305 break;
10306
10307 default:
10308 break;
10309 }
10310
10311 return NULL_TREE;
10312
10313 }
10314
10315 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10316 This function returns NULL_TREE if no simplification was possible. */
10317
10318 static tree
10319 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
10320 {
10321 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10322 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10323
10324 switch (fcode)
10325 {
10326 CASE_FLT_FN (BUILT_IN_JN):
10327 if (validate_arg (arg0, INTEGER_TYPE)
10328 && validate_arg (arg1, REAL_TYPE))
10329 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10330 break;
10331
10332 CASE_FLT_FN (BUILT_IN_YN):
10333 if (validate_arg (arg0, INTEGER_TYPE)
10334 && validate_arg (arg1, REAL_TYPE))
10335 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10336 &dconst0, false);
10337 break;
10338
10339 CASE_FLT_FN (BUILT_IN_DREM):
10340 CASE_FLT_FN (BUILT_IN_REMAINDER):
10341 if (validate_arg (arg0, REAL_TYPE)
10342 && validate_arg (arg1, REAL_TYPE))
10343 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10344 break;
10345
10346 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10347 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10348 if (validate_arg (arg0, REAL_TYPE)
10349 && validate_arg (arg1, POINTER_TYPE))
10350 return do_mpfr_lgamma_r (arg0, arg1, type);
10351 break;
10352
10353 CASE_FLT_FN (BUILT_IN_ATAN2):
10354 if (validate_arg (arg0, REAL_TYPE)
10355 && validate_arg (arg1, REAL_TYPE))
10356 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10357 break;
10358
10359 CASE_FLT_FN (BUILT_IN_FDIM):
10360 if (validate_arg (arg0, REAL_TYPE)
10361 && validate_arg (arg1, REAL_TYPE))
10362 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10363 break;
10364
10365 CASE_FLT_FN (BUILT_IN_HYPOT):
10366 return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
10367
10368 CASE_FLT_FN (BUILT_IN_CPOW):
10369 if (validate_arg (arg0, COMPLEX_TYPE)
10370 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10371 && validate_arg (arg1, COMPLEX_TYPE)
10372 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
10373 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
10374 break;
10375
10376 CASE_FLT_FN (BUILT_IN_LDEXP):
10377 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
10378 CASE_FLT_FN (BUILT_IN_SCALBN):
10379 CASE_FLT_FN (BUILT_IN_SCALBLN):
10380 return fold_builtin_load_exponent (loc, arg0, arg1,
10381 type, /*ldexp=*/false);
10382
10383 CASE_FLT_FN (BUILT_IN_FREXP):
10384 return fold_builtin_frexp (loc, arg0, arg1, type);
10385
10386 CASE_FLT_FN (BUILT_IN_MODF):
10387 return fold_builtin_modf (loc, arg0, arg1, type);
10388
10389 case BUILT_IN_STRSTR:
10390 return fold_builtin_strstr (loc, arg0, arg1, type);
10391
10392 case BUILT_IN_STRSPN:
10393 return fold_builtin_strspn (loc, arg0, arg1);
10394
10395 case BUILT_IN_STRCSPN:
10396 return fold_builtin_strcspn (loc, arg0, arg1);
10397
10398 case BUILT_IN_STRCHR:
10399 case BUILT_IN_INDEX:
10400 return fold_builtin_strchr (loc, arg0, arg1, type);
10401
10402 case BUILT_IN_STRRCHR:
10403 case BUILT_IN_RINDEX:
10404 return fold_builtin_strrchr (loc, arg0, arg1, type);
10405
10406 case BUILT_IN_STRCMP:
10407 return fold_builtin_strcmp (loc, arg0, arg1);
10408
10409 case BUILT_IN_STRPBRK:
10410 return fold_builtin_strpbrk (loc, arg0, arg1, type);
10411
10412 case BUILT_IN_EXPECT:
10413 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);
10414
10415 CASE_FLT_FN (BUILT_IN_POW):
10416 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
10417
10418 CASE_FLT_FN (BUILT_IN_POWI):
10419 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
10420
10421 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10422 return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
10423
10424 CASE_FLT_FN (BUILT_IN_FMIN):
10425 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
10426
10427 CASE_FLT_FN (BUILT_IN_FMAX):
10428 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
10429
10430 case BUILT_IN_ISGREATER:
10431 return fold_builtin_unordered_cmp (loc, fndecl,
10432 arg0, arg1, UNLE_EXPR, LE_EXPR);
10433 case BUILT_IN_ISGREATEREQUAL:
10434 return fold_builtin_unordered_cmp (loc, fndecl,
10435 arg0, arg1, UNLT_EXPR, LT_EXPR);
10436 case BUILT_IN_ISLESS:
10437 return fold_builtin_unordered_cmp (loc, fndecl,
10438 arg0, arg1, UNGE_EXPR, GE_EXPR);
10439 case BUILT_IN_ISLESSEQUAL:
10440 return fold_builtin_unordered_cmp (loc, fndecl,
10441 arg0, arg1, UNGT_EXPR, GT_EXPR);
10442 case BUILT_IN_ISLESSGREATER:
10443 return fold_builtin_unordered_cmp (loc, fndecl,
10444 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10445 case BUILT_IN_ISUNORDERED:
10446 return fold_builtin_unordered_cmp (loc, fndecl,
10447 arg0, arg1, UNORDERED_EXPR,
10448 NOP_EXPR);
10449
10450 /* We do the folding for va_start in the expander. */
10451 case BUILT_IN_VA_START:
10452 break;
10453
10454 case BUILT_IN_OBJECT_SIZE:
10455 return fold_builtin_object_size (arg0, arg1);
10456
10457 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
10458 return fold_builtin_atomic_always_lock_free (arg0, arg1);
10459
10460 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
10461 return fold_builtin_atomic_is_lock_free (arg0, arg1);
10462
10463 default:
10464 break;
10465 }
10466 return NULL_TREE;
10467 }
10468
10469 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10470 and ARG2.
10471 This function returns NULL_TREE if no simplification was possible. */
10472
10473 static tree
10474 fold_builtin_3 (location_t loc, tree fndecl,
10475 tree arg0, tree arg1, tree arg2)
10476 {
10477 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10478 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10479 switch (fcode)
10480 {
10481
10482 CASE_FLT_FN (BUILT_IN_SINCOS):
10483 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10484
10485 CASE_FLT_FN (BUILT_IN_FMA):
10486 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
10488
10489 CASE_FLT_FN (BUILT_IN_REMQUO):
10490 if (validate_arg (arg0, REAL_TYPE)
10491 && validate_arg (arg1, REAL_TYPE)
10492 && validate_arg (arg2, POINTER_TYPE))
10493 return do_mpfr_remquo (arg0, arg1, arg2);
10494 break;
10495
10496 case BUILT_IN_STRNCMP:
10497 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10498
10499 case BUILT_IN_MEMCHR:
10500 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10501
10502 case BUILT_IN_BCMP:
10503 case BUILT_IN_MEMCMP:
10504 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
10505
10506 case BUILT_IN_EXPECT:
10507 return fold_builtin_expect (loc, arg0, arg1, arg2);
10508
10509 case BUILT_IN_ADD_OVERFLOW:
10510 case BUILT_IN_SUB_OVERFLOW:
10511 case BUILT_IN_MUL_OVERFLOW:
10512 case BUILT_IN_SADD_OVERFLOW:
10513 case BUILT_IN_SADDL_OVERFLOW:
10514 case BUILT_IN_SADDLL_OVERFLOW:
10515 case BUILT_IN_SSUB_OVERFLOW:
10516 case BUILT_IN_SSUBL_OVERFLOW:
10517 case BUILT_IN_SSUBLL_OVERFLOW:
10518 case BUILT_IN_SMUL_OVERFLOW:
10519 case BUILT_IN_SMULL_OVERFLOW:
10520 case BUILT_IN_SMULLL_OVERFLOW:
10521 case BUILT_IN_UADD_OVERFLOW:
10522 case BUILT_IN_UADDL_OVERFLOW:
10523 case BUILT_IN_UADDLL_OVERFLOW:
10524 case BUILT_IN_USUB_OVERFLOW:
10525 case BUILT_IN_USUBL_OVERFLOW:
10526 case BUILT_IN_USUBLL_OVERFLOW:
10527 case BUILT_IN_UMUL_OVERFLOW:
10528 case BUILT_IN_UMULL_OVERFLOW:
10529 case BUILT_IN_UMULLL_OVERFLOW:
10530 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
10531
10532 default:
10533 break;
10534 }
10535 return NULL_TREE;
10536 }
10537
10538 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10539 arguments. The final bool parameter (whether the result of the call
10540 is ignored) is unnamed because it is currently unused here. This
10541 function returns NULL_TREE if no simplification was possible. */
10542
10543 tree
10544 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
10545 {
10546 tree ret = NULL_TREE;
10547
10548 switch (nargs)
10549 {
10550 case 0:
10551 ret = fold_builtin_0 (loc, fndecl);
10552 break;
10553 case 1:
10554 ret = fold_builtin_1 (loc, fndecl, args[0]);
10555 break;
10556 case 2:
10557 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
10558 break;
10559 case 3:
10560 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
10561 break;
10562 default:
10563 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
10564 break;
10565 }
10566 if (ret)
10567 {
10568 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10569 SET_EXPR_LOCATION (ret, loc);
10570 TREE_NO_WARNING (ret) = 1;
10571 return ret;
10572 }
10573 return NULL_TREE;
10574 }
10575
10576 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
10577 list ARGS along with N new arguments in NEWARGS. SKIP is the number
10578 of arguments in ARGS to be omitted. OLDNARGS is the number of
10579 elements in ARGS. */
10580
10581 static tree
10582 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
10583 int skip, tree fndecl, int n, va_list newargs)
10584 {
10585 int nargs = oldnargs - skip + n;
10586 tree *buffer;
10587
10588 if (n > 0)
10589 {
10590 int i, j;
10591
10592 buffer = XALLOCAVEC (tree, nargs);
10593 for (i = 0; i < n; i++)
10594 buffer[i] = va_arg (newargs, tree);
10595 for (j = skip; j < oldnargs; j++, i++)
10596 buffer[i] = args[j];
10597 }
10598 else
10599 buffer = args + skip;
10600
10601 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
10602 }
10603
10604 /* Return true if FNDECL shouldn't be folded right now.
10605 If a built-in function has an inline attribute always_inline
10606 wrapper, defer folding it until after always_inline functions have
10607 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
10608 might not be performed. */
10609
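/* A sketch of the glibc-style fortify wrapper this guards against
   (names are illustrative, not from this file):

   extern __inline __attribute__ ((always_inline, gnu_inline)) char *
   strcpy (char *dest, const char *src)
   {
     return __builtin___strcpy_chk (dest, src,
                                    __builtin_object_size (dest, 1));
   }

   Folding the wrapper's body before it is inlined would evaluate
   __builtin_object_size where the destination's size is unknown, and
   the -D_FORTIFY_SOURCE check would be lost. */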
10610 bool
10611 avoid_folding_inline_builtin (tree fndecl)
10612 {
10613 return (DECL_DECLARED_INLINE_P (fndecl)
10614 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10615 && cfun
10616 && !cfun->always_inline_functions_inlined
10617 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
10618 }
10619
10620 /* A wrapper function for builtin folding that prevents warnings for
10621 "statement without effect" and the like, caused by removing the
10622 call node earlier than the warning is generated. */
10623
10624 tree
10625 fold_call_expr (location_t loc, tree exp, bool ignore)
10626 {
10627 tree ret = NULL_TREE;
10628 tree fndecl = get_callee_fndecl (exp);
10629 if (fndecl
10630 && TREE_CODE (fndecl) == FUNCTION_DECL
10631 && DECL_BUILT_IN (fndecl)
10632 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10633 yet. Defer folding until we see all the arguments
10634 (after inlining). */
10635 && !CALL_EXPR_VA_ARG_PACK (exp))
10636 {
10637 int nargs = call_expr_nargs (exp);
10638
10639 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10640 instead the last argument is __builtin_va_arg_pack (). Defer folding
10641 even in that case, until arguments are finalized. */
10642 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10643 {
10644 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10645 if (fndecl2
10646 && TREE_CODE (fndecl2) == FUNCTION_DECL
10647 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10648 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10649 return NULL_TREE;
10650 }
10651
10652 if (avoid_folding_inline_builtin (fndecl))
10653 return NULL_TREE;
10654
10655 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10656 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
10657 CALL_EXPR_ARGP (exp), ignore);
10658 else
10659 {
10660 tree *args = CALL_EXPR_ARGP (exp);
10661 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
10662 if (ret)
10663 return ret;
10664 }
10665 }
10666 return NULL_TREE;
10667 }
10668
10669 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
10670 N arguments are passed in the array ARGARRAY. Return a folded
10671 expression or NULL_TREE if no simplification was possible. */
10672
10673 tree
10674 fold_builtin_call_array (location_t loc, tree,
10675 tree fn,
10676 int n,
10677 tree *argarray)
10678 {
10679 if (TREE_CODE (fn) != ADDR_EXPR)
10680 return NULL_TREE;
10681
10682 tree fndecl = TREE_OPERAND (fn, 0);
10683 if (TREE_CODE (fndecl) == FUNCTION_DECL
10684 && DECL_BUILT_IN (fndecl))
10685 {
10686 /* If the last argument is __builtin_va_arg_pack (), arguments to this
10687 function are not finalized yet. Defer folding until they are. */
10688 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
10689 {
10690 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
10691 if (fndecl2
10692 && TREE_CODE (fndecl2) == FUNCTION_DECL
10693 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10694 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10695 return NULL_TREE;
10696 }
10697 if (avoid_folding_inline_builtin (fndecl))
10698 return NULL_TREE;
10699 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10700 return targetm.fold_builtin (fndecl, n, argarray, false);
10701 else
10702 return fold_builtin_n (loc, fndecl, argarray, n, false);
10703 }
10704
10705 return NULL_TREE;
10706 }
10707
10708 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10709 along with N new arguments specified as the "..." parameters. SKIP
10710 is the number of arguments in EXP to be omitted. This function is used
10711 to do varargs-to-varargs transformations. */
10712
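/* For example (G_DECL and NEWARG are hypothetical): if EXP is
   f (a, b, c), then rewrite_call_expr (loc, exp, 2, g_decl, 1, newarg)
   builds g (newarg, c) -- the N new arguments come first, followed by
   EXP's arguments after the first SKIP. */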
10713 static tree
10714 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
10715 {
10716 va_list ap;
10717 tree t;
10718
10719 va_start (ap, n);
10720 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
10721 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
10722 va_end (ap);
10723
10724 return t;
10725 }
10726
10727 /* Validate a single argument ARG against a tree code CODE representing
10728 a type. */
10729
10730 static bool
10731 validate_arg (const_tree arg, enum tree_code code)
10732 {
10733 if (!arg)
10734 return false;
10735 else if (code == POINTER_TYPE)
10736 return POINTER_TYPE_P (TREE_TYPE (arg));
10737 else if (code == INTEGER_TYPE)
10738 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10739 return code == TREE_CODE (TREE_TYPE (arg));
10740 }
10741
10742 /* This function validates the types of a function call argument list
10743 against a specified list of tree_codes. If the last specifier is a 0,
10744 that represents an ellipsis, otherwise the last specifier must be a
10745 VOID_TYPE.
10746
10747 This is the GIMPLE version of validate_arglist. Eventually we want to
10748 completely convert builtins.c to work from GIMPLEs and the tree based
10749 validate_arglist will then be removed. */
10750
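/* For instance, validate_gimple_arglist (call, REAL_TYPE, POINTER_TYPE,
   VOID_TYPE) accepts exactly a (real, pointer) argument list, whereas
   ending the specifier list with 0 instead of VOID_TYPE would also
   accept any further arguments, as with a trailing "...". */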
10751 bool
10752 validate_gimple_arglist (const gcall *call, ...)
10753 {
10754 enum tree_code code;
10755 bool res = false;
10756 va_list ap;
10757 const_tree arg;
10758 size_t i;
10759
10760 va_start (ap, call);
10761 i = 0;
10762
10763 do
10764 {
10765 code = (enum tree_code) va_arg (ap, int);
10766 switch (code)
10767 {
10768 case 0:
10769 /* This signifies an ellipsis; any further arguments are all OK. */
10770 res = true;
10771 goto end;
10772 case VOID_TYPE:
10773 /* This signifies an endlink, if no arguments remain, return
10774 true, otherwise return false. */
10775 res = (i == gimple_call_num_args (call));
10776 goto end;
10777 default:
10778 /* If no parameters remain or the parameter's code does not
10779 match the specified code, return false. Otherwise continue
10780 checking any remaining arguments. */
10781 arg = gimple_call_arg (call, i++);
10782 if (!validate_arg (arg, code))
10783 goto end;
10784 break;
10785 }
10786 }
10787 while (1);
10788
10789 /* We need gotos here so that every path reaches the single va_end
10790 call below (the old VA_CLOSE macro could appear only once). */
10791 end: ;
10792 va_end (ap);
10793
10794 return res;
10795 }
10796
10797 /* Default target-specific builtin expander that does nothing. */
10798
10799 rtx
10800 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10801 rtx target ATTRIBUTE_UNUSED,
10802 rtx subtarget ATTRIBUTE_UNUSED,
10803 machine_mode mode ATTRIBUTE_UNUSED,
10804 int ignore ATTRIBUTE_UNUSED)
10805 {
10806 return NULL_RTX;
10807 }
10808
10809 /* Returns true if EXP represents data that would potentially reside
10810 in a readonly section. */
10811
10812 bool
10813 readonly_data_expr (tree exp)
10814 {
10815 STRIP_NOPS (exp);
10816
10817 if (TREE_CODE (exp) != ADDR_EXPR)
10818 return false;
10819
10820 exp = get_base_address (TREE_OPERAND (exp, 0));
10821 if (!exp)
10822 return false;
10823
10824 /* Make sure we call decl_readonly_section only for trees it
10825 can handle (since it returns true for everything it doesn't
10826 understand). */
10827 if (TREE_CODE (exp) == STRING_CST
10828 || TREE_CODE (exp) == CONSTRUCTOR
10829 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
10830 return decl_readonly_section (exp, 0);
10831 else
10832 return false;
10833 }
10834
10835 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
10836 to the call, and TYPE is its return type.
10837
10838 Return NULL_TREE if no simplification was possible, otherwise return the
10839 simplified form of the call as a tree.
10840
10841 The simplified form may be a constant or other expression which
10842 computes the same value, but in a more efficient manner (including
10843 calls to other builtin functions).
10844
10845 The call may contain arguments which need to be evaluated, but
10846 which are not useful to determine the result of the call. In
10847 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10848 COMPOUND_EXPR will be an argument which must be evaluated.
10849 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10850 COMPOUND_EXPR in the chain will contain the tree for the simplified
10851 form of the builtin function call. */
10852
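/* Illustrative foldings performed below: strstr (s, "") becomes
   (char *) s, strstr (s, "c") becomes strchr (s, 'c'), and with both
   arguments constant, strstr ("hello", "ll") becomes the constant
   offset "hello" + 2. */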
10853 static tree
10854 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
10855 {
10856 if (!validate_arg (s1, POINTER_TYPE)
10857 || !validate_arg (s2, POINTER_TYPE))
10858 return NULL_TREE;
10859 else
10860 {
10861 tree fn;
10862 const char *p1, *p2;
10863
10864 p2 = c_getstr (s2);
10865 if (p2 == NULL)
10866 return NULL_TREE;
10867
10868 p1 = c_getstr (s1);
10869 if (p1 != NULL)
10870 {
10871 const char *r = strstr (p1, p2);
10872 tree tem;
10873
10874 if (r == NULL)
10875 return build_int_cst (TREE_TYPE (s1), 0);
10876
10877 /* Return an offset into the constant string argument. */
10878 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10879 return fold_convert_loc (loc, type, tem);
10880 }
10881
10882 /* The argument is const char *, and the result is char *, so we need
10883 a type conversion here to avoid a warning. */
10884 if (p2[0] == '\0')
10885 return fold_convert_loc (loc, type, s1);
10886
10887 if (p2[1] != '\0')
10888 return NULL_TREE;
10889
10890 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
10891 if (!fn)
10892 return NULL_TREE;
10893
10894 /* New argument list transforming strstr(s1, s2) to
10895 strchr(s1, s2[0]). */
10896 return build_call_expr_loc (loc, fn, 2, s1,
10897 build_int_cst (integer_type_node, p2[0]));
10898 }
10899 }
10900
10901 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
10902 the call, and TYPE is its return type.
10903
10904 Return NULL_TREE if no simplification was possible, otherwise return the
10905 simplified form of the call as a tree.
10906
10907 The simplified form may be a constant or other expression which
10908 computes the same value, but in a more efficient manner (including
10909 calls to other builtin functions).
10910
10911 The call may contain arguments which need to be evaluated, but
10912 which are not useful to determine the result of the call. In
10913 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10914 COMPOUND_EXPR will be an argument which must be evaluated.
10915 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10916 COMPOUND_EXPR in the chain will contain the tree for the simplified
10917 form of the builtin function call. */
10918
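/* E.g. strchr ("hello", 'l') folds to the constant offset "hello" + 2,
   and strchr ("hello", 'z') folds to a null pointer; the character is
   first converted to the target character set by target_char_cast. */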
10919 static tree
10920 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
10921 {
10922 if (!validate_arg (s1, POINTER_TYPE)
10923 || !validate_arg (s2, INTEGER_TYPE))
10924 return NULL_TREE;
10925 else
10926 {
10927 const char *p1;
10928
10929 if (TREE_CODE (s2) != INTEGER_CST)
10930 return NULL_TREE;
10931
10932 p1 = c_getstr (s1);
10933 if (p1 != NULL)
10934 {
10935 char c;
10936 const char *r;
10937 tree tem;
10938
10939 if (target_char_cast (s2, &c))
10940 return NULL_TREE;
10941
10942 r = strchr (p1, c);
10943
10944 if (r == NULL)
10945 return build_int_cst (TREE_TYPE (s1), 0);
10946
10947 /* Return an offset into the constant string argument. */
10948 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10949 return fold_convert_loc (loc, type, tem);
10950 }
10951 return NULL_TREE;
10952 }
10953 }
10954
10955 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
10956 the call, and TYPE is its return type.
10957
10958 Return NULL_TREE if no simplification was possible, otherwise return the
10959 simplified form of the call as a tree.
10960
10961 The simplified form may be a constant or other expression which
10962 computes the same value, but in a more efficient manner (including
10963 calls to other builtin functions).
10964
10965 The call may contain arguments which need to be evaluated, but
10966 which are not useful to determine the result of the call. In
10967 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10968 COMPOUND_EXPR will be an argument which must be evaluated.
10969 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10970 COMPOUND_EXPR in the chain will contain the tree for the simplified
10971 form of the builtin function call. */
10972
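/* Besides constant folding as for strchr, the only non-constant case
   handled below is strrchr (s1, 0), which is rewritten as
   strchr (s1, 0) since both locate the terminating NUL. */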
10973 static tree
10974 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
10975 {
10976 if (!validate_arg (s1, POINTER_TYPE)
10977 || !validate_arg (s2, INTEGER_TYPE))
10978 return NULL_TREE;
10979 else
10980 {
10981 tree fn;
10982 const char *p1;
10983
10984 if (TREE_CODE (s2) != INTEGER_CST)
10985 return NULL_TREE;
10986
10987 p1 = c_getstr (s1);
10988 if (p1 != NULL)
10989 {
10990 char c;
10991 const char *r;
10992 tree tem;
10993
10994 if (target_char_cast (s2, &c))
10995 return NULL_TREE;
10996
10997 r = strrchr (p1, c);
10998
10999 if (r == NULL)
11000 return build_int_cst (TREE_TYPE (s1), 0);
11001
11002 /* Return an offset into the constant string argument. */
11003 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11004 return fold_convert_loc (loc, type, tem);
11005 }
11006
11007 if (! integer_zerop (s2))
11008 return NULL_TREE;
11009
11010 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11011 if (!fn)
11012 return NULL_TREE;
11013
11014 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11015 return build_call_expr_loc (loc, fn, 2, s1, s2);
11016 }
11017 }
11018
11019 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11020 to the call, and TYPE is its return type.
11021
11022 Return NULL_TREE if no simplification was possible, otherwise return the
11023 simplified form of the call as a tree.
11024
11025 The simplified form may be a constant or other expression which
11026 computes the same value, but in a more efficient manner (including
11027 calls to other builtin functions).
11028
11029 The call may contain arguments which need to be evaluated, but
11030 which are not useful to determine the result of the call. In
11031 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11032 COMPOUND_EXPR will be an argument which must be evaluated.
11033 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11034 COMPOUND_EXPR in the chain will contain the tree for the simplified
11035 form of the builtin function call. */
11036
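/* E.g. strpbrk (s, "") folds to (s, 0) -- a COMPOUND_EXPR that still
   evaluates s for side-effects but yields a null pointer -- and
   strpbrk (s, "x") is rewritten as strchr (s, 'x'). */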
11037 static tree
11038 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
11039 {
11040 if (!validate_arg (s1, POINTER_TYPE)
11041 || !validate_arg (s2, POINTER_TYPE))
11042 return NULL_TREE;
11043 else
11044 {
11045 tree fn;
11046 const char *p1, *p2;
11047
11048 p2 = c_getstr (s2);
11049 if (p2 == NULL)
11050 return NULL_TREE;
11051
11052 p1 = c_getstr (s1);
11053 if (p1 != NULL)
11054 {
11055 const char *r = strpbrk (p1, p2);
11056 tree tem;
11057
11058 if (r == NULL)
11059 return build_int_cst (TREE_TYPE (s1), 0);
11060
11061 /* Return an offset into the constant string argument. */
11062 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11063 return fold_convert_loc (loc, type, tem);
11064 }
11065
11066 if (p2[0] == '\0')
11067 /* strpbrk(x, "") == NULL.
11068 Evaluate and ignore s1 in case it had side-effects. */
11069 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
11070
11071 if (p2[1] != '\0')
11072 return NULL_TREE; /* Really call strpbrk. */
11073
11074 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11075 if (!fn)
11076 return NULL_TREE;
11077
11078 /* New argument list transforming strpbrk(s1, s2) to
11079 strchr(s1, s2[0]). */
11080 return build_call_expr_loc (loc, fn, 2, s1,
11081 build_int_cst (integer_type_node, p2[0]));
11082 }
11083 }
11084
11085 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11086 to the call.
11087
11088 Return NULL_TREE if no simplification was possible, otherwise return the
11089 simplified form of the call as a tree.
11090
11091 The simplified form may be a constant or other expression which
11092 computes the same value, but in a more efficient manner (including
11093 calls to other builtin functions).
11094
11095 The call may contain arguments which need to be evaluated, but
11096 which are not useful to determine the result of the call. In
11097 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11098 COMPOUND_EXPR will be an argument which must be evaluated.
11099 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11100 COMPOUND_EXPR in the chain will contain the tree for the simplified
11101 form of the builtin function call. */
11102
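/* E.g. strspn ("aab", "ab") folds to the constant 3, and strspn with
   either argument "" folds to 0 while still evaluating both arguments
   for side-effects. */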
11103 static tree
11104 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11105 {
11106 if (!validate_arg (s1, POINTER_TYPE)
11107 || !validate_arg (s2, POINTER_TYPE))
11108 return NULL_TREE;
11109 else
11110 {
11111 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11112
11113 /* If both arguments are constants, evaluate at compile-time. */
11114 if (p1 && p2)
11115 {
11116 const size_t r = strspn (p1, p2);
11117 return build_int_cst (size_type_node, r);
11118 }
11119
11120 /* If either argument is "", return NULL_TREE. */
11121 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11122 /* Evaluate and ignore both arguments in case either one has
11123 side-effects. */
11124 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11125 s1, s2);
11126 return NULL_TREE;
11127 }
11128 }
11129
11130 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11131 to the call.
11132
11133 Return NULL_TREE if no simplification was possible, otherwise return the
11134 simplified form of the call as a tree.
11135
11136 The simplified form may be a constant or other expression which
11137 computes the same value, but in a more efficient manner (including
11138 calls to other builtin functions).
11139
11140 The call may contain arguments which need to be evaluated, but
11141 which are not useful to determine the result of the call. In
11142 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11143 COMPOUND_EXPR will be an argument which must be evaluated.
11144 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11145 COMPOUND_EXPR in the chain will contain the tree for the simplified
11146 form of the builtin function call. */
11147
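/* E.g. strcspn ("abc", "c") folds to the constant 2, strcspn ("", s2)
   folds to 0 while still evaluating s2, and strcspn (s1, "") is
   rewritten as strlen (s1). */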
11148 static tree
11149 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
11150 {
11151 if (!validate_arg (s1, POINTER_TYPE)
11152 || !validate_arg (s2, POINTER_TYPE))
11153 return NULL_TREE;
11154 else
11155 {
11156 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11157
11158 /* If both arguments are constants, evaluate at compile-time. */
11159 if (p1 && p2)
11160 {
11161 const size_t r = strcspn (p1, p2);
11162 return build_int_cst (size_type_node, r);
11163 }
11164
11165 /* If the first argument is "", return NULL_TREE. */
11166 if (p1 && *p1 == '\0')
11167 {
11168 /* Evaluate and ignore argument s2 in case it has
11169 side-effects. */
11170 return omit_one_operand_loc (loc, size_type_node,
11171 size_zero_node, s2);
11172 }
11173
11174 /* If the second argument is "", return __builtin_strlen(s1). */
11175 if (p2 && *p2 == '\0')
11176 {
11177 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
11178
11179 /* If the replacement _DECL isn't initialized, don't do the
11180 transformation. */
11181 if (!fn)
11182 return NULL_TREE;
11183
11184 return build_call_expr_loc (loc, fn, 1, s1);
11185 }
11186 return NULL_TREE;
11187 }
11188 }
11189
11190 /* Fold the next_arg or va_start call EXP. Returns true if an error was
11191 produced, false otherwise. This is done so that we don't output the
11192 error or warning twice or three times. */
11193
11194 bool
11195 fold_builtin_next_arg (tree exp, bool va_start_p)
11196 {
11197 tree fntype = TREE_TYPE (current_function_decl);
11198 int nargs = call_expr_nargs (exp);
11199 tree arg;
11200 /* There is a good chance the current input_location points inside the
11201 definition of the va_start macro (perhaps on the token for the
11202 builtin) in a system header, so warnings will not be emitted.
11203 Use the location in real source code. */
11204 source_location current_location =
11205 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
11206 NULL);
11207
11208 if (!stdarg_p (fntype))
11209 {
11210 error ("%<va_start%> used in function with fixed args");
11211 return true;
11212 }
11213
11214 if (va_start_p)
11215 {
11216 if (nargs != 2)
11217 {
11218 error ("wrong number of arguments to function %<va_start%>");
11219 return true;
11220 }
11221 arg = CALL_EXPR_ARG (exp, 1);
11222 }
11223 /* Once we have checked the arguments and, if needed, issued a warning,
11224 the call is rewritten below as __builtin_va_start (ap, 0) or __builtin_next_arg (0). */
11225 else
11226 {
11227 if (nargs == 0)
11228 {
11229 /* Evidently an out of date version of <stdarg.h>; can't validate
11230 va_start's second argument, but can still work as intended. */
11231 warning_at (current_location,
11232 OPT_Wvarargs,
11233 "%<__builtin_next_arg%> called without an argument");
11234 return true;
11235 }
11236 else if (nargs > 1)
11237 {
11238 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11239 return true;
11240 }
11241 arg = CALL_EXPR_ARG (exp, 0);
11242 }
11243
11244 if (TREE_CODE (arg) == SSA_NAME)
11245 arg = SSA_NAME_VAR (arg);
11246
11247 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11248 or __builtin_next_arg (0) the first time we see it, after checking
11249 the arguments and if needed issuing a warning. */
11250 if (!integer_zerop (arg))
11251 {
11252 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11253
11254 /* Strip off all nops for the sake of the comparison. This
11255 is not quite the same as STRIP_NOPS. It does more.
11256 We must also strip off INDIRECT_EXPR for C++ reference
11257 parameters. */
11258 while (CONVERT_EXPR_P (arg)
11259 || TREE_CODE (arg) == INDIRECT_REF)
11260 arg = TREE_OPERAND (arg, 0);
11261 if (arg != last_parm)
11262 {
11263 /* FIXME: Sometimes the tree optimizers hand us an argument that is
11264 not the last one even though the user wrote the last
11265 argument. We just warn and treat the arg as if it were the
11266 last argument, so we may generate wrong code because
11267 of it. */
11268 warning_at (current_location,
11269 OPT_Wvarargs,
11270 "second parameter of %<va_start%> not last named argument");
11271 }
11272
11273 /* Undefined by C99 7.15.1.4p4 (va_start):
11274 "If the parameter parmN is declared with the register storage
11275 class, with a function or array type, or with a type that is
11276 not compatible with the type that results after application of
11277 the default argument promotions, the behavior is undefined."
11278 */
11279 else if (DECL_REGISTER (arg))
11280 {
11281 warning_at (current_location,
11282 OPT_Wvarargs,
11283 "undefined behaviour when second parameter of "
11284 "%<va_start%> is declared with %<register%> storage");
11285 }
11286
11287 /* We want to verify the second parameter just once before the tree
11288 optimizers are run and then avoid keeping it in the tree,
11289 as otherwise we could warn even for correct code like:
11290 void foo (int i, ...)
11291 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11292 if (va_start_p)
11293 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11294 else
11295 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11296 }
11297 return false;
11298 }
11299
11300
11301 /* Expand a call EXP to __builtin_object_size. */
11302
11303 static rtx
11304 expand_builtin_object_size (tree exp)
11305 {
11306 tree ost;
11307 int object_size_type;
11308 tree fndecl = get_callee_fndecl (exp);
11309
11310 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11311 {
11312 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11313 exp, fndecl);
11314 expand_builtin_trap ();
11315 return const0_rtx;
11316 }
11317
11318 ost = CALL_EXPR_ARG (exp, 1);
11319 STRIP_NOPS (ost);
11320
11321 if (TREE_CODE (ost) != INTEGER_CST
11322 || tree_int_cst_sgn (ost) < 0
11323 || compare_tree_int (ost, 3) > 0)
11324 {
11325 error ("%Klast argument of %D is not integer constant between 0 and 3",
11326 exp, fndecl);
11327 expand_builtin_trap ();
11328 return const0_rtx;
11329 }
11330
11331 object_size_type = tree_to_shwi (ost);
11332
11333 return object_size_type < 2 ? constm1_rtx : const0_rtx;
11334 }
11335
11336 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11337 FCODE is the BUILT_IN_* to use.
11338 Return NULL_RTX if we failed; the caller should emit a normal call,
11339 otherwise try to get the result in TARGET, if convenient (and in
11340 mode MODE if that's convenient). */
11341
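/* For instance, __builtin___memcpy_chk (d, s, n, os) expands as plain
   memcpy (d, s, n) when OS is (size_t) -1 (object size unknown) or
   when a constant N is known to fit; a constant N larger than OS
   instead triggers the always-overflows warning below. */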
11342 static rtx
11343 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
11344 enum built_in_function fcode)
11345 {
11346 tree dest, src, len, size;
11347
11348 if (!validate_arglist (exp,
11349 POINTER_TYPE,
11350 fcode == BUILT_IN_MEMSET_CHK
11351 ? INTEGER_TYPE : POINTER_TYPE,
11352 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11353 return NULL_RTX;
11354
11355 dest = CALL_EXPR_ARG (exp, 0);
11356 src = CALL_EXPR_ARG (exp, 1);
11357 len = CALL_EXPR_ARG (exp, 2);
11358 size = CALL_EXPR_ARG (exp, 3);
11359
11360 if (! tree_fits_uhwi_p (size))
11361 return NULL_RTX;
11362
11363 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
11364 {
11365 tree fn;
11366
11367 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
11368 {
11369 warning_at (tree_nonartificial_location (exp),
11370 0, "%Kcall to %D will always overflow destination buffer",
11371 exp, get_callee_fndecl (exp));
11372 return NULL_RTX;
11373 }
11374
11375 fn = NULL_TREE;
11376 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11377 mem{cpy,pcpy,move,set} is available. */
11378 switch (fcode)
11379 {
11380 case BUILT_IN_MEMCPY_CHK:
11381 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
11382 break;
11383 case BUILT_IN_MEMPCPY_CHK:
11384 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
11385 break;
11386 case BUILT_IN_MEMMOVE_CHK:
11387 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
11388 break;
11389 case BUILT_IN_MEMSET_CHK:
11390 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
11391 break;
11392 default:
11393 break;
11394 }
11395
11396 if (! fn)
11397 return NULL_RTX;
11398
11399 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
11400 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11401 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11402 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11403 }
11404 else if (fcode == BUILT_IN_MEMSET_CHK)
11405 return NULL_RTX;
11406 else
11407 {
11408 unsigned int dest_align = get_pointer_alignment (dest);
11409
11410 /* If DEST is not a pointer type, call the normal function. */
11411 if (dest_align == 0)
11412 return NULL_RTX;
11413
11414 /* If SRC and DEST are the same (and not volatile), do nothing. */
11415 if (operand_equal_p (src, dest, 0))
11416 {
11417 tree expr;
11418
11419 if (fcode != BUILT_IN_MEMPCPY_CHK)
11420 {
11421 /* Evaluate and ignore LEN in case it has side-effects. */
11422 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11423 return expand_expr (dest, target, mode, EXPAND_NORMAL);
11424 }
11425
11426 expr = fold_build_pointer_plus (dest, len);
11427 return expand_expr (expr, target, mode, EXPAND_NORMAL);
11428 }
11429
11430 /* __memmove_chk special case. */
11431 if (fcode == BUILT_IN_MEMMOVE_CHK)
11432 {
11433 unsigned int src_align = get_pointer_alignment (src);
11434
11435 if (src_align == 0)
11436 return NULL_RTX;
11437
11438 /* If src is categorized for a readonly section we can use
11439 normal __memcpy_chk. */
11440 if (readonly_data_expr (src))
11441 {
11442 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
11443 if (!fn)
11444 return NULL_RTX;
11445 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
11446 dest, src, len, size);
11447 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11448 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11449 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11450 }
11451 }
11452 return NULL_RTX;
11453 }
11454 }
11455
11456 /* Emit warning if a buffer overflow is detected at compile time. */
11457
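/* E.g. __builtin___strcpy_chk (buf, "hello", 4) copies six bytes
   (including the NUL) into a four-byte object: the source length 5 is
   not less than the object size 4, so we warn. */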
11458 static void
11459 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11460 {
11461 int is_strlen = 0;
11462 tree len, size;
11463 location_t loc = tree_nonartificial_location (exp);
11464
11465 switch (fcode)
11466 {
11467 case BUILT_IN_STRCPY_CHK:
11468 case BUILT_IN_STPCPY_CHK:
11469 /* For __strcat_chk the warning will be emitted only if overflowing
11470 by at least strlen (dest) + 1 bytes. */
11471 case BUILT_IN_STRCAT_CHK:
11472 len = CALL_EXPR_ARG (exp, 1);
11473 size = CALL_EXPR_ARG (exp, 2);
11474 is_strlen = 1;
11475 break;
11476 case BUILT_IN_STRNCAT_CHK:
11477 case BUILT_IN_STRNCPY_CHK:
11478 case BUILT_IN_STPNCPY_CHK:
11479 len = CALL_EXPR_ARG (exp, 2);
11480 size = CALL_EXPR_ARG (exp, 3);
11481 break;
11482 case BUILT_IN_SNPRINTF_CHK:
11483 case BUILT_IN_VSNPRINTF_CHK:
11484 len = CALL_EXPR_ARG (exp, 1);
11485 size = CALL_EXPR_ARG (exp, 3);
11486 break;
11487 default:
11488 gcc_unreachable ();
11489 }
11490
11491 if (!len || !size)
11492 return;
11493
11494 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
11495 return;
11496
11497 if (is_strlen)
11498 {
11499 len = c_strlen (len, 1);
11500 if (! len || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
11501 return;
11502 }
11503 else if (fcode == BUILT_IN_STRNCAT_CHK)
11504 {
11505 tree src = CALL_EXPR_ARG (exp, 1);
11506 if (! src || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
11507 return;
11508 src = c_strlen (src, 1);
11509 if (! src || ! tree_fits_uhwi_p (src))
11510 {
11511 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
11512 exp, get_callee_fndecl (exp));
11513 return;
11514 }
11515 else if (tree_int_cst_lt (src, size))
11516 return;
11517 }
11518 else if (! tree_fits_uhwi_p (len) || ! tree_int_cst_lt (size, len))
11519 return;
11520
11521 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
11522 exp, get_callee_fndecl (exp));
11523 }
11524
11525 /* Emit warning if a buffer overflow is detected at compile time
11526 in __sprintf_chk/__vsprintf_chk calls. */
11527
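/* E.g. __builtin___sprintf_chk (buf, 0, 5, "hello world") writes
   eleven characters plus a NUL into a five-byte object; the format
   contains no % directives, so LEN is known at compile time and the
   warning fires. */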
11528 static void
11529 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11530 {
11531 tree size, len, fmt;
11532 const char *fmt_str;
11533 int nargs = call_expr_nargs (exp);
11534
11535 /* Verify the required arguments in the original call. */
11536
11537 if (nargs < 4)
11538 return;
11539 size = CALL_EXPR_ARG (exp, 2);
11540 fmt = CALL_EXPR_ARG (exp, 3);
11541
11542 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
11543 return;
11544
11545 /* Check whether the format is a literal string constant. */
11546 fmt_str = c_getstr (fmt);
11547 if (fmt_str == NULL)
11548 return;
11549
11550 if (!init_target_chars ())
11551 return;
11552
11553 /* If the format doesn't contain % args or %%, we know its size. */
11554 if (strchr (fmt_str, target_percent) == 0)
11555 len = build_int_cstu (size_type_node, strlen (fmt_str));
11556 /* If the format is "%s" and first ... argument is a string literal,
11557 we know it too. */
11558 else if (fcode == BUILT_IN_SPRINTF_CHK
11559 && strcmp (fmt_str, target_percent_s) == 0)
11560 {
11561 tree arg;
11562
11563 if (nargs < 5)
11564 return;
11565 arg = CALL_EXPR_ARG (exp, 4);
11566 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11567 return;
11568
11569 len = c_strlen (arg, 1);
11570 if (!len || ! tree_fits_uhwi_p (len))
11571 return;
11572 }
11573 else
11574 return;
11575
11576 if (! tree_int_cst_lt (len, size))
11577 warning_at (tree_nonartificial_location (exp),
11578 0, "%Kcall to %D will always overflow destination buffer",
11579 exp, get_callee_fndecl (exp));
11580 }
11581
11582 /* Emit warning if a free is called with address of a variable. */
11583
11584 static void
11585 maybe_emit_free_warning (tree exp)
11586 {
11587 tree arg = CALL_EXPR_ARG (exp, 0);
11588
11589 STRIP_NOPS (arg);
11590 if (TREE_CODE (arg) != ADDR_EXPR)
11591 return;
11592
11593 arg = get_base_address (TREE_OPERAND (arg, 0));
11594 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
11595 return;
11596
11597 if (SSA_VAR_P (arg))
11598 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
11599 "%Kattempt to free a non-heap object %qD", exp, arg);
11600 else
11601 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
11602 "%Kattempt to free a non-heap object", exp);
11603 }
11604
11605 /* Fold a call to __builtin_object_size with arguments PTR and OST,
11606 if possible. */
11607
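/* A sketch of the effect: given char buf[10],
   __builtin_object_size (&buf[4], 0) folds to 6, while a pointer whose
   target is unknown folds to the failure value -- (size_t) -1 for
   types 0 and 1, and 0 for types 2 and 3. */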
11608 static tree
11609 fold_builtin_object_size (tree ptr, tree ost)
11610 {
11611 unsigned HOST_WIDE_INT bytes;
11612 int object_size_type;
11613
11614 if (!validate_arg (ptr, POINTER_TYPE)
11615 || !validate_arg (ost, INTEGER_TYPE))
11616 return NULL_TREE;
11617
11618 STRIP_NOPS (ost);
11619
11620 if (TREE_CODE (ost) != INTEGER_CST
11621 || tree_int_cst_sgn (ost) < 0
11622 || compare_tree_int (ost, 3) > 0)
11623 return NULL_TREE;
11624
11625 object_size_type = tree_to_shwi (ost);
11626
11627 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11628 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11629 and (size_t) 0 for types 2 and 3. */
11630 if (TREE_SIDE_EFFECTS (ptr))
11631 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
11632
11633 if (TREE_CODE (ptr) == ADDR_EXPR)
11634 {
11635 bytes = compute_builtin_object_size (ptr, object_size_type);
11636 if (wi::fits_to_tree_p (bytes, size_type_node))
11637 return build_int_cstu (size_type_node, bytes);
11638 }
11639 else if (TREE_CODE (ptr) == SSA_NAME)
11640 {
11641 /* If object size is not known yet, delay folding until
11642 later. Maybe subsequent passes will help determining
11643 it. */
11644 bytes = compute_builtin_object_size (ptr, object_size_type);
11645 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
11646 && wi::fits_to_tree_p (bytes, size_type_node))
11647 return build_int_cstu (size_type_node, bytes);
11648 }
11649
11650 return NULL_TREE;
11651 }
11652
11653 /* Builtins with folding operations that operate on "..." arguments
11654 need special handling; we need to store the arguments in a convenient
11655 data structure before attempting any folding. Fortunately there are
11656 only a few builtins that fall into this category. FNDECL is the
11657 function, EXP is the CALL_EXPR for the call. */
11658
11659 static tree
11660 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
11661 {
11662 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11663 tree ret = NULL_TREE;
11664
11665 switch (fcode)
11666 {
11667 case BUILT_IN_FPCLASSIFY:
11668 ret = fold_builtin_fpclassify (loc, args, nargs);
11669 break;
11670
11671 default:
11672 break;
11673 }
11674 if (ret)
11675 {
11676 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11677 SET_EXPR_LOCATION (ret, loc);
11678 TREE_NO_WARNING (ret) = 1;
11679 return ret;
11680 }
11681 return NULL_TREE;
11682 }
11683
11684 /* Initialize format string characters in the target charset. */
11685
11686 bool
11687 init_target_chars (void)
11688 {
11689 static bool init;
11690 if (!init)
11691 {
11692 target_newline = lang_hooks.to_target_charset ('\n');
11693 target_percent = lang_hooks.to_target_charset ('%');
11694 target_c = lang_hooks.to_target_charset ('c');
11695 target_s = lang_hooks.to_target_charset ('s');
11696 if (target_newline == 0 || target_percent == 0 || target_c == 0
11697 || target_s == 0)
11698 return false;
11699
11700 target_percent_c[0] = target_percent;
11701 target_percent_c[1] = target_c;
11702 target_percent_c[2] = '\0';
11703
11704 target_percent_s[0] = target_percent;
11705 target_percent_s[1] = target_s;
11706 target_percent_s[2] = '\0';
11707
11708 target_percent_s_newline[0] = target_percent;
11709 target_percent_s_newline[1] = target_s;
11710 target_percent_s_newline[2] = target_newline;
11711 target_percent_s_newline[3] = '\0';
11712
11713 init = true;
11714 }
11715 return true;
11716 }
11717
11718 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
11719 and no overflow/underflow occurred. INEXACT is true if M was not
11720 exactly calculated. TYPE is the tree type for the result. This
11721 function assumes that you cleared the MPFR flags before calculating
11722 M, so that any flag set afterwards was set by that calculation.
11723 Return NULL_TREE if any checks fail. */
11724
11725 static tree
11726 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
11727 {
11728 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11729 overflow/underflow occurred. If -frounding-math, proceed iff the
11730 result of calling FUNC was exact. */
11731 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
11732 && (!flag_rounding_math || !inexact))
11733 {
11734 REAL_VALUE_TYPE rr;
11735
11736 real_from_mpfr (&rr, m, type, GMP_RNDN);
11737 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
11738 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
11739 but the mpfr_t is not, then we underflowed in the
11740 conversion. */
11741 if (real_isfinite (&rr)
11742 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
11743 {
11744 REAL_VALUE_TYPE rmode;
11745
11746 real_convert (&rmode, TYPE_MODE (type), &rr);
11747 /* Proceed iff the specified mode can hold the value. */
11748 if (real_identical (&rmode, &rr))
11749 return build_real (type, rmode);
11750 }
11751 }
11752 return NULL_TREE;
11753 }
11754
11755 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
11756 number and no overflow/underflow occurred. INEXACT is true if M
11757 was not exactly calculated. TYPE is the tree type for the result.
11758 This function assumes that you cleared the MPFR flags before
11759 calculating M, so that any flag set afterwards was set by that
11760 calculation. Return NULL_TREE if any checks fail; if FORCE_CONVERT
11761 is true, bypass the checks. */
11762
11763 static tree
11764 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
11765 {
11766 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11767 overflow/underflow occurred. If -frounding-math, proceed iff the
11768 result of calling FUNC was exact. */
11769 if (force_convert
11770 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
11771 && !mpfr_overflow_p () && !mpfr_underflow_p ()
11772 && (!flag_rounding_math || !inexact)))
11773 {
11774 REAL_VALUE_TYPE re, im;
11775
11776 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
11777 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
11778 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
11779 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
11780 but the mpfr_t is not, then we underflowed in the
11781 conversion. */
11782 if (force_convert
11783 || (real_isfinite (&re) && real_isfinite (&im)
11784 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
11785 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
11786 {
11787 REAL_VALUE_TYPE re_mode, im_mode;
11788
11789 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
11790 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
11791 /* Proceed iff the specified mode can hold the value. */
11792 if (force_convert
11793 || (real_identical (&re_mode, &re)
11794 && real_identical (&im_mode, &im)))
11795 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
11796 build_real (TREE_TYPE (type), im_mode));
11797 }
11798 }
11799 return NULL_TREE;
11800 }
11801
11802 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
11803 FUNC on it and return the resulting value as a tree with type TYPE.
11804 If MIN and/or MAX are not NULL, then the supplied ARG must be
11805 within those bounds. If INCLUSIVE is true, then MIN/MAX are
11806 acceptable values, otherwise they are not. The mpfr precision is
11807 set to the precision of TYPE. We assume that function FUNC returns
11808 zero if the result could be calculated exactly within the requested
11809 precision. */
11810
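/* E.g. folding sin (1.0) would call this with FUNC == mpfr_sin and no
   MIN/MAX bounds: the argument is converted to an MPFR number at the
   precision of TYPE, mpfr_sin is applied, and do_mpfr_ckconv turns the
   result back into a REAL_CST if all the checks pass. */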
11811 static tree
11812 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
11813 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
11814 bool inclusive)
11815 {
11816 tree result = NULL_TREE;
11817
11818 STRIP_NOPS (arg);
11819
11820 /* To proceed, MPFR must exactly represent the target floating point
11821 format, which only happens when the target base equals two. */
11822 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11823 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
11824 {
11825 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
11826
11827 if (real_isfinite (ra)
11828 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
11829 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
11830 {
11831 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11832 const int prec = fmt->p;
11833 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
11834 int inexact;
11835 mpfr_t m;
11836
11837 mpfr_init2 (m, prec);
11838 mpfr_from_real (m, ra, GMP_RNDN);
11839 mpfr_clear_flags ();
11840 inexact = func (m, m, rnd);
11841 result = do_mpfr_ckconv (m, type, inexact);
11842 mpfr_clear (m);
11843 }
11844 }
11845
11846 return result;
11847 }
11848
11849 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
11850 FUNC on it and return the resulting value as a tree with type TYPE.
11851 The mpfr precision is set to the precision of TYPE. We assume that
11852 function FUNC returns zero if the result could be calculated
11853 exactly within the requested precision. */
11854
11855 static tree
11856 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
11857 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
11858 {
11859 tree result = NULL_TREE;
11860
11861 STRIP_NOPS (arg1);
11862 STRIP_NOPS (arg2);
11863
11864 /* To proceed, MPFR must exactly represent the target floating point
11865 format, which only happens when the target base equals two. */
11866 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11867 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
11868 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
11869 {
11870 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
11871 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
11872
11873 if (real_isfinite (ra1) && real_isfinite (ra2))
11874 {
11875 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11876 const int prec = fmt->p;
11877 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
11878 int inexact;
11879 mpfr_t m1, m2;
11880
11881 mpfr_inits2 (prec, m1, m2, NULL);
11882 mpfr_from_real (m1, ra1, GMP_RNDN);
11883 mpfr_from_real (m2, ra2, GMP_RNDN);
11884 mpfr_clear_flags ();
11885 inexact = func (m1, m1, m2, rnd);
11886 result = do_mpfr_ckconv (m1, type, inexact);
11887 mpfr_clears (m1, m2, NULL);
11888 }
11889 }
11890
11891 return result;
11892 }
11893
11894 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
11895 FUNC on it and return the resulting value as a tree with type TYPE.
11896 The mpfr precision is set to the precision of TYPE. We assume that
11897 function FUNC returns zero if the result could be calculated
11898 exactly within the requested precision. */
11899
11900 static tree
11901 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
11902 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
11903 {
11904 tree result = NULL_TREE;
11905
11906 STRIP_NOPS (arg1);
11907 STRIP_NOPS (arg2);
11908 STRIP_NOPS (arg3);
11909
11910 /* To proceed, MPFR must exactly represent the target floating point
11911 format, which only happens when the target base equals two. */
11912 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11913 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
11914 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
11915 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
11916 {
11917 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
11918 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
11919 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
11920
11921 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
11922 {
11923 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11924 const int prec = fmt->p;
11925 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
11926 int inexact;
11927 mpfr_t m1, m2, m3;
11928
11929 mpfr_inits2 (prec, m1, m2, m3, NULL);
11930 mpfr_from_real (m1, ra1, GMP_RNDN);
11931 mpfr_from_real (m2, ra2, GMP_RNDN);
11932 mpfr_from_real (m3, ra3, GMP_RNDN);
11933 mpfr_clear_flags ();
11934 inexact = func (m1, m1, m2, m3, rnd);
11935 result = do_mpfr_ckconv (m1, type, inexact);
11936 mpfr_clears (m1, m2, m3, NULL);
11937 }
11938 }
11939
11940 return result;
11941 }
11942
11943 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
11944 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
11945 If ARG_SINP and ARG_COSP are NULL then the result is returned
11946 as a complex value.
11947 The type is taken from the type of ARG and is used for setting the
11948 precision of the calculation and results. */
11949
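/* E.g. sincos (1.0, &s, &c) with a constant argument folds to the
   compound expression (s = 0.841..., c = 0.540...); with null pointer
   arguments the caller instead receives the complex constant
   0.540... + 0.841...i, i.e. cos + i*sin, as used for cexpi. */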
11950 static tree
11951 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
11952 {
11953 tree const type = TREE_TYPE (arg);
11954 tree result = NULL_TREE;
11955
11956 STRIP_NOPS (arg);
11957
11958 /* To proceed, MPFR must exactly represent the target floating point
11959 format, which only happens when the target base equals two. */
11960 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11961 && TREE_CODE (arg) == REAL_CST
11962 && !TREE_OVERFLOW (arg))
11963 {
11964 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
11965
11966 if (real_isfinite (ra))
11967 {
11968 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11969 const int prec = fmt->p;
11970 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
11971 tree result_s, result_c;
11972 int inexact;
11973 mpfr_t m, ms, mc;
11974
11975 mpfr_inits2 (prec, m, ms, mc, NULL);
11976 mpfr_from_real (m, ra, GMP_RNDN);
11977 mpfr_clear_flags ();
11978 inexact = mpfr_sin_cos (ms, mc, m, rnd);
11979 result_s = do_mpfr_ckconv (ms, type, inexact);
11980 result_c = do_mpfr_ckconv (mc, type, inexact);
11981 mpfr_clears (m, ms, mc, NULL);
11982 if (result_s && result_c)
11983 {
11984 /* If we are to return a complex value, do so. */
11985 if (!arg_sinp && !arg_cosp)
11986 return build_complex (build_complex_type (type),
11987 result_c, result_s);
11988
11989 /* Dereference the sin/cos pointer arguments. */
11990 arg_sinp = build_fold_indirect_ref (arg_sinp);
11991 arg_cosp = build_fold_indirect_ref (arg_cosp);
11992 /* Proceed if valid pointer types were passed in. */
11993 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
11994 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
11995 {
11996 /* Set the values. */
11997 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
11998 result_s);
11999 TREE_SIDE_EFFECTS (result_s) = 1;
12000 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
12001 result_c);
12002 TREE_SIDE_EFFECTS (result_c) = 1;
12003 /* Combine the assignments into a compound expr. */
12004 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12005 result_s, result_c));
12006 }
12007 }
12008 }
12009 }
12010 return result;
12011 }
12012
12013 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
12014 two-argument mpfr order N Bessel function FUNC on them and return
12015 the resulting value as a tree with type TYPE. The mpfr precision
12016 is set to the precision of TYPE. We assume that function FUNC
12017 returns zero if the result could be calculated exactly within the
12018 requested precision. */
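/* E.g. jn (2, 1.5) with constant arguments folds via
   mpfr_jn (m, 2, m, rnd) to a REAL_CST; yn additionally requires a
   strictly positive argument, which is why its caller passes &dconst0
   with INCLUSIVE false as the MIN bound. */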
12019 static tree
12020 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
12021 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
12022 const REAL_VALUE_TYPE *min, bool inclusive)
12023 {
12024 tree result = NULL_TREE;
12025
12026 STRIP_NOPS (arg1);
12027 STRIP_NOPS (arg2);
12028
12029 /* To proceed, MPFR must exactly represent the target floating point
12030 format, which only happens when the target base equals two. */
12031 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12032 && tree_fits_shwi_p (arg1)
12033 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12034 {
12035 const HOST_WIDE_INT n = tree_to_shwi (arg1);
12036 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
12037
12038 if (n == (long)n
12039 && real_isfinite (ra)
12040 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
12041 {
12042 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12043 const int prec = fmt->p;
12044 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12045 int inexact;
12046 mpfr_t m;
12047
12048 mpfr_init2 (m, prec);
12049 mpfr_from_real (m, ra, GMP_RNDN);
12050 mpfr_clear_flags ();
12051 inexact = func (m, n, m, rnd);
12052 result = do_mpfr_ckconv (m, type, inexact);
12053 mpfr_clear (m);
12054 }
12055 }
12056
12057 return result;
12058 }
12059
12060 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
12061 the pointer *(ARG_QUO) and return the result. The type is taken
12062 from the type of ARG0 and is used for setting the precision of the
12063 calculation and results. */
12064
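/* E.g. remquo (5.0, 3.0, &q) folds to the compound expression
   (q = 2, -1.0): the quotient of 5.0/3.0 rounded to nearest is 2, and
   the remainder is 5.0 - 2*3.0 = -1.0. */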
12065 static tree
12066 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
12067 {
12068 tree const type = TREE_TYPE (arg0);
12069 tree result = NULL_TREE;
12070
12071 STRIP_NOPS (arg0);
12072 STRIP_NOPS (arg1);
12073
12074 /* To proceed, MPFR must exactly represent the target floating point
12075 format, which only happens when the target base equals two. */
12076 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12077 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
12078 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
12079 {
12080 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
12081 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
12082
12083 if (real_isfinite (ra0) && real_isfinite (ra1))
12084 {
12085 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12086 const int prec = fmt->p;
12087 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12088 tree result_rem;
12089 long integer_quo;
12090 mpfr_t m0, m1;
12091
12092 mpfr_inits2 (prec, m0, m1, NULL);
12093 mpfr_from_real (m0, ra0, GMP_RNDN);
12094 mpfr_from_real (m1, ra1, GMP_RNDN);
12095 mpfr_clear_flags ();
12096 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
12097 /* Remquo is independent of the rounding mode, so pass
12098 inexact=0 to do_mpfr_ckconv(). */
12099 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
12100 mpfr_clears (m0, m1, NULL);
12101 if (result_rem)
12102 {
12103 /* MPFR calculates quo in the host's long, so it may
12104 return more bits in quo than the target int can hold
12105 if sizeof (host long) > sizeof (target int). This can
12106 happen even for native compilers in LP64 mode. In
12107 these cases, reduce the quo value modulo the largest
12108 number that the target int can hold, leaving one
12109 bit for the sign. */
12110 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
12111 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
12112
12113 /* Dereference the quo pointer argument. */
12114 arg_quo = build_fold_indirect_ref (arg_quo);
12115 /* Proceed iff a valid pointer type was passed in. */
12116 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
12117 {
12118 /* Set the value. */
12119 tree result_quo
12120 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
12121 build_int_cst (TREE_TYPE (arg_quo),
12122 integer_quo));
12123 TREE_SIDE_EFFECTS (result_quo) = 1;
12124 /* Combine the quo assignment with the rem. */
12125 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12126 result_quo, result_rem));
12127 }
12128 }
12129 }
12130 }
12131 return result;
12132 }
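
/* Illustrative sketch, not part of this file: with both arguments
   constant, the folder above lets a call such as

     int q;
     double r = __builtin_remquo (5.0, 3.0, &q);

   collapse at compile time: 5/3 rounds to the nearest integer 2, so
   the remainder is 5 - 2*3 = -1.0 and q receives 2. The COMPOUND_EXPR
   built above performs the store through the quo pointer and yields
   the remainder as its value. */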
12133
12134 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
12135 resulting value as a tree with type TYPE. The mpfr precision is
12136 set to the precision of TYPE. We assume that this mpfr function
12137 returns zero if the result could be calculated exactly within the
12138 requested precision. In addition, the integer pointer represented
12139 by ARG_SG will be dereferenced and set to the appropriate signgam
12140 (-1,1) value. */
12141
12142 static tree
12143 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
12144 {
12145 tree result = NULL_TREE;
12146
12147 STRIP_NOPS (arg);
12148
12149 /* To proceed, MPFR must exactly represent the target floating point
12150 format, which only happens when the target base equals two. Also
12151 verify ARG is a constant and that ARG_SG is an int pointer. */
12152 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12153 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
12154 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
12155 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
12156 {
12157 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
12158
12159 /* In addition to NaN and Inf, the argument cannot be zero or a
12160 negative integer. */
12161 if (real_isfinite (ra)
12162 && ra->cl != rvc_zero
12163 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
12164 {
12165 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12166 const int prec = fmt->p;
12167 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
12168 int inexact, sg;
12169 mpfr_t m;
12170 tree result_lg;
12171
12172 mpfr_init2 (m, prec);
12173 mpfr_from_real (m, ra, GMP_RNDN);
12174 mpfr_clear_flags ();
12175 inexact = mpfr_lgamma (m, &sg, m, rnd);
12176 result_lg = do_mpfr_ckconv (m, type, inexact);
12177 mpfr_clear (m);
12178 if (result_lg)
12179 {
12180 tree result_sg;
12181
12182 /* Dereference the arg_sg pointer argument. */
12183 arg_sg = build_fold_indirect_ref (arg_sg);
12184 /* Assign the signgam value into *arg_sg. */
12185 result_sg = fold_build2 (MODIFY_EXPR,
12186 TREE_TYPE (arg_sg), arg_sg,
12187 build_int_cst (TREE_TYPE (arg_sg), sg));
12188 TREE_SIDE_EFFECTS (result_sg) = 1;
12189 /* Combine the signgam assignment with the lgamma result. */
12190 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12191 result_sg, result_lg));
12192 }
12193 }
12194 }
12195
12196 return result;
12197 }
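
/* Illustrative sketch, not part of this file: for a constant call such as

     int sg;
     double r = __builtin_lgamma_r (-0.5, &sg);

   gamma (-0.5) = -2*sqrt(pi) is negative, so the folder stores -1 in
   sg and returns the constant log (2*sqrt(pi)) ~= 1.2655, again
   packaged as a COMPOUND_EXPR of the signgam store and the value. */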
12198
12199 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
12200 function FUNC on it and return the resulting value as a tree with
12201 type TYPE. The mpfr precision is set to the precision of TYPE. We
12202 assume that function FUNC returns zero if the result could be
12203 calculated exactly within the requested precision. */
12204
12205 static tree
12206 do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
12207 {
12208 tree result = NULL_TREE;
12209
12210 STRIP_NOPS (arg);
12211
12212 /* To proceed, MPFR must exactly represent the target floating point
12213 format, which only happens when the target base equals two. */
12214 if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
12215 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
12216 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
12217 {
12218 const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
12219 const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
12220
12221 if (real_isfinite (re) && real_isfinite (im))
12222 {
12223 const struct real_format *const fmt =
12224 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
12225 const int prec = fmt->p;
12226 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
12227 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
12228 int inexact;
12229 mpc_t m;
12230
12231 mpc_init2 (m, prec);
12232 mpfr_from_real (mpc_realref (m), re, rnd);
12233 mpfr_from_real (mpc_imagref (m), im, rnd);
12234 mpfr_clear_flags ();
12235 inexact = func (m, m, crnd);
12236 result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
12237 mpc_clear (m);
12238 }
12239 }
12240
12241 return result;
12242 }
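
/* Illustrative sketch, not part of this file: the equivalent
   standalone MPC program, here evaluating csin (1 + 2i) at IEEE
   double precision (the precision 53 is an assumption of the example;
   build with -lmpc -lmpfr -lgmp).

     #include <stdio.h>
     #include <mpc.h>

     int
     main (void)
     {
       mpc_t m;

       mpc_init2 (m, 53);
       mpc_set_d_d (m, 1.0, 2.0, MPC_RNDNN);  // m = 1 + 2i
       mpc_sin (m, m, MPC_RNDNN);             // m = csin (1 + 2i)
       mpc_out_str (stdout, 10, 0, m, MPC_RNDNN);
       mpc_clear (m);
       return 0;
     }
*/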
12243
12244 /* If arguments ARG0 and ARG1 are COMPLEX_CSTs, call the two-argument
12245 mpc function FUNC on them and return the resulting value as a tree
12246 with type TYPE. The mpfr precision is set to the precision of
12247 TYPE. We assume that function FUNC returns zero if the result
12248 could be calculated exactly within the requested precision. If
12249 DO_NONFINITE is true, then fold expressions containing Inf or NaN
12250 in the arguments and/or results. */
12251
12252 tree
12253 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
12254 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
12255 {
12256 tree result = NULL_TREE;
12257
12258 STRIP_NOPS (arg0);
12259 STRIP_NOPS (arg1);
12260
12261 /* To proceed, MPFR must exactly represent the target floating point
12262 format, which only happens when the target base equals two. */
12263 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
12264 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
12265 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
12266 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
12267 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
12268 {
12269 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
12270 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
12271 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
12272 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
12273
12274 if (do_nonfinite
12275 || (real_isfinite (re0) && real_isfinite (im0)
12276 && real_isfinite (re1) && real_isfinite (im1)))
12277 {
12278 const struct real_format *const fmt =
12279 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
12280 const int prec = fmt->p;
12281 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
12282 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
12283 int inexact;
12284 mpc_t m0, m1;
12285
12286 mpc_init2 (m0, prec);
12287 mpc_init2 (m1, prec);
12288 mpfr_from_real (mpc_realref (m0), re0, rnd);
12289 mpfr_from_real (mpc_imagref (m0), im0, rnd);
12290 mpfr_from_real (mpc_realref (m1), re1, rnd);
12291 mpfr_from_real (mpc_imagref (m1), im1, rnd);
12292 mpfr_clear_flags ();
12293 inexact = func (m0, m0, m1, crnd);
12294 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
12295 mpc_clear (m0);
12296 mpc_clear (m1);
12297 }
12298 }
12299
12300 return result;
12301 }
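
/* Illustrative note, not part of this file: one caller passes mpc_pow
   as FUNC when folding cpow, so for example

     _Complex double z = __builtin_cpow (1.0 + 1.0i, 2.0 + 0.0i);

   can fold to the constant 2.0i, since (1+i)^2 = 2i. With
   DO_NONFINITE set (as when folding initializers), operands or
   results containing Inf or NaN are folded as well rather than being
   left for runtime. */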
12302
12303 /* A wrapper function for builtin folding that prevents warnings for
12304 "statement without effect" and the like, caused by removing the
12305 call node before the warning is generated. */
12306
12307 tree
12308 fold_call_stmt (gcall *stmt, bool ignore)
12309 {
12310 tree ret = NULL_TREE;
12311 tree fndecl = gimple_call_fndecl (stmt);
12312 location_t loc = gimple_location (stmt);
12313 if (fndecl
12314 && TREE_CODE (fndecl) == FUNCTION_DECL
12315 && DECL_BUILT_IN (fndecl)
12316 && !gimple_call_va_arg_pack_p (stmt))
12317 {
12318 int nargs = gimple_call_num_args (stmt);
12319 tree *args = (nargs > 0
12320 ? gimple_call_arg_ptr (stmt, 0)
12321 : &error_mark_node);
12322
12323 if (avoid_folding_inline_builtin (fndecl))
12324 return NULL_TREE;
12325 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
12326 {
12327 return targetm.fold_builtin (fndecl, nargs, args, ignore);
12328 }
12329 else
12330 {
12331 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
12332 if (ret)
12333 {
12334 /* Propagate location information from original call to
12335 expansion of builtin. Otherwise things like
12336 maybe_emit_chk_warning, that operate on the expansion
12337 of a builtin, will use the wrong location information. */
12338 if (gimple_has_location (stmt))
12339 {
12340 tree realret = ret;
12341 if (TREE_CODE (ret) == NOP_EXPR)
12342 realret = TREE_OPERAND (ret, 0);
12343 if (CAN_HAVE_LOCATION_P (realret)
12344 && !EXPR_HAS_LOCATION (realret))
12345 SET_EXPR_LOCATION (realret, loc);
12346 return realret;
12347 }
12348 return ret;
12349 }
12350 }
12351 }
12352 return NULL_TREE;
12353 }
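
/* Illustrative note, the statement is an assumed example: handed a
   gimple call such as

     x = __builtin_sqrt (4.0);

   fold_call_stmt returns the constant 2.0 so the call can be replaced,
   and the call's location is copied onto the replacement expression so
   that later diagnostics still point at the original call site. */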
12354
12355 /* Look up the builtin declaration (via builtin_decl_explicit) that
12356 corresponds to DECL and set ASMSPEC as its user assembler name.
12357 DECL must be a function decl that declares a builtin. */
12358
12359 void
12360 set_builtin_user_assembler_name (tree decl, const char *asmspec)
12361 {
12362 tree builtin;
12363 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
12364 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
12365 && asmspec != 0);
12366
12367 builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
12368 set_user_assembler_name (builtin, asmspec);
12369 switch (DECL_FUNCTION_CODE (decl))
12370 {
12371 case BUILT_IN_MEMCPY:
12372 init_block_move_fn (asmspec);
12373 memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
12374 break;
12375 case BUILT_IN_MEMSET:
12376 init_block_clear_fn (asmspec);
12377 memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
12378 break;
12379 case BUILT_IN_MEMMOVE:
12380 memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
12381 break;
12382 case BUILT_IN_MEMCMP:
12383 memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
12384 break;
12385 case BUILT_IN_ABORT:
12386 abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
12387 break;
12388 case BUILT_IN_FFS:
12389 if (INT_TYPE_SIZE < BITS_PER_WORD)
12390 {
12391 set_user_assembler_libfunc ("ffs", asmspec);
12392 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
12393 MODE_INT, 0), "ffs");
12394 }
12395 break;
12396 default:
12397 break;
12398 }
12399 }
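
/* Illustrative sketch, not part of this file: this function is reached
   when a translation unit renames a builtin at the assembler level,
   e.g. (my_memcpy is a placeholder name)

     extern void *memcpy (void *, const void *, __SIZE_TYPE__)
       __asm__ ("my_memcpy");

   after which block moves that GCC emits on its own (struct
   assignment, expansion of builtin memcpy) also call my_memcpy
   instead of memcpy. */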
12400
12401 /* Return true if DECL is a builtin that expands to a constant or similarly
12402 simple code. */
12403 bool
12404 is_simple_builtin (tree decl)
12405 {
12406 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
12407 switch (DECL_FUNCTION_CODE (decl))
12408 {
12409 /* Builtins that expand to constants. */
12410 case BUILT_IN_CONSTANT_P:
12411 case BUILT_IN_EXPECT:
12412 case BUILT_IN_OBJECT_SIZE:
12413 case BUILT_IN_UNREACHABLE:
12414 /* Simple register moves or loads from stack. */
12415 case BUILT_IN_ASSUME_ALIGNED:
12416 case BUILT_IN_RETURN_ADDRESS:
12417 case BUILT_IN_EXTRACT_RETURN_ADDR:
12418 case BUILT_IN_FROB_RETURN_ADDR:
12419 case BUILT_IN_RETURN:
12420 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
12421 case BUILT_IN_FRAME_ADDRESS:
12422 case BUILT_IN_VA_END:
12423 case BUILT_IN_STACK_SAVE:
12424 case BUILT_IN_STACK_RESTORE:
12425 /* Exception state returns or moves registers around. */
12426 case BUILT_IN_EH_FILTER:
12427 case BUILT_IN_EH_POINTER:
12428 case BUILT_IN_EH_COPY_VALUES:
12429 return true;
12430
12431 default:
12432 return false;
12433 }
12434
12435 return false;
12436 }
12437
12438 /* Return true if DECL is a builtin that is not expensive, i.e., one that
12439 is most probably expanded inline into reasonably simple code. This is a
12440 superset of is_simple_builtin. */
12441 bool
12442 is_inexpensive_builtin (tree decl)
12443 {
12444 if (!decl)
12445 return false;
12446 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
12447 return true;
12448 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
12449 switch (DECL_FUNCTION_CODE (decl))
12450 {
12451 case BUILT_IN_ABS:
12452 case BUILT_IN_ALLOCA:
12453 case BUILT_IN_ALLOCA_WITH_ALIGN:
12454 case BUILT_IN_BSWAP16:
12455 case BUILT_IN_BSWAP32:
12456 case BUILT_IN_BSWAP64:
12457 case BUILT_IN_CLZ:
12458 case BUILT_IN_CLZIMAX:
12459 case BUILT_IN_CLZL:
12460 case BUILT_IN_CLZLL:
12461 case BUILT_IN_CTZ:
12462 case BUILT_IN_CTZIMAX:
12463 case BUILT_IN_CTZL:
12464 case BUILT_IN_CTZLL:
12465 case BUILT_IN_FFS:
12466 case BUILT_IN_FFSIMAX:
12467 case BUILT_IN_FFSL:
12468 case BUILT_IN_FFSLL:
12469 case BUILT_IN_IMAXABS:
12470 case BUILT_IN_FINITE:
12471 case BUILT_IN_FINITEF:
12472 case BUILT_IN_FINITEL:
12473 case BUILT_IN_FINITED32:
12474 case BUILT_IN_FINITED64:
12475 case BUILT_IN_FINITED128:
12476 case BUILT_IN_FPCLASSIFY:
12477 case BUILT_IN_ISFINITE:
12478 case BUILT_IN_ISINF_SIGN:
12479 case BUILT_IN_ISINF:
12480 case BUILT_IN_ISINFF:
12481 case BUILT_IN_ISINFL:
12482 case BUILT_IN_ISINFD32:
12483 case BUILT_IN_ISINFD64:
12484 case BUILT_IN_ISINFD128:
12485 case BUILT_IN_ISNAN:
12486 case BUILT_IN_ISNANF:
12487 case BUILT_IN_ISNANL:
12488 case BUILT_IN_ISNAND32:
12489 case BUILT_IN_ISNAND64:
12490 case BUILT_IN_ISNAND128:
12491 case BUILT_IN_ISNORMAL:
12492 case BUILT_IN_ISGREATER:
12493 case BUILT_IN_ISGREATEREQUAL:
12494 case BUILT_IN_ISLESS:
12495 case BUILT_IN_ISLESSEQUAL:
12496 case BUILT_IN_ISLESSGREATER:
12497 case BUILT_IN_ISUNORDERED:
12498 case BUILT_IN_VA_ARG_PACK:
12499 case BUILT_IN_VA_ARG_PACK_LEN:
12500 case BUILT_IN_VA_COPY:
12501 case BUILT_IN_TRAP:
12502 case BUILT_IN_SAVEREGS:
12503 case BUILT_IN_POPCOUNTL:
12504 case BUILT_IN_POPCOUNTLL:
12505 case BUILT_IN_POPCOUNTIMAX:
12506 case BUILT_IN_POPCOUNT:
12507 case BUILT_IN_PARITYL:
12508 case BUILT_IN_PARITYLL:
12509 case BUILT_IN_PARITYIMAX:
12510 case BUILT_IN_PARITY:
12511 case BUILT_IN_LABS:
12512 case BUILT_IN_LLABS:
12513 case BUILT_IN_PREFETCH:
12514 case BUILT_IN_ACC_ON_DEVICE:
12515 return true;
12516
12517 default:
12518 return is_simple_builtin (decl);
12519 }
12520
12521 return false;
12522 }
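
/* Illustrative note, not part of this file: the two predicates above
   feed cost heuristics (the callers live outside this file). For
   example

     int f (unsigned x) { return __builtin_popcount (x); }

   contains a call that is_inexpensive_builtin accepts, since popcount
   typically expands to one instruction or a tiny libcall, whereas a
   call to memcpy matches neither predicate and both return false. */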