1 /* Expand builtin functions.
2 Copyright (C) 1988-2015 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* Legacy warning! Please add no further builtin simplifications here
21 (apart from pure constant folding) - builtin simplifications should go
22 to match.pd or gimple-fold.c instead. */
23
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "backend.h"
28 #include "predict.h"
29 #include "tree.h"
30 #include "gimple.h"
31 #include "rtl.h"
32 #include "alias.h"
33 #include "fold-const.h"
34 #include "stringpool.h"
35 #include "stor-layout.h"
36 #include "calls.h"
37 #include "varasm.h"
38 #include "tree-object-size.h"
39 #include "realmpfr.h"
40 #include "cfgrtl.h"
41 #include "internal-fn.h"
42 #include "flags.h"
43 #include "regs.h"
44 #include "except.h"
45 #include "insn-config.h"
46 #include "expmed.h"
47 #include "dojump.h"
48 #include "explow.h"
49 #include "emit-rtl.h"
50 #include "stmt.h"
51 #include "expr.h"
52 #include "insn-codes.h"
53 #include "optabs.h"
54 #include "libfuncs.h"
55 #include "recog.h"
56 #include "output.h"
57 #include "typeclass.h"
58 #include "tm_p.h"
59 #include "target.h"
60 #include "langhooks.h"
61 #include "tree-ssanames.h"
62 #include "tree-dfa.h"
63 #include "value-prof.h"
64 #include "diagnostic-core.h"
65 #include "builtins.h"
66 #include "asan.h"
67 #include "cilk.h"
68 #include "cgraph.h"
69 #include "tree-chkp.h"
70 #include "rtl-chkp.h"
71
72
73 static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));
74
75 struct target_builtins default_target_builtins;
76 #if SWITCHABLE_TARGET
77 struct target_builtins *this_target_builtins = &default_target_builtins;
78 #endif
79
80 /* Define the names of the builtin function types and codes. */
81 const char *const built_in_class_names[BUILT_IN_LAST]
82 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
83
84 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
85 const char * built_in_names[(int) END_BUILTINS] =
86 {
87 #include "builtins.def"
88 };
89 #undef DEF_BUILTIN
90
91 /* Set up an array of builtin_info_type; make sure each element decl is
92 initialized to NULL_TREE. */
93 builtin_info_type builtin_info[(int)END_BUILTINS];
94
95 /* Non-zero if __builtin_constant_p should be folded right away. */
96 bool force_folding_builtin_constant_p;
97
98 static rtx c_readstr (const char *, machine_mode);
99 static int target_char_cast (tree, char *);
100 static rtx get_memory_rtx (tree, tree);
101 static int apply_args_size (void);
102 static int apply_result_size (void);
103 static rtx result_vector (int, rtx);
104 static void expand_builtin_prefetch (tree);
105 static rtx expand_builtin_apply_args (void);
106 static rtx expand_builtin_apply_args_1 (void);
107 static rtx expand_builtin_apply (rtx, rtx, rtx);
108 static void expand_builtin_return (rtx);
109 static enum type_class type_to_class (tree);
110 static rtx expand_builtin_classify_type (tree);
111 static void expand_errno_check (tree, rtx);
112 static rtx expand_builtin_mathfn (tree, rtx, rtx);
113 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
114 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
115 static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
116 static rtx expand_builtin_interclass_mathfn (tree, rtx);
117 static rtx expand_builtin_sincos (tree);
118 static rtx expand_builtin_cexpi (tree, rtx);
119 static rtx expand_builtin_int_roundingfn (tree, rtx);
120 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
121 static rtx expand_builtin_next_arg (void);
122 static rtx expand_builtin_va_start (tree);
123 static rtx expand_builtin_va_end (tree);
124 static rtx expand_builtin_va_copy (tree);
125 static rtx expand_builtin_strcmp (tree, rtx);
126 static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
127 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, machine_mode);
128 static rtx expand_builtin_memcpy (tree, rtx);
129 static rtx expand_builtin_memcpy_with_bounds (tree, rtx);
130 static rtx expand_builtin_memcpy_args (tree, tree, tree, rtx, tree);
131 static rtx expand_builtin_mempcpy (tree, rtx, machine_mode);
132 static rtx expand_builtin_mempcpy_with_bounds (tree, rtx, machine_mode);
133 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
134 machine_mode, int, tree);
135 static rtx expand_builtin_strcpy (tree, rtx);
136 static rtx expand_builtin_strcpy_args (tree, tree, rtx);
137 static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
138 static rtx expand_builtin_strncpy (tree, rtx);
139 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, machine_mode);
140 static rtx expand_builtin_memset (tree, rtx, machine_mode);
141 static rtx expand_builtin_memset_with_bounds (tree, rtx, machine_mode);
142 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
143 static rtx expand_builtin_bzero (tree);
144 static rtx expand_builtin_strlen (tree, rtx, machine_mode);
145 static rtx expand_builtin_alloca (tree, bool);
146 static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
147 static rtx expand_builtin_frame_address (tree, tree);
148 static tree stabilize_va_list_loc (location_t, tree, int);
149 static rtx expand_builtin_expect (tree, rtx);
150 static tree fold_builtin_constant_p (tree);
151 static tree fold_builtin_classify_type (tree);
152 static tree fold_builtin_strlen (location_t, tree, tree);
153 static tree fold_builtin_inf (location_t, tree, int);
154 static tree fold_builtin_nan (tree, tree, int);
155 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
156 static bool validate_arg (const_tree, enum tree_code code);
157 static bool integer_valued_real_p (tree);
158 static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
159 static rtx expand_builtin_fabs (tree, rtx, rtx);
160 static rtx expand_builtin_signbit (tree, rtx);
161 static tree fold_builtin_sqrt (location_t, tree, tree);
162 static tree fold_builtin_cbrt (location_t, tree, tree);
163 static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
164 static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
165 static tree fold_builtin_cos (location_t, tree, tree, tree);
166 static tree fold_builtin_cosh (location_t, tree, tree, tree);
167 static tree fold_builtin_tan (tree, tree);
168 static tree fold_builtin_trunc (location_t, tree, tree);
169 static tree fold_builtin_floor (location_t, tree, tree);
170 static tree fold_builtin_ceil (location_t, tree, tree);
171 static tree fold_builtin_round (location_t, tree, tree);
172 static tree fold_builtin_int_roundingfn (location_t, tree, tree);
173 static tree fold_builtin_bitop (tree, tree);
174 static tree fold_builtin_strchr (location_t, tree, tree, tree);
175 static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
176 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
177 static tree fold_builtin_strcmp (location_t, tree, tree);
178 static tree fold_builtin_strncmp (location_t, tree, tree, tree);
179 static tree fold_builtin_signbit (location_t, tree, tree);
180 static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
181 static tree fold_builtin_isascii (location_t, tree);
182 static tree fold_builtin_toascii (location_t, tree);
183 static tree fold_builtin_isdigit (location_t, tree);
184 static tree fold_builtin_fabs (location_t, tree, tree);
185 static tree fold_builtin_abs (location_t, tree, tree);
186 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
187 enum tree_code);
188 static tree fold_builtin_0 (location_t, tree);
189 static tree fold_builtin_1 (location_t, tree, tree);
190 static tree fold_builtin_2 (location_t, tree, tree, tree);
191 static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
192 static tree fold_builtin_varargs (location_t, tree, tree*, int);
193
194 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
195 static tree fold_builtin_strstr (location_t, tree, tree, tree);
196 static tree fold_builtin_strrchr (location_t, tree, tree, tree);
197 static tree fold_builtin_strspn (location_t, tree, tree);
198 static tree fold_builtin_strcspn (location_t, tree, tree);
199
200 static rtx expand_builtin_object_size (tree);
201 static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
202 enum built_in_function);
203 static void maybe_emit_chk_warning (tree, enum built_in_function);
204 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
205 static void maybe_emit_free_warning (tree);
206 static tree fold_builtin_object_size (tree, tree);
207
208 unsigned HOST_WIDE_INT target_newline;
209 unsigned HOST_WIDE_INT target_percent;
210 static unsigned HOST_WIDE_INT target_c;
211 static unsigned HOST_WIDE_INT target_s;
212 char target_percent_c[3];
213 char target_percent_s[3];
214 char target_percent_s_newline[4];
215 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
216 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
217 static tree do_mpfr_arg2 (tree, tree, tree,
218 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
219 static tree do_mpfr_arg3 (tree, tree, tree, tree,
220 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
221 static tree do_mpfr_sincos (tree, tree, tree);
222 static tree do_mpfr_bessel_n (tree, tree, tree,
223 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
224 const REAL_VALUE_TYPE *, bool);
225 static tree do_mpfr_remquo (tree, tree, tree);
226 static tree do_mpfr_lgamma_r (tree, tree, tree);
227 static void expand_builtin_sync_synchronize (void);
228
229 /* Return true if NAME starts with __builtin_ or __sync_. */
230
231 static bool
232 is_builtin_name (const char *name)
233 {
234 if (strncmp (name, "__builtin_", 10) == 0)
235 return true;
236 if (strncmp (name, "__sync_", 7) == 0)
237 return true;
238 if (strncmp (name, "__atomic_", 9) == 0)
239 return true;
240 if (flag_cilkplus
241 && (!strcmp (name, "__cilkrts_detach")
242 || !strcmp (name, "__cilkrts_pop_frame")))
243 return true;
244 return false;
245 }
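/* A minimal standalone sketch of the prefix test above, for illustration
   only (the Cilk Plus special case is omitted); "is_builtin_name_sketch"
   is a hypothetical name, not part of GCC.  */
#if 0
static bool
is_builtin_name_sketch (const char *name)
{
  /* Matches e.g. "__builtin_memcpy", "__sync_fetch_and_add_4" and
     "__atomic_load_n"; rejects plain "memcpy".  */
  return (strncmp (name, "__builtin_", 10) == 0
          || strncmp (name, "__sync_", 7) == 0
          || strncmp (name, "__atomic_", 9) == 0);
}
#endif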
246
247
248 /* Return true if DECL is a function symbol representing a built-in. */
249
250 bool
251 is_builtin_fn (tree decl)
252 {
253 return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
254 }
255
256 /* Return true if NODE should be considered for inline expansion regardless
257 of the optimization level. This is the case whenever a function is invoked
258 under its "internal" name, which normally contains the prefix "__builtin". */
259
260 static bool
261 called_as_built_in (tree node)
262 {
263 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
264 we want the name used to call the function, not the name it
265 will have. */
266 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
267 return is_builtin_name (name);
268 }
269
270 /* Compute values M and N such that M divides (address of EXP - N) and such
271 that N < M. If these numbers can be determined, store M in *ALIGNP and N in
272 *BITPOSP and return true. Otherwise return false and store BITS_PER_UNIT in
273 *ALIGNP and any bit-offset in *BITPOSP.
274
275 Note that the address (and thus the alignment) computed here is based
276 on the address to which a symbol resolves, whereas DECL_ALIGN is based
277 on the address at which an object is actually located. These two
278 addresses are not always the same. For example, on ARM targets,
279 the address &foo of a Thumb function foo() has the lowest bit set,
280 whereas foo() itself starts on an even address.
281
282 If ADDR_P is true we are taking the address of the memory reference EXP
283 and thus cannot rely on the access taking place. */
284
285 static bool
286 get_object_alignment_2 (tree exp, unsigned int *alignp,
287 unsigned HOST_WIDE_INT *bitposp, bool addr_p)
288 {
289 HOST_WIDE_INT bitsize, bitpos;
290 tree offset;
291 machine_mode mode;
292 int unsignedp, volatilep;
293 unsigned int align = BITS_PER_UNIT;
294 bool known_alignment = false;
295
296 /* Get the innermost object and the constant (bitpos) and possibly
297 variable (offset) offset of the access. */
298 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
299 &mode, &unsignedp, &volatilep, true);
300
301 /* Extract alignment information from the innermost object and
302 possibly adjust bitpos and offset. */
303 if (TREE_CODE (exp) == FUNCTION_DECL)
304 {
305 /* Function addresses can encode extra information besides their
306 alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
307 allows the low bit to be used as a virtual bit, we know
308 that the address itself must be at least 2-byte aligned. */
309 if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
310 align = 2 * BITS_PER_UNIT;
311 }
312 else if (TREE_CODE (exp) == LABEL_DECL)
313 ;
314 else if (TREE_CODE (exp) == CONST_DECL)
315 {
316 /* The alignment of a CONST_DECL is determined by its initializer. */
317 exp = DECL_INITIAL (exp);
318 align = TYPE_ALIGN (TREE_TYPE (exp));
319 if (CONSTANT_CLASS_P (exp))
320 align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
321
322 known_alignment = true;
323 }
324 else if (DECL_P (exp))
325 {
326 align = DECL_ALIGN (exp);
327 known_alignment = true;
328 }
329 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
330 {
331 align = TYPE_ALIGN (TREE_TYPE (exp));
332 }
333 else if (TREE_CODE (exp) == INDIRECT_REF
334 || TREE_CODE (exp) == MEM_REF
335 || TREE_CODE (exp) == TARGET_MEM_REF)
336 {
337 tree addr = TREE_OPERAND (exp, 0);
338 unsigned ptr_align;
339 unsigned HOST_WIDE_INT ptr_bitpos;
340 unsigned HOST_WIDE_INT ptr_bitmask = ~0;
341
342 /* If the address is explicitly aligned, handle that. */
343 if (TREE_CODE (addr) == BIT_AND_EXPR
344 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
345 {
346 ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
347 ptr_bitmask *= BITS_PER_UNIT;
348 align = ptr_bitmask & -ptr_bitmask;
349 addr = TREE_OPERAND (addr, 0);
350 }
351
352 known_alignment
353 = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
354 align = MAX (ptr_align, align);
355
356 /* Re-apply explicit alignment to the bitpos. */
357 ptr_bitpos &= ptr_bitmask;
358
359 /* The alignment of the pointer operand in a TARGET_MEM_REF
360 has to take the variable offset parts into account. */
361 if (TREE_CODE (exp) == TARGET_MEM_REF)
362 {
363 if (TMR_INDEX (exp))
364 {
365 unsigned HOST_WIDE_INT step = 1;
366 if (TMR_STEP (exp))
367 step = TREE_INT_CST_LOW (TMR_STEP (exp));
368 align = MIN (align, (step & -step) * BITS_PER_UNIT);
369 }
370 if (TMR_INDEX2 (exp))
371 align = BITS_PER_UNIT;
372 known_alignment = false;
373 }
374
375 /* When EXP is an actual memory reference then we can use
376 TYPE_ALIGN of a pointer indirection to derive alignment.
377 Do so only if get_pointer_alignment_1 did not reveal absolute
378 alignment knowledge and if using that alignment would
379 improve the situation. */
380 if (!addr_p && !known_alignment
381 && TYPE_ALIGN (TREE_TYPE (exp)) > align)
382 align = TYPE_ALIGN (TREE_TYPE (exp));
383 else
384 {
385 /* Else adjust bitpos accordingly. */
386 bitpos += ptr_bitpos;
387 if (TREE_CODE (exp) == MEM_REF
388 || TREE_CODE (exp) == TARGET_MEM_REF)
389 bitpos += mem_ref_offset (exp).to_short_addr () * BITS_PER_UNIT;
390 }
391 }
392 else if (TREE_CODE (exp) == STRING_CST)
393 {
394 /* STRING_CSTs are the only constant objects we allow not to be
395 wrapped inside a CONST_DECL. */
396 align = TYPE_ALIGN (TREE_TYPE (exp));
397 if (CONSTANT_CLASS_P (exp))
398 align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
399
400 known_alignment = true;
401 }
402
403 /* If there is a non-constant offset part, extract the maximum
404 alignment that can prevail. */
405 if (offset)
406 {
407 unsigned int trailing_zeros = tree_ctz (offset);
408 if (trailing_zeros < HOST_BITS_PER_INT)
409 {
410 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
411 if (inner)
412 align = MIN (align, inner);
413 }
414 }
415
416 *alignp = align;
417 *bitposp = bitpos & (*alignp - 1);
418 return known_alignment;
419 }
420
421 /* For a memory reference expression EXP compute values M and N such that M
422 divides (&EXP - N) and such that N < M. If these numbers can be determined,
423 store M in *ALIGNP and N in *BITPOSP and return true. Otherwise return false
424 and store BITS_PER_UNIT in *ALIGNP and any bit-offset in *BITPOSP. */
425
426 bool
427 get_object_alignment_1 (tree exp, unsigned int *alignp,
428 unsigned HOST_WIDE_INT *bitposp)
429 {
430 return get_object_alignment_2 (exp, alignp, bitposp, false);
431 }
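/* A usage sketch of the M/N contract, under the assumption that the address
   of EXP is always of the form 4*k + 3 bytes: then M is 4 * BITS_PER_UNIT
   and N is 3 * BITS_PER_UNIT.  Not part of GCC; EXP is assumed in hand.  */
#if 0
  unsigned int align;            /* Receives M, in bits.  */
  unsigned HOST_WIDE_INT bitpos; /* Receives N, in bits.  */
  if (get_object_alignment_1 (exp, &align, &bitpos))
    /* N < M holds by construction; bitpos is the known misalignment.  */
    gcc_checking_assert (bitpos < align);
#endif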
432
433 /* Return the alignment in bits of EXP, an object. */
434
435 unsigned int
436 get_object_alignment (tree exp)
437 {
438 unsigned HOST_WIDE_INT bitpos = 0;
439 unsigned int align;
440
441 get_object_alignment_1 (exp, &align, &bitpos);
442
443 /* align and bitpos now specify known low bits of the pointer.
444 ptr & (align - 1) == bitpos. */
445
446 if (bitpos != 0)
447 align = (bitpos & -bitpos);
448 return align;
449 }
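/* Worked example of the bitpos & -bitpos reduction above: if the analysis
   found align == 32 bits with bitpos == 24, the pointer satisfies
   ptr % 32 == 24, so the largest provable alignment is the lowest set bit
   of 24, i.e. 8 bits (one byte).  */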
450
451 /* For a pointer-valued expression EXP compute values M and N such that M
452 divides (EXP - N) and such that N < M. If these numbers can be determined,
453 store M in *ALIGNP and N in *BITPOSP and return true. Return false if
454 the results are just a conservative approximation.
455
456 If EXP is not a pointer, false is returned too. */
457
458 bool
459 get_pointer_alignment_1 (tree exp, unsigned int *alignp,
460 unsigned HOST_WIDE_INT *bitposp)
461 {
462 STRIP_NOPS (exp);
463
464 if (TREE_CODE (exp) == ADDR_EXPR)
465 return get_object_alignment_2 (TREE_OPERAND (exp, 0),
466 alignp, bitposp, true);
467 else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
468 {
469 unsigned int align;
470 unsigned HOST_WIDE_INT bitpos;
471 bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
472 &align, &bitpos);
473 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
474 bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
475 else
476 {
477 unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
478 if (trailing_zeros < HOST_BITS_PER_INT)
479 {
480 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
481 if (inner)
482 align = MIN (align, inner);
483 }
484 }
485 *alignp = align;
486 *bitposp = bitpos & (align - 1);
487 return res;
488 }
489 else if (TREE_CODE (exp) == SSA_NAME
490 && POINTER_TYPE_P (TREE_TYPE (exp)))
491 {
492 unsigned int ptr_align, ptr_misalign;
493 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
494
495 if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
496 {
497 *bitposp = ptr_misalign * BITS_PER_UNIT;
498 *alignp = ptr_align * BITS_PER_UNIT;
499 /* We cannot really tell whether this result is an approximation. */
500 return true;
501 }
502 else
503 {
504 *bitposp = 0;
505 *alignp = BITS_PER_UNIT;
506 return false;
507 }
508 }
509 else if (TREE_CODE (exp) == INTEGER_CST)
510 {
511 *alignp = BIGGEST_ALIGNMENT;
512 *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
513 & (BIGGEST_ALIGNMENT - 1));
514 return true;
515 }
516
517 *bitposp = 0;
518 *alignp = BITS_PER_UNIT;
519 return false;
520 }
521
522 /* Return the alignment in bits of EXP, a pointer-valued expression.
523 The alignment returned is, by default, the alignment of the thing that
524 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
525
526 Otherwise, look at the expression to see if we can do better, i.e., if the
527 expression is actually pointing at an object whose alignment is tighter. */
528
529 unsigned int
530 get_pointer_alignment (tree exp)
531 {
532 unsigned HOST_WIDE_INT bitpos = 0;
533 unsigned int align;
534
535 get_pointer_alignment_1 (exp, &align, &bitpos);
536
537 /* align and bitpos now specify known low bits of the pointer.
538 ptr & (align - 1) == bitpos. */
539
540 if (bitpos != 0)
541 align = (bitpos & -bitpos);
542
543 return align;
544 }
545
546 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
547 way to get it, because the string could contain a zero byte in the middle;
548 TREE_STRING_LENGTH is the size of the character array, not the string.
549
550 ONLY_VALUE should be nonzero if the result is not going to be emitted
551 into the instruction stream and zero if it is going to be expanded.
552 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
553 is returned, otherwise NULL, since
554 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
555 evaluate the side-effects.
556
557 If ONLY_VALUE is two then we do not emit warnings about out-of-bound
558 accesses. Note that this implies the result is not going to be emitted
559 into the instruction stream.
560
561 The value returned is of type `ssizetype'.
562
563 Unfortunately, string_constant can't access the values of const char
564 arrays with initializers, so neither can we do so here. */
565
566 tree
567 c_strlen (tree src, int only_value)
568 {
569 tree offset_node;
570 HOST_WIDE_INT offset;
571 int max;
572 const char *ptr;
573 location_t loc;
574
575 STRIP_NOPS (src);
576 if (TREE_CODE (src) == COND_EXPR
577 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
578 {
579 tree len1, len2;
580
581 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
582 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
583 if (tree_int_cst_equal (len1, len2))
584 return len1;
585 }
586
587 if (TREE_CODE (src) == COMPOUND_EXPR
588 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
589 return c_strlen (TREE_OPERAND (src, 1), only_value);
590
591 loc = EXPR_LOC_OR_LOC (src, input_location);
592
593 src = string_constant (src, &offset_node);
594 if (src == 0)
595 return NULL_TREE;
596
597 max = TREE_STRING_LENGTH (src) - 1;
598 ptr = TREE_STRING_POINTER (src);
599
600 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
601 {
602 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
603 compute the offset to the following null if we don't know where to
604 start searching for it. */
605 int i;
606
607 for (i = 0; i < max; i++)
608 if (ptr[i] == 0)
609 return NULL_TREE;
610
611 /* We don't know the starting offset, but we do know that the string
612 has no internal zero bytes. We can assume that the offset falls
613 within the bounds of the string; otherwise, the programmer deserves
614 what he gets. Subtract the offset from the length of the string,
615 and return that. This would perhaps not be valid if we were dealing
616 with named arrays in addition to literal string constants. */
617
618 return size_diffop_loc (loc, size_int (max), offset_node);
619 }
620
621 /* We have a known offset into the string. Start searching there for
622 a null character if we can represent it as a single HOST_WIDE_INT. */
623 if (offset_node == 0)
624 offset = 0;
625 else if (! tree_fits_shwi_p (offset_node))
626 offset = -1;
627 else
628 offset = tree_to_shwi (offset_node);
629
630 /* If the offset is known to be out of bounds, warn, and call strlen at
631 runtime. */
632 if (offset < 0 || offset > max)
633 {
634 /* Suppress multiple warnings for propagated constant strings. */
635 if (only_value != 2
636 && !TREE_NO_WARNING (src))
637 {
638 warning_at (loc, 0, "offset outside bounds of constant string");
639 TREE_NO_WARNING (src) = 1;
640 }
641 return NULL_TREE;
642 }
643
644 /* Use strlen to search for the first zero byte. Since any strings
645 constructed with build_string will have nulls appended, we win even
646 if we get handed something like (char[4])"abcd".
647
648 Since OFFSET is our starting index into the string, no further
649 calculation is needed. */
650 return ssize_int (strlen (ptr + offset));
651 }
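/* A worked example of the constant case above, assuming a hypothetical
   caller: for SRC denoting "hello" + 2, string_constant yields the
   STRING_CST "hello" with offset_node == 2, so max == 5, offset == 2, and
   the function returns ssize_int (strlen (ptr + 2)) == ssize_int (3).
   For SRC denoting "foo\0bar" with a non-constant offset, the internal-zero
   scan above bails out and NULL_TREE is returned.  */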
652
653 /* Return a char pointer for a C string if it is a string constant
654 or the sum of a string constant and an integer constant. */
655
656 const char *
657 c_getstr (tree src)
658 {
659 tree offset_node;
660
661 src = string_constant (src, &offset_node);
662 if (src == 0)
663 return 0;
664
665 if (offset_node == 0)
666 return TREE_STRING_POINTER (src);
667 else if (!tree_fits_uhwi_p (offset_node)
668 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
669 return 0;
670
671 return TREE_STRING_POINTER (src) + tree_to_uhwi (offset_node);
672 }
673
674 /* Return a constant integer corresponding to the target reading
675 GET_MODE_BITSIZE (MODE) bits from the string constant STR. */
676
677 static rtx
678 c_readstr (const char *str, machine_mode mode)
679 {
680 HOST_WIDE_INT ch;
681 unsigned int i, j;
682 HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];
683
684 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
685 unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
686 / HOST_BITS_PER_WIDE_INT;
687
688 gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
689 for (i = 0; i < len; i++)
690 tmp[i] = 0;
691
692 ch = 1;
693 for (i = 0; i < GET_MODE_SIZE (mode); i++)
694 {
695 j = i;
696 if (WORDS_BIG_ENDIAN)
697 j = GET_MODE_SIZE (mode) - i - 1;
698 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
699 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
700 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
701 j *= BITS_PER_UNIT;
702
703 if (ch)
704 ch = (unsigned char) str[i];
705 tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
706 }
707
708 wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
709 return immed_wide_int_const (c, mode);
710 }
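/* Worked example of the packing loop above: for STR == "abcd" and a
   32-bit SImode integer, a little-endian target (neither BYTES_ nor
   WORDS_BIG_ENDIAN) places 'a' (0x61) in bits 0-7 and yields the constant
   0x64636261, while a big-endian target yields 0x61626364.  */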
711
712 /* Cast a target constant CST to a target CHAR and, if that value fits into
713 the host char type, return zero and store that value in the variable
714 pointed to by P. Otherwise return 1. */
715
716 static int
717 target_char_cast (tree cst, char *p)
718 {
719 unsigned HOST_WIDE_INT val, hostval;
720
721 if (TREE_CODE (cst) != INTEGER_CST
722 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
723 return 1;
724
725 /* Do not care if it fits or not right here. */
726 val = TREE_INT_CST_LOW (cst);
727
728 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
729 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
730
731 hostval = val;
732 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
733 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
734
735 if (val != hostval)
736 return 1;
737
738 *p = hostval;
739 return 0;
740 }
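/* For instance, on an ASCII target an INTEGER_CST of 0x41 fits a target
   char, so the function stores 'A' through P and returns 0; a constant
   whose CHAR_TYPE_SIZE-bit value does not fit the host char makes it
   return 1 instead.  */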
741
742 /* Similar to save_expr, but assumes that arbitrary code is not executed
743 in between the multiple evaluations. In particular, we assume that a
744 non-addressable local variable will not be modified. */
745
746 static tree
747 builtin_save_expr (tree exp)
748 {
749 if (TREE_CODE (exp) == SSA_NAME
750 || (TREE_ADDRESSABLE (exp) == 0
751 && (TREE_CODE (exp) == PARM_DECL
752 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
753 return exp;
754
755 return save_expr (exp);
756 }
757
758 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
759 times to get the address of either a higher stack frame, or a return
760 address located within it (depending on FNDECL_CODE). */
761
762 static rtx
763 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
764 {
765 int i;
766
767 #ifdef INITIAL_FRAME_ADDRESS_RTX
768 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
769 #else
770 rtx tem;
771
772 /* For a zero count with __builtin_return_address, we don't care what
773 frame address we return, because target-specific definitions will
774 override us. Therefore frame pointer elimination is OK, and using
775 the soft frame pointer is OK.
776
777 For a nonzero count, or a zero count with __builtin_frame_address,
778 we require a stable offset from the current frame pointer to the
779 previous one, so we must use the hard frame pointer, and
780 we must disable frame pointer elimination. */
781 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
782 tem = frame_pointer_rtx;
783 else
784 {
785 tem = hard_frame_pointer_rtx;
786
787 /* Tell reload not to eliminate the frame pointer. */
788 crtl->accesses_prior_frames = 1;
789 }
790 #endif
791
792 /* Some machines need special handling before we can access
793 arbitrary frames. For example, on the SPARC, we must first flush
794 all register windows to the stack. */
795 #ifdef SETUP_FRAME_ADDRESSES
796 if (count > 0)
797 SETUP_FRAME_ADDRESSES ();
798 #endif
799
800 /* On the SPARC, the return address is not in the frame, it is in a
801 register. There is no way to access it off of the current frame
802 pointer, but it can be accessed off the previous frame pointer by
803 reading the value from the register window save area. */
804 if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
805 count--;
806
807 /* Scan back COUNT frames to the specified frame. */
808 for (i = 0; i < count; i++)
809 {
810 /* Assume the dynamic chain pointer is in the word that the
811 frame address points to, unless otherwise specified. */
812 #ifdef DYNAMIC_CHAIN_ADDRESS
813 tem = DYNAMIC_CHAIN_ADDRESS (tem);
814 #endif
815 tem = memory_address (Pmode, tem);
816 tem = gen_frame_mem (Pmode, tem);
817 tem = copy_to_reg (tem);
818 }
819
820 /* For __builtin_frame_address, return what we've got. But, on
821 the SPARC for example, we may have to add a bias. */
822 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
823 #ifdef FRAME_ADDR_RTX
824 return FRAME_ADDR_RTX (tem);
825 #else
826 return tem;
827 #endif
828
829 /* For __builtin_return_address, get the return address from that frame. */
830 #ifdef RETURN_ADDR_RTX
831 tem = RETURN_ADDR_RTX (count, tem);
832 #else
833 tem = memory_address (Pmode,
834 plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
835 tem = gen_frame_mem (Pmode, tem);
836 #endif
837 return tem;
838 }
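/* For example, a source-level __builtin_return_address (2) reaches this
   expander with COUNT == 2: on most targets the loop above follows the
   dynamic chain twice to find the grandparent frame, and the return
   address is then read from that frame (via RETURN_ADDR_RTX when the
   target defines it).  */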
839
840 /* Alias set used for setjmp buffer. */
841 static alias_set_type setjmp_alias_set = -1;
842
843 /* Construct the leading half of a __builtin_setjmp call. Control will
844 return to RECEIVER_LABEL. This is also called directly by the SJLJ
845 exception handling code. */
846
847 void
848 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
849 {
850 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
851 rtx stack_save;
852 rtx mem;
853
854 if (setjmp_alias_set == -1)
855 setjmp_alias_set = new_alias_set ();
856
857 buf_addr = convert_memory_address (Pmode, buf_addr);
858
859 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
860
861 /* We store the frame pointer and the address of receiver_label in
862 the buffer and use the rest of it for the stack save area, which
863 is machine-dependent. */
864
865 mem = gen_rtx_MEM (Pmode, buf_addr);
866 set_mem_alias_set (mem, setjmp_alias_set);
867 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
868
869 mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
870 GET_MODE_SIZE (Pmode)));
871 set_mem_alias_set (mem, setjmp_alias_set);
872
873 emit_move_insn (validize_mem (mem),
874 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
875
876 stack_save = gen_rtx_MEM (sa_mode,
877 plus_constant (Pmode, buf_addr,
878 2 * GET_MODE_SIZE (Pmode)));
879 set_mem_alias_set (stack_save, setjmp_alias_set);
880 emit_stack_save (SAVE_NONLOCAL, &stack_save);
881
882 /* If there is further processing to do, do it. */
883 if (targetm.have_builtin_setjmp_setup ())
884 emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));
885
886 /* We have a nonlocal label. */
887 cfun->has_nonlocal_label = 1;
888 }
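/* Layout of the buffer established above, in Pmode-sized words:
   word 0: the frame value from targetm.builtin_setjmp_frame_value ();
   word 1: the address of RECEIVER_LABEL;
   words 2 and up: the nonlocal stack save area (sa_mode).  */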
889
890 /* Construct the trailing part of a __builtin_setjmp call. This is
891 also called directly by the SJLJ exception handling code.
892 If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler. */
893
894 void
895 expand_builtin_setjmp_receiver (rtx receiver_label)
896 {
897 rtx chain;
898
899 /* Mark the FP as used when we get here, so we have to make sure it's
900 marked as used by this function. */
901 emit_use (hard_frame_pointer_rtx);
902
903 /* Mark the static chain as clobbered here so life information
904 doesn't get messed up for it. */
905 chain = targetm.calls.static_chain (current_function_decl, true);
906 if (chain && REG_P (chain))
907 emit_clobber (chain);
908
909 /* Now put in the code to restore the frame pointer, and argument
910 pointer, if needed. */
911 if (! targetm.have_nonlocal_goto ())
912 {
913 /* First adjust our frame pointer to its actual value. It was
914 previously set to the start of the virtual area corresponding to
915 the stacked variables when we branched here and now needs to be
916 adjusted to the actual hardware fp value.
917
918 Assignments to virtual registers are converted by
919 instantiate_virtual_regs into the corresponding assignment
920 to the underlying register (fp in this case) that makes
921 the original assignment true.
922 So the following insn will actually be decrementing fp by
923 STARTING_FRAME_OFFSET. */
924 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
925
926 /* Restoring the frame pointer also modifies the hard frame pointer.
927 Mark it used (so that the previous assignment remains live once
928 the frame pointer is eliminated) and clobbered (to represent the
929 implicit update from the assignment). */
930 emit_use (hard_frame_pointer_rtx);
931 emit_clobber (hard_frame_pointer_rtx);
932 }
933
934 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
935 if (fixed_regs[ARG_POINTER_REGNUM])
936 {
937 #ifdef ELIMINABLE_REGS
938 /* If the argument pointer can be eliminated in favor of the
939 frame pointer, we don't need to restore it. We assume here
940 that if such an elimination is present, it can always be used.
941 This is the case on all known machines; if we don't make this
942 assumption, we do unnecessary saving on many machines. */
943 size_t i;
944 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
945
946 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
947 if (elim_regs[i].from == ARG_POINTER_REGNUM
948 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
949 break;
950
951 if (i == ARRAY_SIZE (elim_regs))
952 #endif
953 {
954 /* Now restore our arg pointer from the address at which it
955 was saved in our stack frame. */
956 emit_move_insn (crtl->args.internal_arg_pointer,
957 copy_to_reg (get_arg_pointer_save_area ()));
958 }
959 }
960 #endif
961
962 if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
963 emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
964 else if (targetm.have_nonlocal_goto_receiver ())
965 emit_insn (targetm.gen_nonlocal_goto_receiver ());
966 else
967 { /* Nothing */ }
968
969 /* We must not allow the code we just generated to be reordered by
970 scheduling. Specifically, the update of the frame pointer must
971 happen immediately, not later. */
972 emit_insn (gen_blockage ());
973 }
974
975 /* __builtin_longjmp is passed a pointer to an array of five words (not
976 all will be used on all machines). It operates similarly to the C
977 library function of the same name, but is more efficient. Much of
978 the code below is copied from the handling of non-local gotos. */
979
980 static void
981 expand_builtin_longjmp (rtx buf_addr, rtx value)
982 {
983 rtx fp, lab, stack;
984 rtx_insn *insn, *last;
985 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
986
987 /* DRAP is needed for stack realignment if longjmp is expanded in the
988 current function. */
989 if (SUPPORTS_STACK_ALIGNMENT)
990 crtl->need_drap = true;
991
992 if (setjmp_alias_set == -1)
993 setjmp_alias_set = new_alias_set ();
994
995 buf_addr = convert_memory_address (Pmode, buf_addr);
996
997 buf_addr = force_reg (Pmode, buf_addr);
998
999 /* We require that the user must pass a second argument of 1, because
1000 that is what builtin_setjmp will return. */
1001 gcc_assert (value == const1_rtx);
1002
1003 last = get_last_insn ();
1004 if (targetm.have_builtin_longjmp ())
1005 emit_insn (targetm.gen_builtin_longjmp (buf_addr));
1006 else
1007 {
1008 fp = gen_rtx_MEM (Pmode, buf_addr);
1009 lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
1010 GET_MODE_SIZE (Pmode)));
1011
1012 stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
1013 2 * GET_MODE_SIZE (Pmode)));
1014 set_mem_alias_set (fp, setjmp_alias_set);
1015 set_mem_alias_set (lab, setjmp_alias_set);
1016 set_mem_alias_set (stack, setjmp_alias_set);
1017
1018 /* Pick up FP, label, and SP from the block and jump. This code is
1019 from expand_goto in stmt.c; see there for detailed comments. */
1020 if (targetm.have_nonlocal_goto ())
1021 /* We have to pass a value to the nonlocal_goto pattern that will
1022 get copied into the static_chain pointer, but it does not matter
1023 what that value is, because builtin_setjmp does not use it. */
1024 emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
1025 else
1026 {
1027 lab = copy_to_reg (lab);
1028
1029 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1030 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1031
1032 emit_move_insn (hard_frame_pointer_rtx, fp);
1033 emit_stack_restore (SAVE_NONLOCAL, stack);
1034
1035 emit_use (hard_frame_pointer_rtx);
1036 emit_use (stack_pointer_rtx);
1037 emit_indirect_jump (lab);
1038 }
1039 }
1040
1041 /* Search backwards and mark the jump insn as a non-local goto.
1042 Note that this precludes the use of __builtin_longjmp to a
1043 __builtin_setjmp target in the same function. However, we've
1044 already cautioned the user that these functions are for
1045 internal exception handling use only. */
1046 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1047 {
1048 gcc_assert (insn != last);
1049
1050 if (JUMP_P (insn))
1051 {
1052 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1053 break;
1054 }
1055 else if (CALL_P (insn))
1056 break;
1057 }
1058 }
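/* Source-level pairing sketch (illustration only, not part of this file):
   the second argument to __builtin_longjmp must be the literal 1, which is
   exactly what the gcc_assert above enforces.  */
#if 0
void *buf[5];
if (__builtin_setjmp (buf) == 0)
  __builtin_longjmp (buf, 1);
#endif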
1059
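/* Return true if more call expression arguments remain in ITER. */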
1060 static inline bool
1061 more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
1062 {
1063 return (iter->i < iter->n);
1064 }
1065
1066 /* This function validates the types of a function call argument list
1067 against a specified list of tree_codes. If the last specifier is a 0,
1068 that represents an ellipsis; otherwise the last specifier must be a
1069 VOID_TYPE. */
1070
1071 static bool
1072 validate_arglist (const_tree callexpr, ...)
1073 {
1074 enum tree_code code;
1075 bool res = false;
1076 va_list ap;
1077 const_call_expr_arg_iterator iter;
1078 const_tree arg;
1079
1080 va_start (ap, callexpr);
1081 init_const_call_expr_arg_iterator (callexpr, &iter);
1082
1083 do
1084 {
1085 code = (enum tree_code) va_arg (ap, int);
1086 switch (code)
1087 {
1088 case 0:
1089 /* This signifies an ellipsis; any further arguments are all OK. */
1090 res = true;
1091 goto end;
1092 case VOID_TYPE:
1093 /* This signifies an endlink; if no arguments remain, return
1094 true, otherwise return false. */
1095 res = !more_const_call_expr_args_p (&iter);
1096 goto end;
1097 default:
1098 /* If no parameters remain or the parameter's code does not
1099 match the specified code, return false. Otherwise continue
1100 checking any remaining arguments. */
1101 arg = next_const_call_expr_arg (&iter);
1102 if (!validate_arg (arg, code))
1103 goto end;
1104 break;
1105 }
1106 }
1107 while (1);
1108
1109 /* We need gotos here since we can only have one VA_CLOSE in a
1110 function. */
1111 end: ;
1112 va_end (ap);
1113
1114 return res;
1115 }
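/* Typical use by the expanders below, e.g. for a two-pointer builtin:

     if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
       return NULL_RTX;

   The trailing VOID_TYPE marks the end of the expected argument list.  */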
1116
1117 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
1118 and the address of the save area. */
1119
1120 static rtx
1121 expand_builtin_nonlocal_goto (tree exp)
1122 {
1123 tree t_label, t_save_area;
1124 rtx r_label, r_save_area, r_fp, r_sp;
1125 rtx_insn *insn;
1126
1127 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
1128 return NULL_RTX;
1129
1130 t_label = CALL_EXPR_ARG (exp, 0);
1131 t_save_area = CALL_EXPR_ARG (exp, 1);
1132
1133 r_label = expand_normal (t_label);
1134 r_label = convert_memory_address (Pmode, r_label);
1135 r_save_area = expand_normal (t_save_area);
1136 r_save_area = convert_memory_address (Pmode, r_save_area);
1137 /* Copy the address of the save location to a register just in case it was
1138 based on the frame pointer. */
1139 r_save_area = copy_to_reg (r_save_area);
1140 r_fp = gen_rtx_MEM (Pmode, r_save_area);
1141 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
1142 plus_constant (Pmode, r_save_area,
1143 GET_MODE_SIZE (Pmode)));
1144
1145 crtl->has_nonlocal_goto = 1;
1146
1147 /* ??? We no longer need to pass the static chain value, afaik. */
1148 if (targetm.have_nonlocal_goto ())
1149 emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
1150 else
1151 {
1152 r_label = copy_to_reg (r_label);
1153
1154 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1155 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1156
1157 /* Restore frame pointer for containing function. */
1158 emit_move_insn (hard_frame_pointer_rtx, r_fp);
1159 emit_stack_restore (SAVE_NONLOCAL, r_sp);
1160
1161 /* USE of hard_frame_pointer_rtx added for consistency;
1162 not clear if really needed. */
1163 emit_use (hard_frame_pointer_rtx);
1164 emit_use (stack_pointer_rtx);
1165
1166 /* If the architecture is using a GP register, we must
1167 conservatively assume that the target function makes use of it.
1168 The prologue of functions with nonlocal gotos must therefore
1169 initialize the GP register to the appropriate value, and we
1170 must then make sure that this value is live at the point
1171 of the jump. (Note that this doesn't necessarily apply
1172 to targets with a nonlocal_goto pattern; they are free
1173 to implement it in their own way. Note also that this is
1174 a no-op if the GP register is a global invariant.) */
1175 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
1176 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
1177 emit_use (pic_offset_table_rtx);
1178
1179 emit_indirect_jump (r_label);
1180 }
1181
1182 /* Search backwards to the jump insn and mark it as a
1183 non-local goto. */
1184 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1185 {
1186 if (JUMP_P (insn))
1187 {
1188 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1189 break;
1190 }
1191 else if (CALL_P (insn))
1192 break;
1193 }
1194
1195 return const0_rtx;
1196 }
1197
1198 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1199 (not all will be used on all machines) that was passed to __builtin_setjmp.
1200 It updates the stack pointer in that block to the current value. This is
1201 also called directly by the SJLJ exception handling code. */
1202
1203 void
1204 expand_builtin_update_setjmp_buf (rtx buf_addr)
1205 {
1206 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1207 rtx stack_save
1208 = gen_rtx_MEM (sa_mode,
1209 memory_address
1210 (sa_mode,
1211 plus_constant (Pmode, buf_addr,
1212 2 * GET_MODE_SIZE (Pmode))));
1213
1214 emit_stack_save (SAVE_NONLOCAL, &stack_save);
1215 }
1216
1217 /* Expand a call to __builtin_prefetch. For a target that does not support
1218 data prefetch, evaluate the memory address argument in case it has side
1219 effects. */
1220
1221 static void
1222 expand_builtin_prefetch (tree exp)
1223 {
1224 tree arg0, arg1, arg2;
1225 int nargs;
1226 rtx op0, op1, op2;
1227
1228 if (!validate_arglist (exp, POINTER_TYPE, 0))
1229 return;
1230
1231 arg0 = CALL_EXPR_ARG (exp, 0);
1232
1233 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1234 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1235 locality). */
1236 nargs = call_expr_nargs (exp);
1237 if (nargs > 1)
1238 arg1 = CALL_EXPR_ARG (exp, 1);
1239 else
1240 arg1 = integer_zero_node;
1241 if (nargs > 2)
1242 arg2 = CALL_EXPR_ARG (exp, 2);
1243 else
1244 arg2 = integer_three_node;
1245
1246 /* Argument 0 is an address. */
1247 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1248
1249 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1250 if (TREE_CODE (arg1) != INTEGER_CST)
1251 {
1252 error ("second argument to %<__builtin_prefetch%> must be a constant");
1253 arg1 = integer_zero_node;
1254 }
1255 op1 = expand_normal (arg1);
1256 /* Argument 1 must be either zero or one. */
1257 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1258 {
1259 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1260 " using zero");
1261 op1 = const0_rtx;
1262 }
1263
1264 /* Argument 2 (locality) must be a compile-time constant int. */
1265 if (TREE_CODE (arg2) != INTEGER_CST)
1266 {
1267 error ("third argument to %<__builtin_prefetch%> must be a constant");
1268 arg2 = integer_zero_node;
1269 }
1270 op2 = expand_normal (arg2);
1271 /* Argument 2 must be 0, 1, 2, or 3. */
1272 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1273 {
1274 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1275 op2 = const0_rtx;
1276 }
1277
1278 if (targetm.have_prefetch ())
1279 {
1280 struct expand_operand ops[3];
1281
1282 create_address_operand (&ops[0], op0);
1283 create_integer_operand (&ops[1], INTVAL (op1));
1284 create_integer_operand (&ops[2], INTVAL (op2));
1285 if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
1286 return;
1287 }
1288
1289 /* Don't do anything with direct references to volatile memory, but
1290 generate code to handle other side effects. */
1291 if (!MEM_P (op0) && side_effects_p (op0))
1292 emit_insn (op0);
1293 }
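/* Source-level illustration of the checks above (not part of this file):
   the read/write and locality arguments must be literal constants.  */
#if 0
__builtin_prefetch (p);       /* rw defaults to 0 (read), locality to 3.  */
__builtin_prefetch (p, 1);    /* Prefetch for writing.  */
__builtin_prefetch (p, 0, 1); /* Read, low temporal locality.  */
#endif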
1294
1295 /* Get a MEM rtx for expression EXP which is the address of an operand
1296 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1297 the maximum length of the block of memory that might be accessed or
1298 NULL if unknown. */
1299
1300 static rtx
1301 get_memory_rtx (tree exp, tree len)
1302 {
1303 tree orig_exp = exp;
1304 rtx addr, mem;
1305
1306 /* When EXP is not a resolved SAVE_EXPR, MEM_ATTRS can still be derived
1307 from its expression; for expr->a.b only <variable>.a.b is recorded. */
1308 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1309 exp = TREE_OPERAND (exp, 0);
1310
1311 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1312 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1313
1314 /* Get an expression we can use to find the attributes to assign to MEM.
1315 First remove any nops. */
1316 while (CONVERT_EXPR_P (exp)
1317 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1318 exp = TREE_OPERAND (exp, 0);
1319
1320 /* Build a MEM_REF representing the whole accessed area as a byte blob
1321 (as builtin stringops may alias with anything). */
1322 exp = fold_build2 (MEM_REF,
1323 build_array_type (char_type_node,
1324 build_range_type (sizetype,
1325 size_one_node, len)),
1326 exp, build_int_cst (ptr_type_node, 0));
1327
1328 /* If the MEM_REF has no acceptable address, try to get the base object
1329 from the original address we got, and build an all-aliasing
1330 unknown-sized access to that one. */
1331 if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
1332 set_mem_attributes (mem, exp, 0);
1333 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1334 && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
1335 0))))
1336 {
1337 exp = build_fold_addr_expr (exp);
1338 exp = fold_build2 (MEM_REF,
1339 build_array_type (char_type_node,
1340 build_range_type (sizetype,
1341 size_zero_node,
1342 NULL)),
1343 exp, build_int_cst (ptr_type_node, 0));
1344 set_mem_attributes (mem, exp, 0);
1345 }
1346 set_mem_alias_set (mem, 0);
1347 return mem;
1348 }
1349 \f
1350 /* Built-in functions to perform an untyped call and return. */
1351
1352 #define apply_args_mode \
1353 (this_target_builtins->x_apply_args_mode)
1354 #define apply_result_mode \
1355 (this_target_builtins->x_apply_result_mode)
1356
1357 /* Return the size required for the block returned by __builtin_apply_args,
1358 and initialize apply_args_mode. */
1359
1360 static int
1361 apply_args_size (void)
1362 {
1363 static int size = -1;
1364 int align;
1365 unsigned int regno;
1366 machine_mode mode;
1367
1368 /* The values computed by this function never change. */
1369 if (size < 0)
1370 {
1371 /* The first value is the incoming arg-pointer. */
1372 size = GET_MODE_SIZE (Pmode);
1373
1374 /* The second value is the structure value address unless this is
1375 passed as an "invisible" first argument. */
1376 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1377 size += GET_MODE_SIZE (Pmode);
1378
1379 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1380 if (FUNCTION_ARG_REGNO_P (regno))
1381 {
1382 mode = targetm.calls.get_raw_arg_mode (regno);
1383
1384 gcc_assert (mode != VOIDmode);
1385
1386 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1387 if (size % align != 0)
1388 size = CEIL (size, align) * align;
1389 size += GET_MODE_SIZE (mode);
1390 apply_args_mode[regno] = mode;
1391 }
1392 else
1393 {
1394 apply_args_mode[regno] = VOIDmode;
1395 }
1396 }
1397 return size;
1398 }
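/* Worked example of the rounding above: if SIZE is 12 when a register
   whose mode needs 8-byte alignment is reached, CEIL (12, 8) * 8 bumps
   the offset to 16 before the 8-byte slot is reserved, giving 24.  */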
1399
1400 /* Return the size required for the block returned by __builtin_apply,
1401 and initialize apply_result_mode. */
1402
1403 static int
1404 apply_result_size (void)
1405 {
1406 static int size = -1;
1407 int align, regno;
1408 machine_mode mode;
1409
1410 /* The values computed by this function never change. */
1411 if (size < 0)
1412 {
1413 size = 0;
1414
1415 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1416 if (targetm.calls.function_value_regno_p (regno))
1417 {
1418 mode = targetm.calls.get_raw_result_mode (regno);
1419
1420 gcc_assert (mode != VOIDmode);
1421
1422 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1423 if (size % align != 0)
1424 size = CEIL (size, align) * align;
1425 size += GET_MODE_SIZE (mode);
1426 apply_result_mode[regno] = mode;
1427 }
1428 else
1429 apply_result_mode[regno] = VOIDmode;
1430
1431 /* Allow targets that use untyped_call and untyped_return to override
1432 the size so that machine-specific information can be stored here. */
1433 #ifdef APPLY_RESULT_SIZE
1434 size = APPLY_RESULT_SIZE;
1435 #endif
1436 }
1437 return size;
1438 }
1439
1440 /* Create a vector describing the result block RESULT. If SAVEP is true,
1441 the result block is used to save the values; otherwise it is used to
1442 restore the values. */
1443
1444 static rtx
1445 result_vector (int savep, rtx result)
1446 {
1447 int regno, size, align, nelts;
1448 machine_mode mode;
1449 rtx reg, mem;
1450 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1451
1452 size = nelts = 0;
1453 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1454 if ((mode = apply_result_mode[regno]) != VOIDmode)
1455 {
1456 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1457 if (size % align != 0)
1458 size = CEIL (size, align) * align;
1459 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1460 mem = adjust_address (result, mode, size);
1461 savevec[nelts++] = (savep
1462 ? gen_rtx_SET (mem, reg)
1463 : gen_rtx_SET (reg, mem));
1464 size += GET_MODE_SIZE (mode);
1465 }
1466 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1467 }
1468
1469 /* Save the state required to perform an untyped call with the same
1470 arguments as were passed to the current function. */
1471
1472 static rtx
1473 expand_builtin_apply_args_1 (void)
1474 {
1475 rtx registers, tem;
1476 int size, align, regno;
1477 machine_mode mode;
1478 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1479
1480 /* Create a block where the arg-pointer, structure value address,
1481 and argument registers can be saved. */
1482 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1483
1484 /* Walk past the arg-pointer and structure value address. */
1485 size = GET_MODE_SIZE (Pmode);
1486 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1487 size += GET_MODE_SIZE (Pmode);
1488
1489 /* Save each register used in calling a function to the block. */
1490 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1491 if ((mode = apply_args_mode[regno]) != VOIDmode)
1492 {
1493 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1494 if (size % align != 0)
1495 size = CEIL (size, align) * align;
1496
1497 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1498
1499 emit_move_insn (adjust_address (registers, mode, size), tem);
1500 size += GET_MODE_SIZE (mode);
1501 }
1502
1503 /* Save the arg pointer to the block. */
1504 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1505 /* We need the pointer as the caller actually passed it to us, not
1506 as we might have pretended it was passed. Make sure it's a valid
1507 operand, as emit_move_insn isn't expected to handle a PLUS. */
1508 if (STACK_GROWS_DOWNWARD)
1509 tem
1510 = force_operand (plus_constant (Pmode, tem,
1511 crtl->args.pretend_args_size),
1512 NULL_RTX);
1513 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1514
1515 size = GET_MODE_SIZE (Pmode);
1516
1517 /* Save the structure value address unless this is passed as an
1518 "invisible" first argument. */
1519 if (struct_incoming_value)
1520 {
1521 emit_move_insn (adjust_address (registers, Pmode, size),
1522 copy_to_reg (struct_incoming_value));
1523 size += GET_MODE_SIZE (Pmode);
1524 }
1525
1526 /* Return the address of the block. */
1527 return copy_addr_to_reg (XEXP (registers, 0));
1528 }
1529
1530 /* __builtin_apply_args returns a block of memory allocated on
1531 the stack into which are stored the arg pointer, structure
1532 value address, static chain, and all the registers that might
1533 possibly be used in performing a function call. The code is
1534 moved to the start of the function so the incoming values are
1535 saved. */
1536
1537 static rtx
1538 expand_builtin_apply_args (void)
1539 {
1540 /* Don't do __builtin_apply_args more than once in a function.
1541 Save the result of the first call and reuse it. */
1542 if (apply_args_value != 0)
1543 return apply_args_value;
1544 {
1545 /* When this function is called, it means that registers must be
1546 saved on entry to this function. So we migrate the
1547 call to the first insn of this function. */
1548 rtx temp;
1549
1550 start_sequence ();
1551 temp = expand_builtin_apply_args_1 ();
1552 rtx_insn *seq = get_insns ();
1553 end_sequence ();
1554
1555 apply_args_value = temp;
1556
1557 /* Put the insns after the NOTE that starts the function.
1558 If this is inside a start_sequence, make the outer-level insn
1559 chain current, so the code is placed at the start of the
1560 function. If internal_arg_pointer is a non-virtual pseudo,
1561 it needs to be placed after the function that initializes
1562 that pseudo. */
1563 push_topmost_sequence ();
1564 if (REG_P (crtl->args.internal_arg_pointer)
1565 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1566 emit_insn_before (seq, parm_birth_insn);
1567 else
1568 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1569 pop_topmost_sequence ();
1570 return temp;
1571 }
1572 }
1573
1574 /* Perform an untyped call and save the state required to perform an
1575 untyped return of whatever value was returned by the given function. */
1576
1577 static rtx
1578 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1579 {
1580 int size, align, regno;
1581 machine_mode mode;
1582 rtx incoming_args, result, reg, dest, src;
1583 rtx_call_insn *call_insn;
1584 rtx old_stack_level = 0;
1585 rtx call_fusage = 0;
1586 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1587
1588 arguments = convert_memory_address (Pmode, arguments);
1589
1590 /* Create a block where the return registers can be saved. */
1591 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1592
1593 /* Fetch the arg pointer from the ARGUMENTS block. */
1594 incoming_args = gen_reg_rtx (Pmode);
1595 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1596 if (!STACK_GROWS_DOWNWARD)
1597 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1598 incoming_args, 0, OPTAB_LIB_WIDEN);
1599
1600 /* Push a new argument block and copy the arguments. Do not allow
1601 the (potential) memcpy call below to interfere with our stack
1602 manipulations. */
1603 do_pending_stack_adjust ();
1604 NO_DEFER_POP;
1605
1606 /* Save the stack with nonlocal if available. */
1607 if (targetm.have_save_stack_nonlocal ())
1608 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1609 else
1610 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1611
1612 /* Allocate a block of memory onto the stack and copy the memory
1613 arguments to the outgoing arguments address. We can pass TRUE
1614 as the 4th argument because we just saved the stack pointer
1615 and will restore it right after the call. */
1616 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);
1617
1618 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1619 may have already set current_function_calls_alloca to true.
1620 current_function_calls_alloca won't be set if argsize is zero,
1621 so we have to guarantee need_drap is true here. */
1622 if (SUPPORTS_STACK_ALIGNMENT)
1623 crtl->need_drap = true;
1624
1625 dest = virtual_outgoing_args_rtx;
1626 if (!STACK_GROWS_DOWNWARD)
1627 {
1628 if (CONST_INT_P (argsize))
1629 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1630 else
1631 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1632 }
1633 dest = gen_rtx_MEM (BLKmode, dest);
1634 set_mem_align (dest, PARM_BOUNDARY);
1635 src = gen_rtx_MEM (BLKmode, incoming_args);
1636 set_mem_align (src, PARM_BOUNDARY);
1637 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1638
1639 /* Refer to the argument block; apply_args_size initializes apply_args_mode for the loop below. */
1640 apply_args_size ();
1641 arguments = gen_rtx_MEM (BLKmode, arguments);
1642 set_mem_align (arguments, PARM_BOUNDARY);
1643
1644 /* Walk past the arg-pointer and structure value address. */
1645 size = GET_MODE_SIZE (Pmode);
1646 if (struct_value)
1647 size += GET_MODE_SIZE (Pmode);
1648
1649 /* Restore each of the registers previously saved. Make USE insns
1650 for each of these registers for use in making the call. */
1651 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1652 if ((mode = apply_args_mode[regno]) != VOIDmode)
1653 {
1654 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1655 if (size % align != 0)
1656 size = CEIL (size, align) * align;
1657 reg = gen_rtx_REG (mode, regno);
1658 emit_move_insn (reg, adjust_address (arguments, mode, size));
1659 use_reg (&call_fusage, reg);
1660 size += GET_MODE_SIZE (mode);
1661 }
1662
1663 /* Restore the structure value address unless this is passed as an
1664 "invisible" first argument. */
1665 size = GET_MODE_SIZE (Pmode);
1666 if (struct_value)
1667 {
1668 rtx value = gen_reg_rtx (Pmode);
1669 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1670 emit_move_insn (struct_value, value);
1671 if (REG_P (struct_value))
1672 use_reg (&call_fusage, struct_value);
1673 size += GET_MODE_SIZE (Pmode);
1674 }
1675
1676 /* All arguments and registers used for the call are set up by now! */
1677 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1678
1679 /* Ensure the address is valid. A SYMBOL_REF is already valid, so do
1680 nothing, and don't load it into a register as an optimization,
1681 because prepare_call_address already did that if appropriate. */
1682 if (GET_CODE (function) != SYMBOL_REF)
1683 function = memory_address (FUNCTION_MODE, function);
1684
1685 /* Generate the actual call instruction and save the return value. */
1686 if (targetm.have_untyped_call ())
1687 {
1688 rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
1689 emit_call_insn (targetm.gen_untyped_call (mem, result,
1690 result_vector (1, result)));
1691 }
1692 else if (targetm.have_call_value ())
1693 {
1694 rtx valreg = 0;
1695
1696 /* Locate the unique return register. It is not possible to
1697 express a call that sets more than one return register using
1698 call_value; use untyped_call for that. In fact, untyped_call
1699 only needs to save the return registers in the given block. */
1700 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1701 if ((mode = apply_result_mode[regno]) != VOIDmode)
1702 {
1703 gcc_assert (!valreg); /* have_untyped_call required. */
1704
1705 valreg = gen_rtx_REG (mode, regno);
1706 }
1707
1708 emit_insn (targetm.gen_call_value (valreg,
1709 gen_rtx_MEM (FUNCTION_MODE, function),
1710 const0_rtx, NULL_RTX, const0_rtx));
1711
1712 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1713 }
1714 else
1715 gcc_unreachable ();
1716
1717 /* Find the CALL insn we just emitted, and attach the register usage
1718 information. */
1719 call_insn = last_call_insn ();
1720 add_function_usage_to (call_insn, call_fusage);
1721
1722 /* Restore the stack. */
1723 if (targetm.have_save_stack_nonlocal ())
1724 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1725 else
1726 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1727 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1728
1729 OK_DEFER_POP;
1730
1731 /* Return the address of the result block. */
1732 result = copy_addr_to_reg (XEXP (result, 0));
1733 return convert_memory_address (ptr_mode, result);
1734 }
1735
1736 /* Perform an untyped return. */
1737
1738 static void
1739 expand_builtin_return (rtx result)
1740 {
1741 int size, align, regno;
1742 machine_mode mode;
1743 rtx reg;
1744 rtx_insn *call_fusage = 0;
1745
1746 result = convert_memory_address (Pmode, result);
1747
1748 apply_result_size ();
1749 result = gen_rtx_MEM (BLKmode, result);
1750
1751 if (targetm.have_untyped_return ())
1752 {
1753 rtx vector = result_vector (0, result);
1754 emit_jump_insn (targetm.gen_untyped_return (result, vector));
1755 emit_barrier ();
1756 return;
1757 }
1758
1759 /* Restore the return value and note that each value is used. */
1760 size = 0;
1761 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1762 if ((mode = apply_result_mode[regno]) != VOIDmode)
1763 {
1764 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1765 if (size % align != 0)
1766 size = CEIL (size, align) * align;
1767 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1768 emit_move_insn (reg, adjust_address (result, mode, size));
1769
1770 push_to_sequence (call_fusage);
1771 emit_use (reg);
1772 call_fusage = get_insns ();
1773 end_sequence ();
1774 size += GET_MODE_SIZE (mode);
1775 }
1776
1777 /* Put the USE insns before the return. */
1778 emit_insn (call_fusage);
1779
1780 /* Return whatever value was restored by jumping directly to the end
1781 of the function. */
1782 expand_naked_return ();
1783 }
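/* Illustrative sketch, not part of this file: the three builtins expanded
   above (__builtin_apply_args, __builtin_apply, __builtin_return) are
   meant to be combined in user code, e.g. for a generic call forwarder.
   The names `forward' and `trace_entry' are hypothetical; the block is
   guarded out because this is compiler source, not user code.  */
#if 0
#include <stddef.h>

extern void trace_entry (void);   /* hypothetical instrumentation hook */

/* Forward our own arguments to TARGET and return whatever it returns;
   ARGSIZE must cover the stacked arguments of the call.  */
void *
forward (void *(*target) (), size_t argsize)
{
  trace_entry ();
  void *args = __builtin_apply_args ();            /* save incoming state */
  void *result = __builtin_apply ((void (*) ()) target, args, argsize);
  __builtin_return (result);                       /* untyped return */
}
#endif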
1784
1785 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1786
1787 static enum type_class
1788 type_to_class (tree type)
1789 {
1790 switch (TREE_CODE (type))
1791 {
1792 case VOID_TYPE: return void_type_class;
1793 case INTEGER_TYPE: return integer_type_class;
1794 case ENUMERAL_TYPE: return enumeral_type_class;
1795 case BOOLEAN_TYPE: return boolean_type_class;
1796 case POINTER_TYPE: return pointer_type_class;
1797 case REFERENCE_TYPE: return reference_type_class;
1798 case OFFSET_TYPE: return offset_type_class;
1799 case REAL_TYPE: return real_type_class;
1800 case COMPLEX_TYPE: return complex_type_class;
1801 case FUNCTION_TYPE: return function_type_class;
1802 case METHOD_TYPE: return method_type_class;
1803 case RECORD_TYPE: return record_type_class;
1804 case UNION_TYPE:
1805 case QUAL_UNION_TYPE: return union_type_class;
1806 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1807 ? string_type_class : array_type_class);
1808 case LANG_TYPE: return lang_type_class;
1809 default: return no_type_class;
1810 }
1811 }
1812
1813 /* Expand a call EXP to __builtin_classify_type. */
1814
1815 static rtx
1816 expand_builtin_classify_type (tree exp)
1817 {
1818 if (call_expr_nargs (exp))
1819 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1820 return GEN_INT (no_type_class);
1821 }
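/* Illustrative sketch, not part of this file: at the source level the
   builtin folds to a constant from the enum above, e.g.: */
#if 0
int ic = __builtin_classify_type (0);           /* integer_type_class */
int rc = __builtin_classify_type (0.0);         /* real_type_class */
int pc = __builtin_classify_type ((char *) 0);  /* pointer_type_class */
#endif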
1822
1823 /* This helper macro, meant to be used in mathfn_built_in_1 below,
1824 determines which among a set of three builtin math functions is
1825 appropriate for a given type mode. The `F' and `L' cases are
1826 automatically generated from the `double' case. */
1827 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1828 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1829 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1830 fcodel = BUILT_IN_MATHFN##L ; break;
1831 /* Similar to above, but appends _R after any F/L suffix. */
1832 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1833 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1834 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1835 fcodel = BUILT_IN_MATHFN##L_R ; break;
1836
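/* For example, CASE_MATHFN (BUILT_IN_SIN) expands to:

     case BUILT_IN_SIN: case BUILT_IN_SINF: case BUILT_IN_SINL:
       fcode = BUILT_IN_SIN; fcodef = BUILT_IN_SINF;
       fcodel = BUILT_IN_SINL; break;

   so one case line in the switch below covers the double, float and
   long double variants of each math function.  */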
1837 /* Return the mathematical function equivalent to FN but operating
1838 directly on TYPE, if available. If IMPLICIT_P is true use the
1839 implicit builtin declaration, otherwise use the explicit
1840 declaration. If we can't do the conversion, return zero. */
1841
1842 static tree
1843 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit_p)
1844 {
1845 enum built_in_function fcode, fcodef, fcodel, fcode2;
1846
1847 switch (fn)
1848 {
1849 CASE_MATHFN (BUILT_IN_ACOS)
1850 CASE_MATHFN (BUILT_IN_ACOSH)
1851 CASE_MATHFN (BUILT_IN_ASIN)
1852 CASE_MATHFN (BUILT_IN_ASINH)
1853 CASE_MATHFN (BUILT_IN_ATAN)
1854 CASE_MATHFN (BUILT_IN_ATAN2)
1855 CASE_MATHFN (BUILT_IN_ATANH)
1856 CASE_MATHFN (BUILT_IN_CBRT)
1857 CASE_MATHFN (BUILT_IN_CEIL)
1858 CASE_MATHFN (BUILT_IN_CEXPI)
1859 CASE_MATHFN (BUILT_IN_COPYSIGN)
1860 CASE_MATHFN (BUILT_IN_COS)
1861 CASE_MATHFN (BUILT_IN_COSH)
1862 CASE_MATHFN (BUILT_IN_DREM)
1863 CASE_MATHFN (BUILT_IN_ERF)
1864 CASE_MATHFN (BUILT_IN_ERFC)
1865 CASE_MATHFN (BUILT_IN_EXP)
1866 CASE_MATHFN (BUILT_IN_EXP10)
1867 CASE_MATHFN (BUILT_IN_EXP2)
1868 CASE_MATHFN (BUILT_IN_EXPM1)
1869 CASE_MATHFN (BUILT_IN_FABS)
1870 CASE_MATHFN (BUILT_IN_FDIM)
1871 CASE_MATHFN (BUILT_IN_FLOOR)
1872 CASE_MATHFN (BUILT_IN_FMA)
1873 CASE_MATHFN (BUILT_IN_FMAX)
1874 CASE_MATHFN (BUILT_IN_FMIN)
1875 CASE_MATHFN (BUILT_IN_FMOD)
1876 CASE_MATHFN (BUILT_IN_FREXP)
1877 CASE_MATHFN (BUILT_IN_GAMMA)
1878 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1879 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1880 CASE_MATHFN (BUILT_IN_HYPOT)
1881 CASE_MATHFN (BUILT_IN_ILOGB)
1882 CASE_MATHFN (BUILT_IN_ICEIL)
1883 CASE_MATHFN (BUILT_IN_IFLOOR)
1884 CASE_MATHFN (BUILT_IN_INF)
1885 CASE_MATHFN (BUILT_IN_IRINT)
1886 CASE_MATHFN (BUILT_IN_IROUND)
1887 CASE_MATHFN (BUILT_IN_ISINF)
1888 CASE_MATHFN (BUILT_IN_J0)
1889 CASE_MATHFN (BUILT_IN_J1)
1890 CASE_MATHFN (BUILT_IN_JN)
1891 CASE_MATHFN (BUILT_IN_LCEIL)
1892 CASE_MATHFN (BUILT_IN_LDEXP)
1893 CASE_MATHFN (BUILT_IN_LFLOOR)
1894 CASE_MATHFN (BUILT_IN_LGAMMA)
1895 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1896 CASE_MATHFN (BUILT_IN_LLCEIL)
1897 CASE_MATHFN (BUILT_IN_LLFLOOR)
1898 CASE_MATHFN (BUILT_IN_LLRINT)
1899 CASE_MATHFN (BUILT_IN_LLROUND)
1900 CASE_MATHFN (BUILT_IN_LOG)
1901 CASE_MATHFN (BUILT_IN_LOG10)
1902 CASE_MATHFN (BUILT_IN_LOG1P)
1903 CASE_MATHFN (BUILT_IN_LOG2)
1904 CASE_MATHFN (BUILT_IN_LOGB)
1905 CASE_MATHFN (BUILT_IN_LRINT)
1906 CASE_MATHFN (BUILT_IN_LROUND)
1907 CASE_MATHFN (BUILT_IN_MODF)
1908 CASE_MATHFN (BUILT_IN_NAN)
1909 CASE_MATHFN (BUILT_IN_NANS)
1910 CASE_MATHFN (BUILT_IN_NEARBYINT)
1911 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1912 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1913 CASE_MATHFN (BUILT_IN_POW)
1914 CASE_MATHFN (BUILT_IN_POWI)
1915 CASE_MATHFN (BUILT_IN_POW10)
1916 CASE_MATHFN (BUILT_IN_REMAINDER)
1917 CASE_MATHFN (BUILT_IN_REMQUO)
1918 CASE_MATHFN (BUILT_IN_RINT)
1919 CASE_MATHFN (BUILT_IN_ROUND)
1920 CASE_MATHFN (BUILT_IN_SCALB)
1921 CASE_MATHFN (BUILT_IN_SCALBLN)
1922 CASE_MATHFN (BUILT_IN_SCALBN)
1923 CASE_MATHFN (BUILT_IN_SIGNBIT)
1924 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1925 CASE_MATHFN (BUILT_IN_SIN)
1926 CASE_MATHFN (BUILT_IN_SINCOS)
1927 CASE_MATHFN (BUILT_IN_SINH)
1928 CASE_MATHFN (BUILT_IN_SQRT)
1929 CASE_MATHFN (BUILT_IN_TAN)
1930 CASE_MATHFN (BUILT_IN_TANH)
1931 CASE_MATHFN (BUILT_IN_TGAMMA)
1932 CASE_MATHFN (BUILT_IN_TRUNC)
1933 CASE_MATHFN (BUILT_IN_Y0)
1934 CASE_MATHFN (BUILT_IN_Y1)
1935 CASE_MATHFN (BUILT_IN_YN)
1936
1937 default:
1938 return NULL_TREE;
1939 }
1940
1941 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1942 fcode2 = fcode;
1943 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1944 fcode2 = fcodef;
1945 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1946 fcode2 = fcodel;
1947 else
1948 return NULL_TREE;
1949
1950 if (implicit_p && !builtin_decl_implicit_p (fcode2))
1951 return NULL_TREE;
1952
1953 return builtin_decl_explicit (fcode2);
1954 }
1955
1956 /* Like mathfn_built_in_1 (), but always use the implicit builtin declaration. */
1957
1958 tree
1959 mathfn_built_in (tree type, enum built_in_function fn)
1960 {
1961 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1962 }
1963
1964 /* If errno must be maintained, expand the RTL to check if the result,
1965 TARGET, of a built-in function call, EXP, is NaN, and if so set
1966 errno to EDOM. */
1967
1968 static void
1969 expand_errno_check (tree exp, rtx target)
1970 {
1971 rtx_code_label *lab = gen_label_rtx ();
1972
1973 /* Test the result; if it is NaN, set errno=EDOM because
1974 the argument was not in the domain. */
1975 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
1976 NULL_RTX, NULL, lab,
1977 /* The jump is very likely. */
1978 REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));
1979
1980 #ifdef TARGET_EDOM
1981 /* If this built-in doesn't throw an exception, set errno directly. */
1982 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1983 {
1984 #ifdef GEN_ERRNO_RTX
1985 rtx errno_rtx = GEN_ERRNO_RTX;
1986 #else
1987 rtx errno_rtx
1988 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1989 #endif
1990 emit_move_insn (errno_rtx,
1991 gen_int_mode (TARGET_EDOM, GET_MODE (errno_rtx)));
1992 emit_label (lab);
1993 return;
1994 }
1995 #endif
1996
1997 /* Make sure the library call isn't expanded as a tail call. */
1998 CALL_EXPR_TAILCALL (exp) = 0;
1999
2000 /* We can't set errno=EDOM directly; let the library call do it.
2001 Pop the arguments right away in case the call gets deleted. */
2002 NO_DEFER_POP;
2003 expand_call (exp, target, 0);
2004 OK_DEFER_POP;
2005 emit_label (lab);
2006 }
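/* Illustrative sketch, not part of this file: the check above is what
   keeps C99 errno semantics observable in code compiled with
   -fmath-errno.  The names `checked_sqrt' and `report_domain_error'
   are hypothetical.  */
#if 0
#include <errno.h>
#include <math.h>

extern void report_domain_error (void);  /* hypothetical handler */

void
checked_sqrt (double x)
{
  errno = 0;
  double r = sqrt (x);   /* may be expanded inline via sqrt_optab */
  if (errno == EDOM)     /* set by the RTL emitted above when R is NaN */
    report_domain_error ();
  (void) r;
}
#endif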
2007
2008 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
2009 Return NULL_RTX if a normal call should be emitted rather than expanding
2010 the function in-line. EXP is the expression that is a call to the builtin
2011 function; if convenient, the result should be placed in TARGET.
2012 SUBTARGET may be used as the target for computing one of EXP's operands. */
2013
2014 static rtx
2015 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
2016 {
2017 optab builtin_optab;
2018 rtx op0;
2019 rtx_insn *insns;
2020 tree fndecl = get_callee_fndecl (exp);
2021 machine_mode mode;
2022 bool errno_set = false;
2023 bool try_widening = false;
2024 tree arg;
2025
2026 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2027 return NULL_RTX;
2028
2029 arg = CALL_EXPR_ARG (exp, 0);
2030
2031 switch (DECL_FUNCTION_CODE (fndecl))
2032 {
2033 CASE_FLT_FN (BUILT_IN_SQRT):
2034 errno_set = ! tree_expr_nonnegative_p (arg);
2035 try_widening = true;
2036 builtin_optab = sqrt_optab;
2037 break;
2038 CASE_FLT_FN (BUILT_IN_EXP):
2039 errno_set = true; builtin_optab = exp_optab; break;
2040 CASE_FLT_FN (BUILT_IN_EXP10):
2041 CASE_FLT_FN (BUILT_IN_POW10):
2042 errno_set = true; builtin_optab = exp10_optab; break;
2043 CASE_FLT_FN (BUILT_IN_EXP2):
2044 errno_set = true; builtin_optab = exp2_optab; break;
2045 CASE_FLT_FN (BUILT_IN_EXPM1):
2046 errno_set = true; builtin_optab = expm1_optab; break;
2047 CASE_FLT_FN (BUILT_IN_LOGB):
2048 errno_set = true; builtin_optab = logb_optab; break;
2049 CASE_FLT_FN (BUILT_IN_LOG):
2050 errno_set = true; builtin_optab = log_optab; break;
2051 CASE_FLT_FN (BUILT_IN_LOG10):
2052 errno_set = true; builtin_optab = log10_optab; break;
2053 CASE_FLT_FN (BUILT_IN_LOG2):
2054 errno_set = true; builtin_optab = log2_optab; break;
2055 CASE_FLT_FN (BUILT_IN_LOG1P):
2056 errno_set = true; builtin_optab = log1p_optab; break;
2057 CASE_FLT_FN (BUILT_IN_ASIN):
2058 builtin_optab = asin_optab; break;
2059 CASE_FLT_FN (BUILT_IN_ACOS):
2060 builtin_optab = acos_optab; break;
2061 CASE_FLT_FN (BUILT_IN_TAN):
2062 builtin_optab = tan_optab; break;
2063 CASE_FLT_FN (BUILT_IN_ATAN):
2064 builtin_optab = atan_optab; break;
2065 CASE_FLT_FN (BUILT_IN_FLOOR):
2066 builtin_optab = floor_optab; break;
2067 CASE_FLT_FN (BUILT_IN_CEIL):
2068 builtin_optab = ceil_optab; break;
2069 CASE_FLT_FN (BUILT_IN_TRUNC):
2070 builtin_optab = btrunc_optab; break;
2071 CASE_FLT_FN (BUILT_IN_ROUND):
2072 builtin_optab = round_optab; break;
2073 CASE_FLT_FN (BUILT_IN_NEARBYINT):
2074 builtin_optab = nearbyint_optab;
2075 if (flag_trapping_math)
2076 break;
2077 /* Else fall through and expand as rint. */
2078 CASE_FLT_FN (BUILT_IN_RINT):
2079 builtin_optab = rint_optab; break;
2080 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
2081 builtin_optab = significand_optab; break;
2082 default:
2083 gcc_unreachable ();
2084 }
2085
2086 /* Make a suitable register to place result in. */
2087 mode = TYPE_MODE (TREE_TYPE (exp));
2088
2089 if (! flag_errno_math || ! HONOR_NANS (mode))
2090 errno_set = false;
2091
2092 /* Before working hard, check whether the instruction is available, but try
2093 to widen the mode for specific operations. */
2094 if ((optab_handler (builtin_optab, mode) != CODE_FOR_nothing
2095 || (try_widening && !excess_precision_type (TREE_TYPE (exp))))
2096 && (!errno_set || !optimize_insn_for_size_p ()))
2097 {
2098 rtx result = gen_reg_rtx (mode);
2099
2100 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2101 need to expand the argument again. This way, we will not perform
2102 side-effects more than once. */
2103 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2104
2105 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2106
2107 start_sequence ();
2108
2109 /* Compute into RESULT.
2110 Set RESULT to wherever the result comes back. */
2111 result = expand_unop (mode, builtin_optab, op0, result, 0);
2112
2113 if (result != 0)
2114 {
2115 if (errno_set)
2116 expand_errno_check (exp, result);
2117
2118 /* Output the entire sequence. */
2119 insns = get_insns ();
2120 end_sequence ();
2121 emit_insn (insns);
2122 return result;
2123 }
2124
2125 /* If we were unable to expand via the builtin, stop the sequence
2126 (without outputting the insns) and call the library function
2127 with the stabilized argument list. */
2128 end_sequence ();
2129 }
2130
2131 return expand_call (exp, target, target == const0_rtx);
2132 }
2133
2134 /* Expand a call to one of the builtin binary math functions (pow, atan2, etc.).
2135 Return NULL_RTX if a normal call should be emitted rather than expanding the
2136 function in-line. EXP is the expression that is a call to the builtin
2137 function; if convenient, the result should be placed in TARGET.
2138 SUBTARGET may be used as the target for computing one of EXP's
2139 operands. */
2140
2141 static rtx
2142 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2143 {
2144 optab builtin_optab;
2145 rtx op0, op1, result;
2146 rtx_insn *insns;
2147 int op1_type = REAL_TYPE;
2148 tree fndecl = get_callee_fndecl (exp);
2149 tree arg0, arg1;
2150 machine_mode mode;
2151 bool errno_set = true;
2152
2153 switch (DECL_FUNCTION_CODE (fndecl))
2154 {
2155 CASE_FLT_FN (BUILT_IN_SCALBN):
2156 CASE_FLT_FN (BUILT_IN_SCALBLN):
2157 CASE_FLT_FN (BUILT_IN_LDEXP):
2158 op1_type = INTEGER_TYPE;
2159 default:
2160 break;
2161 }
2162
2163 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2164 return NULL_RTX;
2165
2166 arg0 = CALL_EXPR_ARG (exp, 0);
2167 arg1 = CALL_EXPR_ARG (exp, 1);
2168
2169 switch (DECL_FUNCTION_CODE (fndecl))
2170 {
2171 CASE_FLT_FN (BUILT_IN_POW):
2172 builtin_optab = pow_optab; break;
2173 CASE_FLT_FN (BUILT_IN_ATAN2):
2174 builtin_optab = atan2_optab; break;
2175 CASE_FLT_FN (BUILT_IN_SCALB):
2176 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2177 return 0;
2178 builtin_optab = scalb_optab; break;
2179 CASE_FLT_FN (BUILT_IN_SCALBN):
2180 CASE_FLT_FN (BUILT_IN_SCALBLN):
2181 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2182 return 0;
2183 /* Fall through... */
2184 CASE_FLT_FN (BUILT_IN_LDEXP):
2185 builtin_optab = ldexp_optab; break;
2186 CASE_FLT_FN (BUILT_IN_FMOD):
2187 builtin_optab = fmod_optab; break;
2188 CASE_FLT_FN (BUILT_IN_REMAINDER):
2189 CASE_FLT_FN (BUILT_IN_DREM):
2190 builtin_optab = remainder_optab; break;
2191 default:
2192 gcc_unreachable ();
2193 }
2194
2195 /* Make a suitable register to place result in. */
2196 mode = TYPE_MODE (TREE_TYPE (exp));
2197
2198 /* Before working hard, check whether the instruction is available. */
2199 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2200 return NULL_RTX;
2201
2202 result = gen_reg_rtx (mode);
2203
2204 if (! flag_errno_math || ! HONOR_NANS (mode))
2205 errno_set = false;
2206
2207 if (errno_set && optimize_insn_for_size_p ())
2208 return 0;
2209
2210 /* Always stabilize the argument list. */
2211 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2212 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2213
2214 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2215 op1 = expand_normal (arg1);
2216
2217 start_sequence ();
2218
2219 /* Compute into RESULT.
2220 Set RESULT to wherever the result comes back. */
2221 result = expand_binop (mode, builtin_optab, op0, op1,
2222 result, 0, OPTAB_DIRECT);
2223
2224 /* If we were unable to expand via the builtin, stop the sequence
2225 (without outputting the insns) and call the library function
2226 with the stabilized argument list. */
2227 if (result == 0)
2228 {
2229 end_sequence ();
2230 return expand_call (exp, target, target == const0_rtx);
2231 }
2232
2233 if (errno_set)
2234 expand_errno_check (exp, result);
2235
2236 /* Output the entire sequence. */
2237 insns = get_insns ();
2238 end_sequence ();
2239 emit_insn (insns);
2240
2241 return result;
2242 }
2243
2244 /* Expand a call to the builtin ternary math functions (fma).
2245 Return NULL_RTX if a normal call should be emitted rather than expanding the
2246 function in-line. EXP is the expression that is a call to the builtin
2247 function; if convenient, the result should be placed in TARGET.
2248 SUBTARGET may be used as the target for computing one of EXP's
2249 operands. */
2250
2251 static rtx
2252 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2253 {
2254 optab builtin_optab;
2255 rtx op0, op1, op2, result;
2256 rtx_insn *insns;
2257 tree fndecl = get_callee_fndecl (exp);
2258 tree arg0, arg1, arg2;
2259 machine_mode mode;
2260
2261 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2262 return NULL_RTX;
2263
2264 arg0 = CALL_EXPR_ARG (exp, 0);
2265 arg1 = CALL_EXPR_ARG (exp, 1);
2266 arg2 = CALL_EXPR_ARG (exp, 2);
2267
2268 switch (DECL_FUNCTION_CODE (fndecl))
2269 {
2270 CASE_FLT_FN (BUILT_IN_FMA):
2271 builtin_optab = fma_optab; break;
2272 default:
2273 gcc_unreachable ();
2274 }
2275
2276 /* Make a suitable register to place result in. */
2277 mode = TYPE_MODE (TREE_TYPE (exp));
2278
2279 /* Before working hard, check whether the instruction is available. */
2280 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2281 return NULL_RTX;
2282
2283 result = gen_reg_rtx (mode);
2284
2285 /* Always stabilize the argument list. */
2286 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2287 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2288 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2289
2290 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2291 op1 = expand_normal (arg1);
2292 op2 = expand_normal (arg2);
2293
2294 start_sequence ();
2295
2296 /* Compute into RESULT.
2297 Set RESULT to wherever the result comes back. */
2298 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2299 result, 0);
2300
2301 /* If we were unable to expand via the builtin, stop the sequence
2302 (without outputting the insns) and call the library function
2303 with the stabilized argument list. */
2304 if (result == 0)
2305 {
2306 end_sequence ();
2307 return expand_call (exp, target, target == const0_rtx);
2308 }
2309
2310 /* Output the entire sequence. */
2311 insns = get_insns ();
2312 end_sequence ();
2313 emit_insn (insns);
2314
2315 return result;
2316 }
2317
2318 /* Expand a call to the builtin sin and cos math functions.
2319 Return NULL_RTX if a normal call should be emitted rather than expanding the
2320 function in-line. EXP is the expression that is a call to the builtin
2321 function; if convenient, the result should be placed in TARGET.
2322 SUBTARGET may be used as the target for computing one of EXP's
2323 operands. */
2324
2325 static rtx
2326 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2327 {
2328 optab builtin_optab;
2329 rtx op0;
2330 rtx_insn *insns;
2331 tree fndecl = get_callee_fndecl (exp);
2332 machine_mode mode;
2333 tree arg;
2334
2335 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2336 return NULL_RTX;
2337
2338 arg = CALL_EXPR_ARG (exp, 0);
2339
2340 switch (DECL_FUNCTION_CODE (fndecl))
2341 {
2342 CASE_FLT_FN (BUILT_IN_SIN):
2343 CASE_FLT_FN (BUILT_IN_COS):
2344 builtin_optab = sincos_optab; break;
2345 default:
2346 gcc_unreachable ();
2347 }
2348
2349 /* Make a suitable register to place result in. */
2350 mode = TYPE_MODE (TREE_TYPE (exp));
2351
2352 /* Check if the sincos insn is available; otherwise fall back
2353 to the sin or cos insn. */
2354 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2355 switch (DECL_FUNCTION_CODE (fndecl))
2356 {
2357 CASE_FLT_FN (BUILT_IN_SIN):
2358 builtin_optab = sin_optab; break;
2359 CASE_FLT_FN (BUILT_IN_COS):
2360 builtin_optab = cos_optab; break;
2361 default:
2362 gcc_unreachable ();
2363 }
2364
2365 /* Before working hard, check whether the instruction is available. */
2366 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2367 {
2368 rtx result = gen_reg_rtx (mode);
2369
2370 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2371 need to expand the argument again. This way, we will not perform
2372 side-effects more than once. */
2373 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2374
2375 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2376
2377 start_sequence ();
2378
2379 /* Compute into RESULT.
2380 Set RESULT to wherever the result comes back. */
2381 if (builtin_optab == sincos_optab)
2382 {
2383 int ok;
2384
2385 switch (DECL_FUNCTION_CODE (fndecl))
2386 {
2387 CASE_FLT_FN (BUILT_IN_SIN):
2388 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2389 break;
2390 CASE_FLT_FN (BUILT_IN_COS):
2391 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2392 break;
2393 default:
2394 gcc_unreachable ();
2395 }
2396 gcc_assert (ok);
2397 }
2398 else
2399 result = expand_unop (mode, builtin_optab, op0, result, 0);
2400
2401 if (result != 0)
2402 {
2403 /* Output the entire sequence. */
2404 insns = get_insns ();
2405 end_sequence ();
2406 emit_insn (insns);
2407 return result;
2408 }
2409
2410 /* If we were unable to expand via the builtin, stop the sequence
2411 (without outputting the insns) and call the library function
2412 with the stabilized argument list. */
2413 end_sequence ();
2414 }
2415
2416 return expand_call (exp, target, target == const0_rtx);
2417 }
2418
2419 /* Given an interclass math builtin decl FNDECL and its argument ARG,
2420 return an RTL instruction code that implements the functionality.
2421 If that isn't possible or available, return CODE_FOR_nothing. */
2422
2423 static enum insn_code
2424 interclass_mathfn_icode (tree arg, tree fndecl)
2425 {
2426 bool errno_set = false;
2427 optab builtin_optab = unknown_optab;
2428 machine_mode mode;
2429
2430 switch (DECL_FUNCTION_CODE (fndecl))
2431 {
2432 CASE_FLT_FN (BUILT_IN_ILOGB):
2433 errno_set = true; builtin_optab = ilogb_optab; break;
2434 CASE_FLT_FN (BUILT_IN_ISINF):
2435 builtin_optab = isinf_optab; break;
2436 case BUILT_IN_ISNORMAL:
2437 case BUILT_IN_ISFINITE:
2438 CASE_FLT_FN (BUILT_IN_FINITE):
2439 case BUILT_IN_FINITED32:
2440 case BUILT_IN_FINITED64:
2441 case BUILT_IN_FINITED128:
2442 case BUILT_IN_ISINFD32:
2443 case BUILT_IN_ISINFD64:
2444 case BUILT_IN_ISINFD128:
2445 /* These builtins have no optabs (yet). */
2446 break;
2447 default:
2448 gcc_unreachable ();
2449 }
2450
2451 /* There's no easy way to detect the case we need to set EDOM. */
2452 if (flag_errno_math && errno_set)
2453 return CODE_FOR_nothing;
2454
2455 /* Optab mode depends on the mode of the input argument. */
2456 mode = TYPE_MODE (TREE_TYPE (arg));
2457
2458 if (builtin_optab)
2459 return optab_handler (builtin_optab, mode);
2460 return CODE_FOR_nothing;
2461 }
2462
2463 /* Expand a call to one of the builtin math functions that take a
2464 floating point argument and produce an integer result (ilogb, isinf,
2465 isnan, etc.).
2466 Return 0 if a normal call should be emitted rather than expanding the
2467 function in-line. EXP is the expression that is a call to the builtin
2468 function; if convenient, the result should be placed in TARGET. */
2469
2470 static rtx
2471 expand_builtin_interclass_mathfn (tree exp, rtx target)
2472 {
2473 enum insn_code icode = CODE_FOR_nothing;
2474 rtx op0;
2475 tree fndecl = get_callee_fndecl (exp);
2476 machine_mode mode;
2477 tree arg;
2478
2479 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2480 return NULL_RTX;
2481
2482 arg = CALL_EXPR_ARG (exp, 0);
2483 icode = interclass_mathfn_icode (arg, fndecl);
2484 mode = TYPE_MODE (TREE_TYPE (arg));
2485
2486 if (icode != CODE_FOR_nothing)
2487 {
2488 struct expand_operand ops[1];
2489 rtx_insn *last = get_last_insn ();
2490 tree orig_arg = arg;
2491
2492 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2493 need to expand the argument again. This way, we will not perform
2494 side-effects more than once. */
2495 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2496
2497 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2498
2499 if (mode != GET_MODE (op0))
2500 op0 = convert_to_mode (mode, op0, 0);
2501
2502 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2503 if (maybe_legitimize_operands (icode, 0, 1, ops)
2504 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2505 return ops[0].value;
2506
2507 delete_insns_since (last);
2508 CALL_EXPR_ARG (exp, 0) = orig_arg;
2509 }
2510
2511 return NULL_RTX;
2512 }
2513
2514 /* Expand a call to the builtin sincos math function.
2515 Return NULL_RTX if a normal call should be emitted rather than expanding the
2516 function in-line. EXP is the expression that is a call to the builtin
2517 function. */
2518
2519 static rtx
2520 expand_builtin_sincos (tree exp)
2521 {
2522 rtx op0, op1, op2, target1, target2;
2523 machine_mode mode;
2524 tree arg, sinp, cosp;
2525 int result;
2526 location_t loc = EXPR_LOCATION (exp);
2527 tree alias_type, alias_off;
2528
2529 if (!validate_arglist (exp, REAL_TYPE,
2530 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2531 return NULL_RTX;
2532
2533 arg = CALL_EXPR_ARG (exp, 0);
2534 sinp = CALL_EXPR_ARG (exp, 1);
2535 cosp = CALL_EXPR_ARG (exp, 2);
2536
2537 /* Make a suitable register to place result in. */
2538 mode = TYPE_MODE (TREE_TYPE (arg));
2539
2540 /* Check if sincos insn is available, otherwise emit the call. */
2541 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2542 return NULL_RTX;
2543
2544 target1 = gen_reg_rtx (mode);
2545 target2 = gen_reg_rtx (mode);
2546
2547 op0 = expand_normal (arg);
2548 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2549 alias_off = build_int_cst (alias_type, 0);
2550 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2551 sinp, alias_off));
2552 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2553 cosp, alias_off));
2554
2555 /* Compute into target1 and target2.
2556 Set TARGET to wherever the result comes back. */
2557 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2558 gcc_assert (result);
2559
2560 /* Move target1 and target2 to the memory locations indicated
2561 by op1 and op2. */
2562 emit_move_insn (op1, target1);
2563 emit_move_insn (op2, target2);
2564
2565 return const0_rtx;
2566 }
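/* Illustrative sketch, not part of this file: the call shape handled
   above.  With a sincos insn both results come from one instruction
   instead of two libcalls; sincos itself is a GNU libc extension.  */
#if 0
#define _GNU_SOURCE
#include <math.h>

void
polar_to_cartesian (double angle, double *s, double *c)
{
  sincos (angle, s, c);   /* expanded via sincos_optab when available */
}
#endif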
2567
2568 /* Expand a call to the internal cexpi builtin to the sincos math function.
2569 EXP is the expression that is a call to the builtin function; if convenient,
2570 the result should be placed in TARGET. */
2571
2572 static rtx
2573 expand_builtin_cexpi (tree exp, rtx target)
2574 {
2575 tree fndecl = get_callee_fndecl (exp);
2576 tree arg, type;
2577 machine_mode mode;
2578 rtx op0, op1, op2;
2579 location_t loc = EXPR_LOCATION (exp);
2580
2581 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2582 return NULL_RTX;
2583
2584 arg = CALL_EXPR_ARG (exp, 0);
2585 type = TREE_TYPE (arg);
2586 mode = TYPE_MODE (TREE_TYPE (arg));
2587
2588 /* Try expanding via a sincos optab; fall back to emitting a libcall
2589 to sincos or cexp. One of them is sure to exist, because cexpi is
2590 only generated from sincos or cexp, or when either is available. */
2591 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2592 {
2593 op1 = gen_reg_rtx (mode);
2594 op2 = gen_reg_rtx (mode);
2595
2596 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2597
2598 /* Compute into op1 and op2. */
2599 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2600 }
2601 else if (targetm.libc_has_function (function_sincos))
2602 {
2603 tree call, fn = NULL_TREE;
2604 tree top1, top2;
2605 rtx op1a, op2a;
2606
2607 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2608 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2609 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2610 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2611 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2612 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2613 else
2614 gcc_unreachable ();
2615
2616 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2617 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2618 op1a = copy_addr_to_reg (XEXP (op1, 0));
2619 op2a = copy_addr_to_reg (XEXP (op2, 0));
2620 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2621 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2622
2623 /* Make sure not to fold the sincos call again. */
2624 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2625 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2626 call, 3, arg, top1, top2));
2627 }
2628 else
2629 {
2630 tree call, fn = NULL_TREE, narg;
2631 tree ctype = build_complex_type (type);
2632
2633 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2634 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2635 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2636 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2637 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2638 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2639 else
2640 gcc_unreachable ();
2641
2642 /* If we don't have a decl for cexp create one. This is the
2643 friendliest fallback if the user calls __builtin_cexpi
2644 without full target C99 function support. */
2645 if (fn == NULL_TREE)
2646 {
2647 tree fntype;
2648 const char *name = NULL;
2649
2650 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2651 name = "cexpf";
2652 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2653 name = "cexp";
2654 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2655 name = "cexpl";
2656
2657 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2658 fn = build_fn_decl (name, fntype);
2659 }
2660
2661 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2662 build_real (type, dconst0), arg);
2663
2664 /* Make sure not to fold the cexp call again. */
2665 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2666 return expand_expr (build_call_nary (ctype, call, 1, narg),
2667 target, VOIDmode, EXPAND_NORMAL);
2668 }
2669
2670 /* Now build the return value with the proper complex type. */
2671 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2672 make_tree (TREE_TYPE (arg), op2),
2673 make_tree (TREE_TYPE (arg), op1)),
2674 target, VOIDmode, EXPAND_NORMAL);
2675 }
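/* Illustrative sketch, not part of this file: __builtin_cexpi (x)
   computes cos (x) + i*sin (x), so the two fallbacks above amount to
   the following source-level rewrites.  */
#if 0
#define _GNU_SOURCE
#include <complex.h>
#include <math.h>

_Complex double
cexpi_via_sincos (double x)   /* sincos fallback */
{
  double s, c;
  sincos (x, &s, &c);
  return c + s * I;           /* real part = cos, imaginary part = sin */
}

_Complex double
cexpi_via_cexp (double x)     /* cexp fallback */
{
  return cexp (x * I);        /* exp (i*x) */
}
#endif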
2676
2677 /* Conveniently construct a function call expression. FNDECL names the
2678 function to be called, N is the number of arguments, and the "..."
2679 parameters are the argument expressions. Unlike build_call_expr
2680 this doesn't fold the call, so it will always return a CALL_EXPR. */
2681
2682 static tree
2683 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2684 {
2685 va_list ap;
2686 tree fntype = TREE_TYPE (fndecl);
2687 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2688
2689 va_start (ap, n);
2690 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2691 va_end (ap);
2692 SET_EXPR_LOCATION (fn, loc);
2693 return fn;
2694 }
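/* For example, the lceil/lfloor fallback below uses it as
     build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg)
   to wrap ARG in a CALL_EXPR to floor/ceil without refolding it.  */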
2695
2696 /* Expand a call to one of the builtin rounding functions gcc defines
2697 as an extension (lfloor and lceil). As these are gcc extensions we
2698 do not need to worry about setting errno to EDOM.
2699 If expanding via the optab fails, lower the expression to (int) floor (x).
2700 EXP is the expression that is a call to the builtin function;
2701 if convenient, the result should be placed in TARGET. */
2702
2703 static rtx
2704 expand_builtin_int_roundingfn (tree exp, rtx target)
2705 {
2706 convert_optab builtin_optab;
2707 rtx op0, tmp;
2708 rtx_insn *insns;
2709 tree fndecl = get_callee_fndecl (exp);
2710 enum built_in_function fallback_fn;
2711 tree fallback_fndecl;
2712 machine_mode mode;
2713 tree arg;
2714
2715 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2716 gcc_unreachable ();
2717
2718 arg = CALL_EXPR_ARG (exp, 0);
2719
2720 switch (DECL_FUNCTION_CODE (fndecl))
2721 {
2722 CASE_FLT_FN (BUILT_IN_ICEIL):
2723 CASE_FLT_FN (BUILT_IN_LCEIL):
2724 CASE_FLT_FN (BUILT_IN_LLCEIL):
2725 builtin_optab = lceil_optab;
2726 fallback_fn = BUILT_IN_CEIL;
2727 break;
2728
2729 CASE_FLT_FN (BUILT_IN_IFLOOR):
2730 CASE_FLT_FN (BUILT_IN_LFLOOR):
2731 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2732 builtin_optab = lfloor_optab;
2733 fallback_fn = BUILT_IN_FLOOR;
2734 break;
2735
2736 default:
2737 gcc_unreachable ();
2738 }
2739
2740 /* Make a suitable register to place result in. */
2741 mode = TYPE_MODE (TREE_TYPE (exp));
2742
2743 target = gen_reg_rtx (mode);
2744
2745 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2746 need to expand the argument again. This way, we will not perform
2747 side-effects more than once. */
2748 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2749
2750 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2751
2752 start_sequence ();
2753
2754 /* Compute into TARGET. */
2755 if (expand_sfix_optab (target, op0, builtin_optab))
2756 {
2757 /* Output the entire sequence. */
2758 insns = get_insns ();
2759 end_sequence ();
2760 emit_insn (insns);
2761 return target;
2762 }
2763
2764 /* If we were unable to expand via the builtin, stop the sequence
2765 (without outputting the insns). */
2766 end_sequence ();
2767
2768 /* Fall back to the floating point rounding builtin (floor/ceil). */
2769 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2770
2771 /* For non-C99 targets we may end up without a fallback fndecl here
2772 if the user called __builtin_lfloor directly. In this case emit
2773 a call to the floor/ceil variants nevertheless. This should give
2774 the best user experience on targets without full C99 support. */
2775 if (fallback_fndecl == NULL_TREE)
2776 {
2777 tree fntype;
2778 const char *name = NULL;
2779
2780 switch (DECL_FUNCTION_CODE (fndecl))
2781 {
2782 case BUILT_IN_ICEIL:
2783 case BUILT_IN_LCEIL:
2784 case BUILT_IN_LLCEIL:
2785 name = "ceil";
2786 break;
2787 case BUILT_IN_ICEILF:
2788 case BUILT_IN_LCEILF:
2789 case BUILT_IN_LLCEILF:
2790 name = "ceilf";
2791 break;
2792 case BUILT_IN_ICEILL:
2793 case BUILT_IN_LCEILL:
2794 case BUILT_IN_LLCEILL:
2795 name = "ceill";
2796 break;
2797 case BUILT_IN_IFLOOR:
2798 case BUILT_IN_LFLOOR:
2799 case BUILT_IN_LLFLOOR:
2800 name = "floor";
2801 break;
2802 case BUILT_IN_IFLOORF:
2803 case BUILT_IN_LFLOORF:
2804 case BUILT_IN_LLFLOORF:
2805 name = "floorf";
2806 break;
2807 case BUILT_IN_IFLOORL:
2808 case BUILT_IN_LFLOORL:
2809 case BUILT_IN_LLFLOORL:
2810 name = "floorl";
2811 break;
2812 default:
2813 gcc_unreachable ();
2814 }
2815
2816 fntype = build_function_type_list (TREE_TYPE (arg),
2817 TREE_TYPE (arg), NULL_TREE);
2818 fallback_fndecl = build_fn_decl (name, fntype);
2819 }
2820
2821 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2822
2823 tmp = expand_normal (exp);
2824 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2825
2826 /* Truncate the result of the floating point call to an integer
2827 via expand_fix (). */
2828 target = gen_reg_rtx (mode);
2829 expand_fix (target, tmp, 0);
2830
2831 return target;
2832 }
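/* Illustrative sketch, not part of this file: when no lceil/lfloor insn
   is available, the fallback path above behaves like this source-level
   rewrite of __builtin_lfloor.  */
#if 0
#include <math.h>

long
lfloor_fallback (double x)
{
  return (long) floor (x);   /* floor call, then expand_fix truncation */
}
#endif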
2833
2834 /* Expand a call to one of the builtin math functions doing integer
2835 conversion (lrint).
2836 Return 0 if a normal call should be emitted rather than expanding the
2837 function in-line. EXP is the expression that is a call to the builtin
2838 function; if convenient, the result should be placed in TARGET. */
2839
2840 static rtx
2841 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2842 {
2843 convert_optab builtin_optab;
2844 rtx op0;
2845 rtx_insn *insns;
2846 tree fndecl = get_callee_fndecl (exp);
2847 tree arg;
2848 machine_mode mode;
2849 enum built_in_function fallback_fn = BUILT_IN_NONE;
2850
2851 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2852 gcc_unreachable ();
2853
2854 arg = CALL_EXPR_ARG (exp, 0);
2855
2856 switch (DECL_FUNCTION_CODE (fndecl))
2857 {
2858 CASE_FLT_FN (BUILT_IN_IRINT):
2859 fallback_fn = BUILT_IN_LRINT;
2860 /* FALLTHRU */
2861 CASE_FLT_FN (BUILT_IN_LRINT):
2862 CASE_FLT_FN (BUILT_IN_LLRINT):
2863 builtin_optab = lrint_optab;
2864 break;
2865
2866 CASE_FLT_FN (BUILT_IN_IROUND):
2867 fallback_fn = BUILT_IN_LROUND;
2868 /* FALLTHRU */
2869 CASE_FLT_FN (BUILT_IN_LROUND):
2870 CASE_FLT_FN (BUILT_IN_LLROUND):
2871 builtin_optab = lround_optab;
2872 break;
2873
2874 default:
2875 gcc_unreachable ();
2876 }
2877
2878 /* There's no easy way to detect the case we need to set EDOM. */
2879 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2880 return NULL_RTX;
2881
2882 /* Make a suitable register to place result in. */
2883 mode = TYPE_MODE (TREE_TYPE (exp));
2884
2885 /* Expand inline only if we don't need to worry about setting errno. */
2886 if (!flag_errno_math)
2887 {
2888 rtx result = gen_reg_rtx (mode);
2889
2890 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2891 need to expand the argument again. This way, we will not perform
2892 side-effects more than once. */
2893 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2894
2895 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2896
2897 start_sequence ();
2898
2899 if (expand_sfix_optab (result, op0, builtin_optab))
2900 {
2901 /* Output the entire sequence. */
2902 insns = get_insns ();
2903 end_sequence ();
2904 emit_insn (insns);
2905 return result;
2906 }
2907
2908 /* If we were unable to expand via the builtin, stop the sequence
2909 (without outputting the insns) and call the library function
2910 with the stabilized argument list. */
2911 end_sequence ();
2912 }
2913
2914 if (fallback_fn != BUILT_IN_NONE)
2915 {
2916 /* Fall back to rounding to long int. Use implicit_p 0: for non-C99
2917 targets, (int) round (x) should never be transformed into
2918 BUILT_IN_IROUND, and if __builtin_iround is called directly, emit
2919 a call to lround in the hope that the target provides at least some
2920 C99 functions. This should give the best user experience on
2921 targets without full C99 support. */
2922 tree fallback_fndecl = mathfn_built_in_1 (TREE_TYPE (arg),
2923 fallback_fn, 0);
2924
2925 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2926 fallback_fndecl, 1, arg);
2927
2928 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2929 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2930 return convert_to_mode (mode, target, 0);
2931 }
2932
2933 return expand_call (exp, target, target == const0_rtx);
2934 }
2935
2936 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2937 a normal call should be emitted rather than expanding the function
2938 in-line. EXP is the expression that is a call to the builtin
2939 function; if convenient, the result should be placed in TARGET. */
2940
2941 static rtx
2942 expand_builtin_powi (tree exp, rtx target)
2943 {
2944 tree arg0, arg1;
2945 rtx op0, op1;
2946 machine_mode mode;
2947 machine_mode mode2;
2948
2949 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2950 return NULL_RTX;
2951
2952 arg0 = CALL_EXPR_ARG (exp, 0);
2953 arg1 = CALL_EXPR_ARG (exp, 1);
2954 mode = TYPE_MODE (TREE_TYPE (exp));
2955
2956 /* Emit a libcall to libgcc. */
2957
2958 /* Mode of the 2nd argument must match that of an int. */
2959 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2960
2961 if (target == NULL_RTX)
2962 target = gen_reg_rtx (mode);
2963
2964 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2965 if (GET_MODE (op0) != mode)
2966 op0 = convert_to_mode (mode, op0, 0);
2967 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2968 if (GET_MODE (op1) != mode2)
2969 op1 = convert_to_mode (mode2, op1, 0);
2970
2971 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2972 target, LCT_CONST, mode, 2,
2973 op0, mode, op1, mode2);
2974
2975 return target;
2976 }
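/* Illustrative sketch, not part of this file: the libcall emitted above
   resolves to libgcc's __powi* helpers, e.g. __powidf2 for double.  */
#if 0
extern double __powidf2 (double, int);  /* libgcc helper for DFmode */

double
cube (double x)
{
  return __builtin_powi (x, 3);   /* may lower to __powidf2 (x, 3) here */
}
#endif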
2977
2978 /* Expand expression EXP which is a call to the strlen builtin. Return
2979 NULL_RTX if we failed and the caller should emit a normal call;
2980 otherwise try to get the result in TARGET, if convenient. */
2981
2982 static rtx
2983 expand_builtin_strlen (tree exp, rtx target,
2984 machine_mode target_mode)
2985 {
2986 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2987 return NULL_RTX;
2988 else
2989 {
2990 struct expand_operand ops[4];
2991 rtx pat;
2992 tree len;
2993 tree src = CALL_EXPR_ARG (exp, 0);
2994 rtx src_reg;
2995 rtx_insn *before_strlen;
2996 machine_mode insn_mode = target_mode;
2997 enum insn_code icode = CODE_FOR_nothing;
2998 unsigned int align;
2999
3000 /* If the length can be computed at compile-time, return it. */
3001 len = c_strlen (src, 0);
3002 if (len)
3003 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3004
3005 /* If the length can be computed at compile-time and is constant
3006 integer, but there are side-effects in src, evaluate
3007 src for side-effects, then return len.
3008 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3009 can be optimized into: i++; x = 3; */
3010 len = c_strlen (src, 1);
3011 if (len && TREE_CODE (len) == INTEGER_CST)
3012 {
3013 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3014 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3015 }
3016
3017 align = get_pointer_alignment (src) / BITS_PER_UNIT;
3018
3019 /* If SRC is not a pointer type, don't do this operation inline. */
3020 if (align == 0)
3021 return NULL_RTX;
3022
3023 /* Bail out if we can't compute strlen in the right mode. */
3024 while (insn_mode != VOIDmode)
3025 {
3026 icode = optab_handler (strlen_optab, insn_mode);
3027 if (icode != CODE_FOR_nothing)
3028 break;
3029
3030 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3031 }
3032 if (insn_mode == VOIDmode)
3033 return NULL_RTX;
3034
3035 /* Make a place to hold the source address. We will not expand
3036 the actual source until we are sure that the expansion will
3037 not fail -- there are trees that cannot be expanded twice. */
3038 src_reg = gen_reg_rtx (Pmode);
3039
3040 /* Mark the beginning of the strlen sequence so we can emit the
3041 source operand later. */
3042 before_strlen = get_last_insn ();
3043
3044 create_output_operand (&ops[0], target, insn_mode);
3045 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3046 create_integer_operand (&ops[2], 0);
3047 create_integer_operand (&ops[3], align);
3048 if (!maybe_expand_insn (icode, 4, ops))
3049 return NULL_RTX;
3050
3051 /* Now that we are assured of success, expand the source. */
3052 start_sequence ();
3053 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3054 if (pat != src_reg)
3055 {
3056 #ifdef POINTERS_EXTEND_UNSIGNED
3057 if (GET_MODE (pat) != Pmode)
3058 pat = convert_to_mode (Pmode, pat,
3059 POINTERS_EXTEND_UNSIGNED);
3060 #endif
3061 emit_move_insn (src_reg, pat);
3062 }
3063 pat = get_insns ();
3064 end_sequence ();
3065
3066 if (before_strlen)
3067 emit_insn_after (pat, before_strlen);
3068 else
3069 emit_insn_before (pat, get_insns ());
3070
3071 /* Return the value in the proper mode for this function. */
3072 if (GET_MODE (ops[0].value) == target_mode)
3073 target = ops[0].value;
3074 else if (target != 0)
3075 convert_move (target, ops[0].value, 0);
3076 else
3077 target = convert_to_mode (target_mode, ops[0].value, 0);
3078
3079 return target;
3080 }
3081 }
3082
3083 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3084 bytes from the constant string DATA + OFFSET and return them as a
3085 target constant. */
3086
3087 static rtx
3088 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3089 machine_mode mode)
3090 {
3091 const char *str = (const char *) data;
3092
3093 gcc_assert (offset >= 0
3094 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3095 <= strlen (str) + 1));
3096
3097 return c_readstr (str + offset, mode);
3098 }
3099
3100 /* LEN specifies the length of the block in a memcpy/memset operation.
3101 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
3102 In some cases we can make a very likely guess about the maximum
3103 size, which we then store in PROBABLE_MAX_SIZE. */
3104
3105 static void
3106 determine_block_size (tree len, rtx len_rtx,
3107 unsigned HOST_WIDE_INT *min_size,
3108 unsigned HOST_WIDE_INT *max_size,
3109 unsigned HOST_WIDE_INT *probable_max_size)
3110 {
3111 if (CONST_INT_P (len_rtx))
3112 {
3113 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3114 return;
3115 }
3116 else
3117 {
3118 wide_int min, max;
3119 enum value_range_type range_type = VR_UNDEFINED;
3120
3121 /* Determine bounds from the type. */
3122 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3123 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3124 else
3125 *min_size = 0;
3126 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3127 *probable_max_size = *max_size
3128 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3129 else
3130 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3131
3132 if (TREE_CODE (len) == SSA_NAME)
3133 range_type = get_range_info (len, &min, &max);
3134 if (range_type == VR_RANGE)
3135 {
3136 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3137 *min_size = min.to_uhwi ();
3138 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3139 *probable_max_size = *max_size = max.to_uhwi ();
3140 }
3141 else if (range_type == VR_ANTI_RANGE)
3142 {
3143 /* An anti range 0...N lets us determine that the minimal size is N+1. */
3144 if (min == 0)
3145 {
3146 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
3147 *min_size = max.to_uhwi () + 1;
3148 }
3149 /* Code like
3150
3151 int n;
3152 if (n < 100)
3153 memcpy (a, b, n)
3154
3155 produces an anti range allowing negative values of N. We can
3156 still use that information to guess that N is not negative.
3157 */
3158 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3159 *probable_max_size = min.to_uhwi () - 1;
3160 }
3161 }
3162 gcc_checking_assert (*max_size <=
3163 (unsigned HOST_WIDE_INT)
3164 GET_MODE_MASK (GET_MODE (len_rtx)));
3165 }
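/* Worked example (illustrative): for
     if (n >= 4 && n <= 64) memcpy (d, s, n);
   VRP records the range of N as [4, 64], so the VR_RANGE case above
   yields *MIN_SIZE = 4 and *MAX_SIZE = *PROBABLE_MAX_SIZE = 64, letting
   the block-move expander pick a strategy sized for at most 64 bytes.  */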
3166
3167 /* Helper function to do the actual work for expand_builtin_memcpy. */
3168
3169 static rtx
3170 expand_builtin_memcpy_args (tree dest, tree src, tree len, rtx target, tree exp)
3171 {
3172 const char *src_str;
3173 unsigned int src_align = get_pointer_alignment (src);
3174 unsigned int dest_align = get_pointer_alignment (dest);
3175 rtx dest_mem, src_mem, dest_addr, len_rtx;
3176 HOST_WIDE_INT expected_size = -1;
3177 unsigned int expected_align = 0;
3178 unsigned HOST_WIDE_INT min_size;
3179 unsigned HOST_WIDE_INT max_size;
3180 unsigned HOST_WIDE_INT probable_max_size;
3181
3182 /* If DEST is not a pointer type, call the normal function. */
3183 if (dest_align == 0)
3184 return NULL_RTX;
3185
3186 /* If SRC is not a pointer type, don't do this
3187 operation in-line. */
3188 if (src_align == 0)
3189 return NULL_RTX;
3190
3191 if (currently_expanding_gimple_stmt)
3192 stringop_block_profile (currently_expanding_gimple_stmt,
3193 &expected_align, &expected_size);
3194
3195 if (expected_align < dest_align)
3196 expected_align = dest_align;
3197 dest_mem = get_memory_rtx (dest, len);
3198 set_mem_align (dest_mem, dest_align);
3199 len_rtx = expand_normal (len);
3200 determine_block_size (len, len_rtx, &min_size, &max_size,
3201 &probable_max_size);
3202 src_str = c_getstr (src);
3203
3204 /* If SRC is a string constant and the block move would be done
3205 by pieces, we can avoid loading the string from memory
3206 and instead store the computed constants. */
3207 if (src_str
3208 && CONST_INT_P (len_rtx)
3209 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3210 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3211 CONST_CAST (char *, src_str),
3212 dest_align, false))
3213 {
3214 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3215 builtin_memcpy_read_str,
3216 CONST_CAST (char *, src_str),
3217 dest_align, false, 0);
3218 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3219 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3220 return dest_mem;
3221 }
3222
3223 src_mem = get_memory_rtx (src, len);
3224 set_mem_align (src_mem, src_align);
3225
3226 /* Copy word part most expediently. */
3227 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3228 CALL_EXPR_TAILCALL (exp)
3229 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3230 expected_align, expected_size,
3231 min_size, max_size, probable_max_size);
3232
3233 if (dest_addr == 0)
3234 {
3235 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3236 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3237 }
3238
3239 return dest_addr;
3240 }
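/* Illustrative sketch, not part of this file: the store-by-pieces path
   above turns a copy from a constant string, such as the one below,
   into a few immediate stores instead of a library call whenever
   can_store_by_pieces reports the target can do so cheaply.  */
#if 0
#include <string.h>

void
greet (char *buf)
{
  memcpy (buf, "hi!", 4);   /* constant source and length */
}
#endif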
3241
3242 /* Expand a call EXP to the memcpy builtin.
3243 Return NULL_RTX if we failed; the caller should emit a normal call,
3244 otherwise try to get the result in TARGET, if convenient (and in
3245 mode MODE if that's convenient). */
3246
3247 static rtx
3248 expand_builtin_memcpy (tree exp, rtx target)
3249 {
3250 if (!validate_arglist (exp,
3251 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3252 return NULL_RTX;
3253 else
3254 {
3255 tree dest = CALL_EXPR_ARG (exp, 0);
3256 tree src = CALL_EXPR_ARG (exp, 1);
3257 tree len = CALL_EXPR_ARG (exp, 2);
3258 return expand_builtin_memcpy_args (dest, src, len, target, exp);
3259 }
3260 }
3261
3262 /* Expand an instrumented call EXP to the memcpy builtin.
3263 Return NULL_RTX if we failed; the caller should emit a normal call,
3264 otherwise try to get the result in TARGET, if convenient (and in
3265 mode MODE if that's convenient). */
3266
3267 static rtx
3268 expand_builtin_memcpy_with_bounds (tree exp, rtx target)
3269 {
3270 if (!validate_arglist (exp,
3271 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3272 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3273 INTEGER_TYPE, VOID_TYPE))
3274 return NULL_RTX;
3275 else
3276 {
3277 tree dest = CALL_EXPR_ARG (exp, 0);
3278 tree src = CALL_EXPR_ARG (exp, 2);
3279 tree len = CALL_EXPR_ARG (exp, 4);
3280 rtx res = expand_builtin_memcpy_args (dest, src, len, target, exp);
3281
3282 /* Return src bounds with the result. */
3283 if (res)
3284 {
3285 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3286 expand_normal (CALL_EXPR_ARG (exp, 1)));
3287 res = chkp_join_splitted_slot (res, bnd);
3288 }
3289 return res;
3290 }
3291 }
3292
3293 /* Expand a call EXP to the mempcpy builtin.
3294 Return NULL_RTX if we failed; the caller should emit a normal call,
3295 otherwise try to get the result in TARGET, if convenient (and in
3296 mode MODE if that's convenient). If ENDP is 0 return the
3297 destination pointer, if ENDP is 1 return the end pointer a la
3298 mempcpy, and if ENDP is 2 return the end pointer minus one a la
3299 stpcpy. */
3300
3301 static rtx
3302 expand_builtin_mempcpy (tree exp, rtx target, machine_mode mode)
3303 {
3304 if (!validate_arglist (exp,
3305 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3306 return NULL_RTX;
3307 else
3308 {
3309 tree dest = CALL_EXPR_ARG (exp, 0);
3310 tree src = CALL_EXPR_ARG (exp, 1);
3311 tree len = CALL_EXPR_ARG (exp, 2);
3312 return expand_builtin_mempcpy_args (dest, src, len,
3313 target, mode, /*endp=*/ 1,
3314 exp);
3315 }
3316 }
3317
3318 /* Expand an instrumented call EXP to the mempcpy builtin.
3319 Return NULL_RTX if we failed; the caller should emit a normal call,
3320 otherwise try to get the result in TARGET, if convenient (and in
3321 mode MODE if that's convenient). */
3322
3323 static rtx
3324 expand_builtin_mempcpy_with_bounds (tree exp, rtx target, machine_mode mode)
3325 {
3326 if (!validate_arglist (exp,
3327 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3328 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3329 INTEGER_TYPE, VOID_TYPE))
3330 return NULL_RTX;
3331 else
3332 {
3333 tree dest = CALL_EXPR_ARG (exp, 0);
3334 tree src = CALL_EXPR_ARG (exp, 2);
3335 tree len = CALL_EXPR_ARG (exp, 4);
3336 rtx res = expand_builtin_mempcpy_args (dest, src, len, target,
3337 mode, 1, exp);
3338
3339 /* Return src bounds with the result. */
3340 if (res)
3341 {
3342 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3343 expand_normal (CALL_EXPR_ARG (exp, 1)));
3344 res = chkp_join_splitted_slot (res, bnd);
3345 }
3346 return res;
3347 }
3348 }
3349
3350 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3351 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3352 so that this can also be called without constructing an actual CALL_EXPR.
3353 The other arguments and return value are the same as for
3354 expand_builtin_mempcpy. */
3355
3356 static rtx
3357 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3358 rtx target, machine_mode mode, int endp,
3359 tree orig_exp)
3360 {
3361 tree fndecl = get_callee_fndecl (orig_exp);
3362
3363 /* If return value is ignored, transform mempcpy into memcpy. */
3364 if (target == const0_rtx
3365 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP
3366 && builtin_decl_implicit_p (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP))
3367 {
3368 tree fn = builtin_decl_implicit (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP);
3369 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3370 dest, src, len);
3371 return expand_expr (result, target, mode, EXPAND_NORMAL);
3372 }
3373 else if (target == const0_rtx
3374 && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
3375 {
3376 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3377 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3378 dest, src, len);
3379 return expand_expr (result, target, mode, EXPAND_NORMAL);
3380 }
3381 else
3382 {
3383 const char *src_str;
3384 unsigned int src_align = get_pointer_alignment (src);
3385 unsigned int dest_align = get_pointer_alignment (dest);
3386 rtx dest_mem, src_mem, len_rtx;
3387
3388 /* If either SRC or DEST is not a pointer type, don't do this
3389 operation in-line. */
3390 if (dest_align == 0 || src_align == 0)
3391 return NULL_RTX;
3392
3393 /* If LEN is not constant, call the normal function. */
3394 if (! tree_fits_uhwi_p (len))
3395 return NULL_RTX;
3396
3397 len_rtx = expand_normal (len);
3398 src_str = c_getstr (src);
3399
3400 /* If SRC is a string constant and block move would be done
3401 by pieces, we can avoid loading the string from memory
3402 and only store the computed constants. */
3403 if (src_str
3404 && CONST_INT_P (len_rtx)
3405 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3406 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3407 CONST_CAST (char *, src_str),
3408 dest_align, false))
3409 {
3410 dest_mem = get_memory_rtx (dest, len);
3411 set_mem_align (dest_mem, dest_align);
3412 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3413 builtin_memcpy_read_str,
3414 CONST_CAST (char *, src_str),
3415 dest_align, false, endp);
3416 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3417 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3418 return dest_mem;
3419 }
3420
3421 if (CONST_INT_P (len_rtx)
3422 && can_move_by_pieces (INTVAL (len_rtx),
3423 MIN (dest_align, src_align)))
3424 {
3425 dest_mem = get_memory_rtx (dest, len);
3426 set_mem_align (dest_mem, dest_align);
3427 src_mem = get_memory_rtx (src, len);
3428 set_mem_align (src_mem, src_align);
3429 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3430 MIN (dest_align, src_align), endp);
3431 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3432 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3433 return dest_mem;
3434 }
3435
3436 return NULL_RTX;
3437 }
3438 }
3439
3440 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3441 we failed; the caller should emit a normal call, otherwise try to
3442 get the result in TARGET, if convenient. If ENDP is 0 return the
3443 destination pointer, if ENDP is 1 return the end pointer ala
3444 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3445 stpcpy. */
3446
3447 static rtx
3448 expand_movstr (tree dest, tree src, rtx target, int endp)
3449 {
3450 struct expand_operand ops[3];
3451 rtx dest_mem;
3452 rtx src_mem;
3453
3454 if (!targetm.have_movstr ())
3455 return NULL_RTX;
3456
3457 dest_mem = get_memory_rtx (dest, NULL);
3458 src_mem = get_memory_rtx (src, NULL);
3459 if (!endp)
3460 {
3461 target = force_reg (Pmode, XEXP (dest_mem, 0));
3462 dest_mem = replace_equiv_address (dest_mem, target);
3463 }
3464
3465 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3466 create_fixed_operand (&ops[1], dest_mem);
3467 create_fixed_operand (&ops[2], src_mem);
3468 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
3469 return NULL_RTX;
3470
3471 if (endp && target != const0_rtx)
3472 {
3473 target = ops[0].value;
3474 /* movstr is supposed to set end to the address of the NUL
3475 terminator. If the caller requested a mempcpy-like return value,
3476 adjust it. */
3477 if (endp == 1)
3478 {
3479 rtx tem = plus_constant (GET_MODE (target),
3480 gen_lowpart (GET_MODE (target), target), 1);
3481 emit_move_insn (target, force_operand (tem, NULL_RTX));
3482 }
3483 }
3484 return target;
3485 }
3486
3487 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3488 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3489 try to get the result in TARGET, if convenient (and in mode MODE if that's
3490 convenient). */
3491
3492 static rtx
3493 expand_builtin_strcpy (tree exp, rtx target)
3494 {
3495 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3496 {
3497 tree dest = CALL_EXPR_ARG (exp, 0);
3498 tree src = CALL_EXPR_ARG (exp, 1);
3499 return expand_builtin_strcpy_args (dest, src, target);
3500 }
3501 return NULL_RTX;
3502 }
3503
3504 /* Helper function to do the actual work for expand_builtin_strcpy. The
3505 arguments to the builtin_strcpy call DEST and SRC are broken out
3506 so that this can also be called without constructing an actual CALL_EXPR.
3507 The other arguments and return value are the same as for
3508 expand_builtin_strcpy. */
3509
3510 static rtx
3511 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3512 {
3513 return expand_movstr (dest, src, target, /*endp=*/0);
3514 }
3515
3516 /* Expand a call EXP to the stpcpy builtin.
3517 Return NULL_RTX if we failed; the caller should emit a normal call,
3518 otherwise try to get the result in TARGET, if convenient (and in
3519 mode MODE if that's convenient). */
3520
3521 static rtx
3522 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3523 {
3524 tree dst, src;
3525 location_t loc = EXPR_LOCATION (exp);
3526
3527 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3528 return NULL_RTX;
3529
3530 dst = CALL_EXPR_ARG (exp, 0);
3531 src = CALL_EXPR_ARG (exp, 1);
3532
3533 /* If return value is ignored, transform stpcpy into strcpy. */
3534 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3535 {
3536 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3537 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3538 return expand_expr (result, target, mode, EXPAND_NORMAL);
3539 }
3540 else
3541 {
3542 tree len, lenp1;
3543 rtx ret;
3544
3545 /* Ensure we get an actual string whose length can be evaluated at
3546 compile-time, not an expression containing a string. This is
3547 because the latter will potentially produce pessimized code
3548 when used to produce the return value. */
3549 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3550 return expand_movstr (dst, src, target, /*endp=*/2);
3551
3552 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3553 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3554 target, mode, /*endp=*/2,
3555 exp);
3556
3557 if (ret)
3558 return ret;
3559
3560 if (TREE_CODE (len) == INTEGER_CST)
3561 {
3562 rtx len_rtx = expand_normal (len);
3563
3564 if (CONST_INT_P (len_rtx))
3565 {
3566 ret = expand_builtin_strcpy_args (dst, src, target);
3567
3568 if (ret)
3569 {
3570 if (! target)
3571 {
3572 if (mode != VOIDmode)
3573 target = gen_reg_rtx (mode);
3574 else
3575 target = gen_reg_rtx (GET_MODE (ret));
3576 }
3577 if (GET_MODE (target) != GET_MODE (ret))
3578 ret = gen_lowpart (GET_MODE (target), ret);
3579
3580 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3581 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3582 gcc_assert (ret);
3583
3584 return target;
3585 }
3586 }
3587 }
3588
3589 return expand_movstr (dst, src, target, /*endp=*/2);
3590 }
3591 }
3592
3593 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3594 bytes from constant string DATA + OFFSET and return it as a target
3595 constant. */
3596
3597 rtx
3598 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3599 machine_mode mode)
3600 {
3601 const char *str = (const char *) data;
3602
3603 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3604 return const0_rtx;
3605
3606 return c_readstr (str + offset, mode);
3607 }
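/* Note that c_readstr zero-fills past the string's NUL, so, as a
   sketch for a 4-byte little-endian mode, DATA == "hi" at offset 0
   yields the constant 0x00006968, and any offset beyond strlen (DATA)
   yields const0_rtx -- exactly the trailing-zero padding that
   strncpy requires. */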
3608
3609 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3610 NULL_RTX if we failed; the caller should emit a normal call. */
3611
3612 static rtx
3613 expand_builtin_strncpy (tree exp, rtx target)
3614 {
3615 location_t loc = EXPR_LOCATION (exp);
3616
3617 if (validate_arglist (exp,
3618 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3619 {
3620 tree dest = CALL_EXPR_ARG (exp, 0);
3621 tree src = CALL_EXPR_ARG (exp, 1);
3622 tree len = CALL_EXPR_ARG (exp, 2);
3623 tree slen = c_strlen (src, 1);
3624
3625 /* We must be passed constant LEN and SRC parameters. */
3626 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
3627 return NULL_RTX;
3628
3629 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3630
3631 /* We're required to pad with trailing zeros if the requested
3632 len is greater than strlen(s2)+1. In that case try to
3633 use store_by_pieces; if that fails, punt. */
3634 if (tree_int_cst_lt (slen, len))
3635 {
3636 unsigned int dest_align = get_pointer_alignment (dest);
3637 const char *p = c_getstr (src);
3638 rtx dest_mem;
3639
3640 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
3641 || !can_store_by_pieces (tree_to_uhwi (len),
3642 builtin_strncpy_read_str,
3643 CONST_CAST (char *, p),
3644 dest_align, false))
3645 return NULL_RTX;
3646
3647 dest_mem = get_memory_rtx (dest, len);
3648 store_by_pieces (dest_mem, tree_to_uhwi (len),
3649 builtin_strncpy_read_str,
3650 CONST_CAST (char *, p), dest_align, false, 0);
3651 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3652 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3653 return dest_mem;
3654 }
3655 }
3656 return NULL_RTX;
3657 }
3658
3659 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3660 bytes from constant string DATA + OFFSET and return it as a target
3661 constant. */
3662
3663 rtx
3664 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3665 machine_mode mode)
3666 {
3667 const char *c = (const char *) data;
3668 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3669
3670 memset (p, *c, GET_MODE_SIZE (mode));
3671
3672 return c_readstr (p, mode);
3673 }
3674
3675 /* Callback routine for store_by_pieces. Return the RTL of a register
3676 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3677 char value given in the RTL register data. For example, if mode is
3678 4 bytes wide, return the RTL for 0x01010101*data. */
3679
3680 static rtx
3681 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3682 machine_mode mode)
3683 {
3684 rtx target, coeff;
3685 size_t size;
3686 char *p;
3687
3688 size = GET_MODE_SIZE (mode);
3689 if (size == 1)
3690 return (rtx) data;
3691
3692 p = XALLOCAVEC (char, size);
3693 memset (p, 1, size);
3694 coeff = c_readstr (p, mode);
3695
3696 target = convert_to_mode (mode, (rtx) data, 1);
3697 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3698 return force_reg (mode, target);
3699 }
3700
3701 /* Expand expression EXP, which is a call to the memset builtin. Return
3702 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3703 try to get the result in TARGET, if convenient (and in mode MODE if that's
3704 convenient). */
3705
3706 static rtx
3707 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
3708 {
3709 if (!validate_arglist (exp,
3710 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3711 return NULL_RTX;
3712 else
3713 {
3714 tree dest = CALL_EXPR_ARG (exp, 0);
3715 tree val = CALL_EXPR_ARG (exp, 1);
3716 tree len = CALL_EXPR_ARG (exp, 2);
3717 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3718 }
3719 }
3720
3721 /* Expand expression EXP, which is an instrumented call to the memset builtin.
3722 Return NULL_RTX if we failed; the caller should emit a normal call, otherwise
3723 try to get the result in TARGET, if convenient (and in mode MODE if that's
3724 convenient). */
3725
3726 static rtx
3727 expand_builtin_memset_with_bounds (tree exp, rtx target, machine_mode mode)
3728 {
3729 if (!validate_arglist (exp,
3730 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3731 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3732 return NULL_RTX;
3733 else
3734 {
3735 tree dest = CALL_EXPR_ARG (exp, 0);
3736 tree val = CALL_EXPR_ARG (exp, 2);
3737 tree len = CALL_EXPR_ARG (exp, 3);
3738 rtx res = expand_builtin_memset_args (dest, val, len, target, mode, exp);
3739
3740 /* Return src bounds with the result. */
3741 if (res)
3742 {
3743 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3744 expand_normal (CALL_EXPR_ARG (exp, 1)));
3745 res = chkp_join_splitted_slot (res, bnd);
3746 }
3747 return res;
3748 }
3749 }
3750
3751 /* Helper function to do the actual work for expand_builtin_memset. The
3752 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3753 so that this can also be called without constructing an actual CALL_EXPR.
3754 The other arguments and return value are the same as for
3755 expand_builtin_memset. */
3756
3757 static rtx
3758 expand_builtin_memset_args (tree dest, tree val, tree len,
3759 rtx target, machine_mode mode, tree orig_exp)
3760 {
3761 tree fndecl, fn;
3762 enum built_in_function fcode;
3763 machine_mode val_mode;
3764 char c;
3765 unsigned int dest_align;
3766 rtx dest_mem, dest_addr, len_rtx;
3767 HOST_WIDE_INT expected_size = -1;
3768 unsigned int expected_align = 0;
3769 unsigned HOST_WIDE_INT min_size;
3770 unsigned HOST_WIDE_INT max_size;
3771 unsigned HOST_WIDE_INT probable_max_size;
3772
3773 dest_align = get_pointer_alignment (dest);
3774
3775 /* If DEST is not a pointer type, don't do this operation in-line. */
3776 if (dest_align == 0)
3777 return NULL_RTX;
3778
3779 if (currently_expanding_gimple_stmt)
3780 stringop_block_profile (currently_expanding_gimple_stmt,
3781 &expected_align, &expected_size);
3782
3783 if (expected_align < dest_align)
3784 expected_align = dest_align;
3785
3786 /* If the LEN parameter is zero, return DEST. */
3787 if (integer_zerop (len))
3788 {
3789 /* Evaluate and ignore VAL in case it has side-effects. */
3790 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3791 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3792 }
3793
3794 /* Stabilize the arguments in case we fail. */
3795 dest = builtin_save_expr (dest);
3796 val = builtin_save_expr (val);
3797 len = builtin_save_expr (len);
3798
3799 len_rtx = expand_normal (len);
3800 determine_block_size (len, len_rtx, &min_size, &max_size,
3801 &probable_max_size);
3802 dest_mem = get_memory_rtx (dest, len);
3803 val_mode = TYPE_MODE (unsigned_char_type_node);
3804
3805 if (TREE_CODE (val) != INTEGER_CST)
3806 {
3807 rtx val_rtx;
3808
3809 val_rtx = expand_normal (val);
3810 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
3811
3812 /* Assume that we can memset by pieces if we can store
3813 the coefficients by pieces (in the required modes).
3814 We can't pass builtin_memset_gen_str as that emits RTL. */
3815 c = 1;
3816 if (tree_fits_uhwi_p (len)
3817 && can_store_by_pieces (tree_to_uhwi (len),
3818 builtin_memset_read_str, &c, dest_align,
3819 true))
3820 {
3821 val_rtx = force_reg (val_mode, val_rtx);
3822 store_by_pieces (dest_mem, tree_to_uhwi (len),
3823 builtin_memset_gen_str, val_rtx, dest_align,
3824 true, 0);
3825 }
3826 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3827 dest_align, expected_align,
3828 expected_size, min_size, max_size,
3829 probable_max_size))
3830 goto do_libcall;
3831
3832 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3833 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3834 return dest_mem;
3835 }
3836
3837 if (target_char_cast (val, &c))
3838 goto do_libcall;
3839
3840 if (c)
3841 {
3842 if (tree_fits_uhwi_p (len)
3843 && can_store_by_pieces (tree_to_uhwi (len),
3844 builtin_memset_read_str, &c, dest_align,
3845 true))
3846 store_by_pieces (dest_mem, tree_to_uhwi (len),
3847 builtin_memset_read_str, &c, dest_align, true, 0);
3848 else if (!set_storage_via_setmem (dest_mem, len_rtx,
3849 gen_int_mode (c, val_mode),
3850 dest_align, expected_align,
3851 expected_size, min_size, max_size,
3852 probable_max_size))
3853 goto do_libcall;
3854
3855 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3856 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3857 return dest_mem;
3858 }
3859
3860 set_mem_align (dest_mem, dest_align);
3861 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3862 CALL_EXPR_TAILCALL (orig_exp)
3863 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3864 expected_align, expected_size,
3865 min_size, max_size,
3866 probable_max_size);
3867
3868 if (dest_addr == 0)
3869 {
3870 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3871 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3872 }
3873
3874 return dest_addr;
3875
3876 do_libcall:
3877 fndecl = get_callee_fndecl (orig_exp);
3878 fcode = DECL_FUNCTION_CODE (fndecl);
3879 if (fcode == BUILT_IN_MEMSET
3880 || fcode == BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP)
3881 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
3882 dest, val, len);
3883 else if (fcode == BUILT_IN_BZERO)
3884 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
3885 dest, len);
3886 else
3887 gcc_unreachable ();
3888 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3889 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3890 return expand_call (fn, target, target == const0_rtx);
3891 }
3892
3893 /* Expand expression EXP, which is a call to the bzero builtin. Return
3894 NULL_RTX if we failed; the caller should emit a normal call. */
3895
3896 static rtx
3897 expand_builtin_bzero (tree exp)
3898 {
3899 tree dest, size;
3900 location_t loc = EXPR_LOCATION (exp);
3901
3902 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3903 return NULL_RTX;
3904
3905 dest = CALL_EXPR_ARG (exp, 0);
3906 size = CALL_EXPR_ARG (exp, 1);
3907
3908 /* New argument list transforming bzero(ptr x, int y) to
3909 memset(ptr x, int 0, size_t y). This is done this way
3910 so that if it isn't expanded inline, we fall back to
3911 calling bzero instead of memset. */
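/* E.g. bzero (p, n) is expanded as if it were
     memset (p, 0, (size_t) n);
   while EXP stays the original bzero call, so the library fallback
   still calls bzero. */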
3912
3913 return expand_builtin_memset_args (dest, integer_zero_node,
3914 fold_convert_loc (loc,
3915 size_type_node, size),
3916 const0_rtx, VOIDmode, exp);
3917 }
3918
3919 /* Try to expand cmpstr operation ICODE with the given operands.
3920 Return the result rtx on success, otherwise return null. */
3921
3922 static rtx
3923 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
3924 HOST_WIDE_INT align)
3925 {
3926 machine_mode insn_mode = insn_data[icode].operand[0].mode;
3927
3928 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
3929 target = NULL_RTX;
3930
3931 struct expand_operand ops[4];
3932 create_output_operand (&ops[0], target, insn_mode);
3933 create_fixed_operand (&ops[1], arg1_rtx);
3934 create_fixed_operand (&ops[2], arg2_rtx);
3935 create_integer_operand (&ops[3], align);
3936 if (maybe_expand_insn (icode, 4, ops))
3937 return ops[0].value;
3938 return NULL_RTX;
3939 }
3940
3941 /* Try to expand cmpstrn or cmpmem operation ICODE with the given operands.
3942 ARG3_TYPE is the type of ARG3_RTX. Return the result rtx on success,
3943 otherwise return null. */
3944
3945 static rtx
3946 expand_cmpstrn_or_cmpmem (insn_code icode, rtx target, rtx arg1_rtx,
3947 rtx arg2_rtx, tree arg3_type, rtx arg3_rtx,
3948 HOST_WIDE_INT align)
3949 {
3950 machine_mode insn_mode = insn_data[icode].operand[0].mode;
3951
3952 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
3953 target = NULL_RTX;
3954
3955 struct expand_operand ops[5];
3956 create_output_operand (&ops[0], target, insn_mode);
3957 create_fixed_operand (&ops[1], arg1_rtx);
3958 create_fixed_operand (&ops[2], arg2_rtx);
3959 create_convert_operand_from (&ops[3], arg3_rtx, TYPE_MODE (arg3_type),
3960 TYPE_UNSIGNED (arg3_type));
3961 create_integer_operand (&ops[4], align);
3962 if (maybe_expand_insn (icode, 5, ops))
3963 return ops[0].value;
3964 return NULL_RTX;
3965 }
3966
3967 /* Expand expression EXP, which is a call to the memcmp built-in function.
3968 Return NULL_RTX if we failed and the caller should emit a normal call,
3969 otherwise try to get the result in TARGET, if convenient. */
3970
3971 static rtx
3972 expand_builtin_memcmp (tree exp, rtx target)
3973 {
3974 if (!validate_arglist (exp,
3975 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3976 return NULL_RTX;
3977
3978 /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
3979 implementing memcmp because it will stop if it encounters two
3980 zero bytes. */
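/* E.g. memcmp ("a\0b", "a\0c", 3) must be nonzero, but a
   cmpstrn-style compare would stop at the embedded NULs and
   wrongly report equality. */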
3981 insn_code icode = direct_optab_handler (cmpmem_optab, SImode);
3982 if (icode == CODE_FOR_nothing)
3983 return NULL_RTX;
3984
3985 tree arg1 = CALL_EXPR_ARG (exp, 0);
3986 tree arg2 = CALL_EXPR_ARG (exp, 1);
3987 tree len = CALL_EXPR_ARG (exp, 2);
3988
3989 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3990 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3991
3992 /* If we don't know the alignment of either argument, call the function. */
3993 if (arg1_align == 0 || arg2_align == 0)
3994 return NULL_RTX;
3995
3996 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
3997 location_t loc = EXPR_LOCATION (exp);
3998 rtx arg1_rtx = get_memory_rtx (arg1, len);
3999 rtx arg2_rtx = get_memory_rtx (arg2, len);
4000 rtx arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
4001
4002 /* Set MEM_SIZE as appropriate. */
4003 if (CONST_INT_P (arg3_rtx))
4004 {
4005 set_mem_size (arg1_rtx, INTVAL (arg3_rtx));
4006 set_mem_size (arg2_rtx, INTVAL (arg3_rtx));
4007 }
4008
4009 rtx result = expand_cmpstrn_or_cmpmem (icode, target, arg1_rtx, arg2_rtx,
4010 TREE_TYPE (len), arg3_rtx,
4011 MIN (arg1_align, arg2_align));
4012 if (result)
4013 {
4014 /* Return the value in the proper mode for this function. */
4015 if (GET_MODE (result) == mode)
4016 return result;
4017
4018 if (target != 0)
4019 {
4020 convert_move (target, result, 0);
4021 return target;
4022 }
4023
4024 return convert_to_mode (mode, result, 0);
4025 }
4026
4027 result = target;
4028 if (! (result != 0
4029 && REG_P (result) && GET_MODE (result) == mode
4030 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4031 result = gen_reg_rtx (mode);
4032
4033 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
4034 TYPE_MODE (integer_type_node), 3,
4035 XEXP (arg1_rtx, 0), Pmode,
4036 XEXP (arg2_rtx, 0), Pmode,
4037 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
4038 TYPE_UNSIGNED (sizetype)),
4039 TYPE_MODE (sizetype));
4040 return result;
4041 }
4042
4043 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4044 if we failed; the caller should emit a normal call, otherwise try to get
4045 the result in TARGET, if convenient. */
4046
4047 static rtx
4048 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4049 {
4050 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4051 return NULL_RTX;
4052
4053 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
4054 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4055 if (cmpstr_icode != CODE_FOR_nothing || cmpstrn_icode != CODE_FOR_nothing)
4056 {
4057 rtx arg1_rtx, arg2_rtx;
4058 tree fndecl, fn;
4059 tree arg1 = CALL_EXPR_ARG (exp, 0);
4060 tree arg2 = CALL_EXPR_ARG (exp, 1);
4061 rtx result = NULL_RTX;
4062
4063 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4064 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4065
4066 /* If we don't know the alignment of either argument, call the function. */
4067 if (arg1_align == 0 || arg2_align == 0)
4068 return NULL_RTX;
4069
4070 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4071 arg1 = builtin_save_expr (arg1);
4072 arg2 = builtin_save_expr (arg2);
4073
4074 arg1_rtx = get_memory_rtx (arg1, NULL);
4075 arg2_rtx = get_memory_rtx (arg2, NULL);
4076
4077 /* Try to call cmpstrsi. */
4078 if (cmpstr_icode != CODE_FOR_nothing)
4079 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
4080 MIN (arg1_align, arg2_align));
4081
4082 /* Try to determine at least one length and call cmpstrnsi. */
4083 if (!result && cmpstrn_icode != CODE_FOR_nothing)
4084 {
4085 tree len;
4086 rtx arg3_rtx;
4087
4088 tree len1 = c_strlen (arg1, 1);
4089 tree len2 = c_strlen (arg2, 1);
4090
4091 if (len1)
4092 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4093 if (len2)
4094 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4095
4096 /* If we don't have a constant length for the first, use the length
4097 of the second, if we know it. We don't require a constant for
4098 this case; some cost analysis could be done if both are available
4099 but neither is constant. For now, assume they're equally cheap,
4100 unless one has side effects. If both strings have constant lengths,
4101 use the smaller. */
4102
4103 if (!len1)
4104 len = len2;
4105 else if (!len2)
4106 len = len1;
4107 else if (TREE_SIDE_EFFECTS (len1))
4108 len = len2;
4109 else if (TREE_SIDE_EFFECTS (len2))
4110 len = len1;
4111 else if (TREE_CODE (len1) != INTEGER_CST)
4112 len = len2;
4113 else if (TREE_CODE (len2) != INTEGER_CST)
4114 len = len1;
4115 else if (tree_int_cst_lt (len1, len2))
4116 len = len1;
4117 else
4118 len = len2;
4119
4120 /* If both arguments have side effects, we cannot optimize. */
4121 if (len && !TREE_SIDE_EFFECTS (len))
4122 {
4123 arg3_rtx = expand_normal (len);
4124 result = expand_cmpstrn_or_cmpmem
4125 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
4126 arg3_rtx, MIN (arg1_align, arg2_align));
4127 }
4128 }
4129
4130 if (result)
4131 {
4132 /* Return the value in the proper mode for this function. */
4133 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4134 if (GET_MODE (result) == mode)
4135 return result;
4136 if (target == 0)
4137 return convert_to_mode (mode, result, 0);
4138 convert_move (target, result, 0);
4139 return target;
4140 }
4141
4142 /* Expand the library call ourselves using a stabilized argument
4143 list to avoid evaluating the function's arguments twice. */
4144 fndecl = get_callee_fndecl (exp);
4145 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4146 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4147 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4148 return expand_call (fn, target, target == const0_rtx);
4149 }
4150 return NULL_RTX;
4151 }
4152
4153 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4154 NULL_RTX if we failed; the caller should emit a normal call, otherwise try to get
4155 the result in TARGET, if convenient. */
4156
4157 static rtx
4158 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4159 ATTRIBUTE_UNUSED machine_mode mode)
4160 {
4161 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
4162
4163 if (!validate_arglist (exp,
4164 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4165 return NULL_RTX;
4166
4167 /* If c_strlen can determine an expression for one of the string
4168 lengths, and it doesn't have side effects, then emit cmpstrnsi
4169 using length MIN(strlen(string)+1, arg3). */
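/* For example, in strncmp (s, "abc", 100) the constant operand gives
   len2 == strlen ("abc") + 1 == 4, so the comparison is emitted with
   length MIN (4, 100) == 4; bytes past the NUL cannot change the
   result. */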
4170 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4171 if (cmpstrn_icode != CODE_FOR_nothing)
4172 {
4173 tree len, len1, len2;
4174 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4175 rtx result;
4176 tree fndecl, fn;
4177 tree arg1 = CALL_EXPR_ARG (exp, 0);
4178 tree arg2 = CALL_EXPR_ARG (exp, 1);
4179 tree arg3 = CALL_EXPR_ARG (exp, 2);
4180
4181 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4182 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4183
4184 len1 = c_strlen (arg1, 1);
4185 len2 = c_strlen (arg2, 1);
4186
4187 if (len1)
4188 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4189 if (len2)
4190 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4191
4192 /* If we don't have a constant length for the first, use the length
4193 of the second, if we know it. We don't require a constant for
4194 this case; some cost analysis could be done if both are available
4195 but neither is constant. For now, assume they're equally cheap,
4196 unless one has side effects. If both strings have constant lengths,
4197 use the smaller. */
4198
4199 if (!len1)
4200 len = len2;
4201 else if (!len2)
4202 len = len1;
4203 else if (TREE_SIDE_EFFECTS (len1))
4204 len = len2;
4205 else if (TREE_SIDE_EFFECTS (len2))
4206 len = len1;
4207 else if (TREE_CODE (len1) != INTEGER_CST)
4208 len = len2;
4209 else if (TREE_CODE (len2) != INTEGER_CST)
4210 len = len1;
4211 else if (tree_int_cst_lt (len1, len2))
4212 len = len1;
4213 else
4214 len = len2;
4215
4216 /* If both arguments have side effects, we cannot optimize. */
4217 if (!len || TREE_SIDE_EFFECTS (len))
4218 return NULL_RTX;
4219
4220 /* The actual new length parameter is MIN(len,arg3). */
4221 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
4222 fold_convert_loc (loc, TREE_TYPE (len), arg3));
4223
4224 /* If we don't know the alignment of either argument, call the function. */
4225 if (arg1_align == 0 || arg2_align == 0)
4226 return NULL_RTX;
4227
4228 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4229 arg1 = builtin_save_expr (arg1);
4230 arg2 = builtin_save_expr (arg2);
4231 len = builtin_save_expr (len);
4232
4233 arg1_rtx = get_memory_rtx (arg1, len);
4234 arg2_rtx = get_memory_rtx (arg2, len);
4235 arg3_rtx = expand_normal (len);
4236 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
4237 arg2_rtx, TREE_TYPE (len), arg3_rtx,
4238 MIN (arg1_align, arg2_align));
4239 if (result)
4240 {
4241 /* Return the value in the proper mode for this function. */
4242 mode = TYPE_MODE (TREE_TYPE (exp));
4243 if (GET_MODE (result) == mode)
4244 return result;
4245 if (target == 0)
4246 return convert_to_mode (mode, result, 0);
4247 convert_move (target, result, 0);
4248 return target;
4249 }
4250
4251 /* Expand the library call ourselves using a stabilized argument
4252 list to avoid evaluating the function's arguments twice. */
4253 fndecl = get_callee_fndecl (exp);
4254 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
4255 arg1, arg2, len);
4256 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4257 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4258 return expand_call (fn, target, target == const0_rtx);
4259 }
4260 return NULL_RTX;
4261 }
4262
4263 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4264 if that's convenient. */
4265
4266 rtx
4267 expand_builtin_saveregs (void)
4268 {
4269 rtx val;
4270 rtx_insn *seq;
4271
4272 /* Don't do __builtin_saveregs more than once in a function.
4273 Save the result of the first call and reuse it. */
4274 if (saveregs_value != 0)
4275 return saveregs_value;
4276
4277 /* When this function is called, it means that registers must be
4278 saved on entry to this function. So we migrate the call to the
4279 first insn of this function. */
4280
4281 start_sequence ();
4282
4283 /* Do whatever the machine needs done in this case. */
4284 val = targetm.calls.expand_builtin_saveregs ();
4285
4286 seq = get_insns ();
4287 end_sequence ();
4288
4289 saveregs_value = val;
4290
4291 /* Put the insns after the NOTE that starts the function. If this
4292 is inside a start_sequence, make the outer-level insn chain current, so
4293 the code is placed at the start of the function. */
4294 push_topmost_sequence ();
4295 emit_insn_after (seq, entry_of_function ());
4296 pop_topmost_sequence ();
4297
4298 return val;
4299 }
4300
4301 /* Expand a call to __builtin_next_arg. */
4302
4303 static rtx
4304 expand_builtin_next_arg (void)
4305 {
4306 /* Checking arguments is already done in fold_builtin_next_arg,
4307 which must be called before this function. */
4308 return expand_binop (ptr_mode, add_optab,
4309 crtl->args.internal_arg_pointer,
4310 crtl->args.arg_offset_rtx,
4311 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4312 }
4313
4314 /* Make it easier for the backends by protecting the valist argument
4315 from multiple evaluations. */
4316
4317 static tree
4318 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4319 {
4320 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4321
4322 /* The current way of determining the type of valist is completely
4323 bogus. We should have the information on the va builtin instead. */
4324 if (!vatype)
4325 vatype = targetm.fn_abi_va_list (cfun->decl);
4326
4327 if (TREE_CODE (vatype) == ARRAY_TYPE)
4328 {
4329 if (TREE_SIDE_EFFECTS (valist))
4330 valist = save_expr (valist);
4331
4332 /* For this case, the backends will be expecting a pointer to
4333 vatype, but it's possible we've actually been given an array
4334 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4335 So fix it. */
4336 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4337 {
4338 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4339 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4340 }
4341 }
4342 else
4343 {
4344 tree pt = build_pointer_type (vatype);
4345
4346 if (! needs_lvalue)
4347 {
4348 if (! TREE_SIDE_EFFECTS (valist))
4349 return valist;
4350
4351 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4352 TREE_SIDE_EFFECTS (valist) = 1;
4353 }
4354
4355 if (TREE_SIDE_EFFECTS (valist))
4356 valist = save_expr (valist);
4357 valist = fold_build2_loc (loc, MEM_REF,
4358 vatype, valist, build_int_cst (pt, 0));
4359 }
4360
4361 return valist;
4362 }
4363
4364 /* The "standard" definition of va_list is void*. */
4365
4366 tree
4367 std_build_builtin_va_list (void)
4368 {
4369 return ptr_type_node;
4370 }
4371
4372 /* The "standard" abi va_list is va_list_type_node. */
4373
4374 tree
4375 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4376 {
4377 return va_list_type_node;
4378 }
4379
4380 /* The "standard" type of va_list is va_list_type_node. */
4381
4382 tree
4383 std_canonical_va_list_type (tree type)
4384 {
4385 tree wtype, htype;
4386
4387 if (INDIRECT_REF_P (type))
4388 type = TREE_TYPE (type);
4389 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE (type)))
4390 type = TREE_TYPE (type);
4391 wtype = va_list_type_node;
4392 htype = type;
4393 /* Treat structure va_list types. */
4394 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4395 htype = TREE_TYPE (htype);
4396 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4397 {
4398 /* If va_list is an array type, the argument may have decayed
4399 to a pointer type, e.g. by being passed to another function.
4400 In that case, unwrap both types so that we can compare the
4401 underlying records. */
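/* E.g. on targets where va_list is struct __va_list_tag[1] (such
   as x86-64), a va_list received as a function argument has decayed
   to struct __va_list_tag *; stripping one level from both sides
   lets the underlying RECORD_TYPEs be compared. */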
4402 if (TREE_CODE (htype) == ARRAY_TYPE
4403 || POINTER_TYPE_P (htype))
4404 {
4405 wtype = TREE_TYPE (wtype);
4406 htype = TREE_TYPE (htype);
4407 }
4408 }
4409 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4410 return va_list_type_node;
4411
4412 return NULL_TREE;
4413 }
4414
4415 /* The "standard" implementation of va_start: just assign `nextarg' to
4416 the variable. */
4417
4418 void
4419 std_expand_builtin_va_start (tree valist, rtx nextarg)
4420 {
4421 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4422 convert_move (va_r, nextarg, 0);
4423
4424 /* We do not have any valid bounds for the pointer, so
4425 just store zero bounds for it. */
4426 if (chkp_function_instrumented_p (current_function_decl))
4427 chkp_expand_bounds_reset_for_mem (valist,
4428 make_tree (TREE_TYPE (valist),
4429 nextarg));
4430 }
4431
4432 /* Expand EXP, a call to __builtin_va_start. */
4433
4434 static rtx
4435 expand_builtin_va_start (tree exp)
4436 {
4437 rtx nextarg;
4438 tree valist;
4439 location_t loc = EXPR_LOCATION (exp);
4440
4441 if (call_expr_nargs (exp) < 2)
4442 {
4443 error_at (loc, "too few arguments to function %<va_start%>");
4444 return const0_rtx;
4445 }
4446
4447 if (fold_builtin_next_arg (exp, true))
4448 return const0_rtx;
4449
4450 nextarg = expand_builtin_next_arg ();
4451 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4452
4453 if (targetm.expand_builtin_va_start)
4454 targetm.expand_builtin_va_start (valist, nextarg);
4455 else
4456 std_expand_builtin_va_start (valist, nextarg);
4457
4458 return const0_rtx;
4459 }
4460
4461 /* Expand EXP, a call to __builtin_va_end. */
4462
4463 static rtx
4464 expand_builtin_va_end (tree exp)
4465 {
4466 tree valist = CALL_EXPR_ARG (exp, 0);
4467
4468 /* Evaluate for side effects, if needed. I hate macros that don't
4469 do that. */
4470 if (TREE_SIDE_EFFECTS (valist))
4471 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4472
4473 return const0_rtx;
4474 }
4475
4476 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4477 builtin rather than just as an assignment in stdarg.h because of the
4478 nastiness of array-type va_list types. */
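/* Where the ABI va_list is a pointer or record type, va_copy
   degenerates to the plain assignment
     dst = src;
   (the MODIFY_EXPR below); for array-type va_lists both operands
   are addresses and a block copy of the underlying structure is
   emitted instead. */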
4479
4480 static rtx
4481 expand_builtin_va_copy (tree exp)
4482 {
4483 tree dst, src, t;
4484 location_t loc = EXPR_LOCATION (exp);
4485
4486 dst = CALL_EXPR_ARG (exp, 0);
4487 src = CALL_EXPR_ARG (exp, 1);
4488
4489 dst = stabilize_va_list_loc (loc, dst, 1);
4490 src = stabilize_va_list_loc (loc, src, 0);
4491
4492 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4493
4494 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4495 {
4496 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4497 TREE_SIDE_EFFECTS (t) = 1;
4498 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4499 }
4500 else
4501 {
4502 rtx dstb, srcb, size;
4503
4504 /* Evaluate to pointers. */
4505 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4506 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4507 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4508 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4509
4510 dstb = convert_memory_address (Pmode, dstb);
4511 srcb = convert_memory_address (Pmode, srcb);
4512
4513 /* "Dereference" to BLKmode memories. */
4514 dstb = gen_rtx_MEM (BLKmode, dstb);
4515 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4516 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4517 srcb = gen_rtx_MEM (BLKmode, srcb);
4518 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4519 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4520
4521 /* Copy. */
4522 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4523 }
4524
4525 return const0_rtx;
4526 }
4527
4528 /* Expand a call to one of the builtin functions __builtin_frame_address or
4529 __builtin_return_address. */
4530
4531 static rtx
4532 expand_builtin_frame_address (tree fndecl, tree exp)
4533 {
4534 /* The argument must be a nonnegative integer constant.
4535 It counts the number of frames to scan up the stack.
4536 The value is either the frame pointer value or the return
4537 address saved in that frame. */
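/* E.g. __builtin_return_address (0) yields the return address of
   the current function, and __builtin_frame_address (1) the frame
   address of its caller -- where the target can walk the stack
   at all. */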
4538 if (call_expr_nargs (exp) == 0)
4539 /* Warning about missing arg was already issued. */
4540 return const0_rtx;
4541 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
4542 {
4543 error ("invalid argument to %qD", fndecl);
4544 return const0_rtx;
4545 }
4546 else
4547 {
4548 /* Number of frames to scan up the stack. */
4549 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
4550
4551 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
4552
4553 /* Some ports cannot access arbitrary stack frames. */
4554 if (tem == NULL)
4555 {
4556 warning (0, "unsupported argument to %qD", fndecl);
4557 return const0_rtx;
4558 }
4559
4560 if (count)
4561 {
4562 /* Warn since no effort is made to ensure that any frame
4563 beyond the current one exists or can be safely reached. */
4564 warning (OPT_Wframe_address, "calling %qD with "
4565 "a nonzero argument is unsafe", fndecl);
4566 }
4567
4568 /* For __builtin_frame_address, return what we've got. */
4569 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4570 return tem;
4571
4572 if (!REG_P (tem)
4573 && ! CONSTANT_P (tem))
4574 tem = copy_addr_to_reg (tem);
4575 return tem;
4576 }
4577 }
4578
4579 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4580 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4581 is the same as for allocate_dynamic_stack_space. */
4582
4583 static rtx
4584 expand_builtin_alloca (tree exp, bool cannot_accumulate)
4585 {
4586 rtx op0;
4587 rtx result;
4588 bool valid_arglist;
4589 unsigned int align;
4590 bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
4591 == BUILT_IN_ALLOCA_WITH_ALIGN);
4592
4593 valid_arglist
4594 = (alloca_with_align
4595 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4596 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4597
4598 if (!valid_arglist)
4599 return NULL_RTX;
4600
4601 /* Compute the argument. */
4602 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4603
4604 /* Compute the alignment. */
4605 align = (alloca_with_align
4606 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
4607 : BIGGEST_ALIGNMENT);
4608
4609 /* Allocate the desired space. */
4610 result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
4611 result = convert_memory_address (ptr_mode, result);
4612
4613 return result;
4614 }
4615
4616 /* Expand a call to the bswap builtin in EXP.
4617 Return NULL_RTX if a normal call should be emitted rather than expanding the
4618 function in-line. If convenient, the result should be placed in TARGET.
4619 SUBTARGET may be used as the target for computing one of EXP's operands. */
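/* E.g. __builtin_bswap32 (0x12345678) expands via bswap_optab to an
   insn computing 0x78563412. */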
4620
4621 static rtx
4622 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
4623 rtx subtarget)
4624 {
4625 tree arg;
4626 rtx op0;
4627
4628 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4629 return NULL_RTX;
4630
4631 arg = CALL_EXPR_ARG (exp, 0);
4632 op0 = expand_expr (arg,
4633 subtarget && GET_MODE (subtarget) == target_mode
4634 ? subtarget : NULL_RTX,
4635 target_mode, EXPAND_NORMAL);
4636 if (GET_MODE (op0) != target_mode)
4637 op0 = convert_to_mode (target_mode, op0, 1);
4638
4639 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
4640
4641 gcc_assert (target);
4642
4643 return convert_to_mode (target_mode, target, 1);
4644 }
4645
4646 /* Expand a call to a unary builtin in EXP.
4647 Return NULL_RTX if a normal call should be emitted rather than expanding the
4648 function in-line. If convenient, the result should be placed in TARGET.
4649 SUBTARGET may be used as the target for computing one of EXP's operands. */
4650
4651 static rtx
4652 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
4653 rtx subtarget, optab op_optab)
4654 {
4655 rtx op0;
4656
4657 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4658 return NULL_RTX;
4659
4660 /* Compute the argument. */
4661 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4662 (subtarget
4663 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4664 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
4665 VOIDmode, EXPAND_NORMAL);
4666 /* Compute op, into TARGET if possible.
4667 Set TARGET to wherever the result comes back. */
4668 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4669 op_optab, op0, target, op_optab != clrsb_optab);
4670 gcc_assert (target);
4671
4672 return convert_to_mode (target_mode, target, 0);
4673 }
4674
4675 /* Expand a call to __builtin_expect. We just return our argument
4676 as the builtin_expect semantic should've been already executed by
4677 the tree branch prediction pass. */
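/* E.g. in
     if (__builtin_expect (x != 0, 0)) ...
   the hint has already been consumed to set branch probabilities;
   here we simply expand to the value of x != 0. */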
4678
4679 static rtx
4680 expand_builtin_expect (tree exp, rtx target)
4681 {
4682 tree arg;
4683
4684 if (call_expr_nargs (exp) < 2)
4685 return const0_rtx;
4686 arg = CALL_EXPR_ARG (exp, 0);
4687
4688 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4689 /* When guessing was done, the hints should be already stripped away. */
4690 gcc_assert (!flag_guess_branch_prob
4691 || optimize == 0 || seen_error ());
4692 return target;
4693 }
4694
4695 /* Expand a call to __builtin_assume_aligned. We just return our first
4696 argument as the builtin_assume_aligned semantic should've been already
4697 executed by CCP. */
4698
4699 static rtx
4700 expand_builtin_assume_aligned (tree exp, rtx target)
4701 {
4702 if (call_expr_nargs (exp) < 2)
4703 return const0_rtx;
4704 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4705 EXPAND_NORMAL);
4706 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4707 && (call_expr_nargs (exp) < 3
4708 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4709 return target;
4710 }
4711
4712 void
4713 expand_builtin_trap (void)
4714 {
4715 if (targetm.have_trap ())
4716 {
4717 rtx_insn *insn = emit_insn (targetm.gen_trap ());
4718 /* For trap insns when not accumulating outgoing args force
4719 REG_ARGS_SIZE note to prevent crossjumping of calls with
4720 different args sizes. */
4721 if (!ACCUMULATE_OUTGOING_ARGS)
4722 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4723 }
4724 else
4725 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
4726 emit_barrier ();
4727 }
4728
4729 /* Expand a call to __builtin_unreachable. We do nothing except emit
4730 a barrier saying that control flow will not pass here.
4731
4732 It is the responsibility of the program being compiled to ensure
4733 that control flow never reaches __builtin_unreachable. */
4734 static void
4735 expand_builtin_unreachable (void)
4736 {
4737 emit_barrier ();
4738 }
4739
4740 /* Expand EXP, a call to fabs, fabsf or fabsl.
4741 Return NULL_RTX if a normal call should be emitted rather than expanding
4742 the function inline. If convenient, the result should be placed
4743 in TARGET. SUBTARGET may be used as the target for computing
4744 the operand. */
4745
4746 static rtx
4747 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4748 {
4749 machine_mode mode;
4750 tree arg;
4751 rtx op0;
4752
4753 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4754 return NULL_RTX;
4755
4756 arg = CALL_EXPR_ARG (exp, 0);
4757 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4758 mode = TYPE_MODE (TREE_TYPE (arg));
4759 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4760 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4761 }
4762
4763 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4764 Return NULL if a normal call should be emitted rather than expanding the
4765 function inline. If convenient, the result should be placed in TARGET.
4766 SUBTARGET may be used as the target for computing the operand. */
4767
4768 static rtx
4769 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4770 {
4771 rtx op0, op1;
4772 tree arg;
4773
4774 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4775 return NULL_RTX;
4776
4777 arg = CALL_EXPR_ARG (exp, 0);
4778 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4779
4780 arg = CALL_EXPR_ARG (exp, 1);
4781 op1 = expand_normal (arg);
4782
4783 return expand_copysign (op0, op1, target);
4784 }
4785
4786 /* Expand a call to __builtin___clear_cache. */
4787
4788 static rtx
4789 expand_builtin___clear_cache (tree exp)
4790 {
4791 if (!targetm.code_for_clear_cache)
4792 {
4793 #ifdef CLEAR_INSN_CACHE
4794 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4795 does something. Just do the default expansion to a call to
4796 __clear_cache(). */
4797 return NULL_RTX;
4798 #else
4799 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4800 does nothing. There is no need to call it. Do nothing. */
4801 return const0_rtx;
4802 #endif /* CLEAR_INSN_CACHE */
4803 }
4804
4805 /* We have a "clear_cache" insn, and it will handle everything. */
4806 tree begin, end;
4807 rtx begin_rtx, end_rtx;
4808
4809 /* We must not expand to a library call. If we did, any
4810 fallback library function in libgcc that might contain a call to
4811 __builtin___clear_cache() would recurse infinitely. */
4812 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4813 {
4814 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
4815 return const0_rtx;
4816 }
4817
4818 if (targetm.have_clear_cache ())
4819 {
4820 struct expand_operand ops[2];
4821
4822 begin = CALL_EXPR_ARG (exp, 0);
4823 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
4824
4825 end = CALL_EXPR_ARG (exp, 1);
4826 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
4827
4828 create_address_operand (&ops[0], begin_rtx);
4829 create_address_operand (&ops[1], end_rtx);
4830 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
4831 return const0_rtx;
4832 }
4833 return const0_rtx;
4834 }
4835
4836 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
4837
4838 static rtx
4839 round_trampoline_addr (rtx tramp)
4840 {
4841 rtx temp, addend, mask;
4842
4843 /* If we don't need too much alignment, we'll have been guaranteed
4844 proper alignment by get_trampoline_type. */
4845 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
4846 return tramp;
4847
4848 /* Round address up to desired boundary. */
4849 temp = gen_reg_rtx (Pmode);
4850 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
4851 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
4852
4853 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
4854 temp, 0, OPTAB_LIB_WIDEN);
4855 tramp = expand_simple_binop (Pmode, AND, temp, mask,
4856 temp, 0, OPTAB_LIB_WIDEN);
4857
4858 return tramp;
4859 }
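/* A sketch of the rounding above: with TRAMPOLINE_ALIGNMENT of 64
   bits (8 bytes), an address 0x1005 becomes
     (0x1005 + 7) & -8 == 0x1008. */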
4860
4861 static rtx
4862 expand_builtin_init_trampoline (tree exp, bool onstack)
4863 {
4864 tree t_tramp, t_func, t_chain;
4865 rtx m_tramp, r_tramp, r_chain, tmp;
4866
4867 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
4868 POINTER_TYPE, VOID_TYPE))
4869 return NULL_RTX;
4870
4871 t_tramp = CALL_EXPR_ARG (exp, 0);
4872 t_func = CALL_EXPR_ARG (exp, 1);
4873 t_chain = CALL_EXPR_ARG (exp, 2);
4874
4875 r_tramp = expand_normal (t_tramp);
4876 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
4877 MEM_NOTRAP_P (m_tramp) = 1;
4878
4879 /* If ONSTACK, the TRAMP argument should be the address of a field
4880 within the local function's FRAME decl. Either way, let's see if
4881 we can fill in the MEM_ATTRs for this memory. */
4882 if (TREE_CODE (t_tramp) == ADDR_EXPR)
4883 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
4884
4885 /* Creator of a heap trampoline is responsible for making sure the
4886 address is aligned to at least STACK_BOUNDARY. Normally malloc
4887 will ensure this anyhow. */
4888 tmp = round_trampoline_addr (r_tramp);
4889 if (tmp != r_tramp)
4890 {
4891 m_tramp = change_address (m_tramp, BLKmode, tmp);
4892 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
4893 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
4894 }
4895
4896 /* The FUNC argument should be the address of the nested function.
4897 Extract the actual function decl to pass to the hook. */
4898 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
4899 t_func = TREE_OPERAND (t_func, 0);
4900 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
4901
4902 r_chain = expand_normal (t_chain);
4903
4904 /* Generate insns to initialize the trampoline. */
4905 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
4906
4907 if (onstack)
4908 {
4909 trampolines_created = 1;
4910
4911 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
4912 "trampoline generated for nested function %qD", t_func);
4913 }
4914
4915 return const0_rtx;
4916 }
4917
4918 static rtx
4919 expand_builtin_adjust_trampoline (tree exp)
4920 {
4921 rtx tramp;
4922
4923 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4924 return NULL_RTX;
4925
4926 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4927 tramp = round_trampoline_addr (tramp);
4928 if (targetm.calls.trampoline_adjust_address)
4929 tramp = targetm.calls.trampoline_adjust_address (tramp);
4930
4931 return tramp;
4932 }
4933
4934 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
4935 function. The function first checks whether the back end provides
4936 an insn to implement signbit for the respective mode. If not, it
4937 checks whether the floating point format of the value is such that
4938 the sign bit can be extracted. If that is not the case, error out.
4939 EXP is the expression that is a call to the builtin function; if
4940 convenient, the result should be placed in TARGET. */
4941 static rtx
4942 expand_builtin_signbit (tree exp, rtx target)
4943 {
4944 const struct real_format *fmt;
4945 machine_mode fmode, imode, rmode;
4946 tree arg;
4947 int word, bitpos;
4948 enum insn_code icode;
4949 rtx temp;
4950 location_t loc = EXPR_LOCATION (exp);
4951
4952 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4953 return NULL_RTX;
4954
4955 arg = CALL_EXPR_ARG (exp, 0);
4956 fmode = TYPE_MODE (TREE_TYPE (arg));
4957 rmode = TYPE_MODE (TREE_TYPE (exp));
4958 fmt = REAL_MODE_FORMAT (fmode);
4959
4960 arg = builtin_save_expr (arg);
4961
4962 /* Expand the argument yielding an RTX expression. */
4963 temp = expand_normal (arg);
4964
4965 /* Check if the back end provides an insn that handles signbit for the
4966 argument's mode. */
4967 icode = optab_handler (signbit_optab, fmode);
4968 if (icode != CODE_FOR_nothing)
4969 {
4970 rtx_insn *last = get_last_insn ();
4971 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
4972 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
4973 return target;
4974 delete_insns_since (last);
4975 }
4976
4977 /* For floating point formats without a sign bit, implement signbit
4978 as "ARG < 0.0". */
4979 bitpos = fmt->signbit_ro;
4980 if (bitpos < 0)
4981 {
4982 /* But we can't do this if the format supports signed zero. */
4983 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
4984
4985 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
4986 build_real (TREE_TYPE (arg), dconst0));
4987 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4988 }
4989
4990 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
4991 {
4992 imode = int_mode_for_mode (fmode);
4993 gcc_assert (imode != BLKmode);
4994 temp = gen_lowpart (imode, temp);
4995 }
4996 else
4997 {
4998 imode = word_mode;
4999 /* Handle targets with different FP word orders. */
5000 if (FLOAT_WORDS_BIG_ENDIAN)
5001 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5002 else
5003 word = bitpos / BITS_PER_WORD;
5004 temp = operand_subword_force (temp, word, fmode);
5005 bitpos = bitpos % BITS_PER_WORD;
5006 }
5007
5008 /* Force the intermediate word_mode (or narrower) result into a
5009 register. This avoids attempting to create paradoxical SUBREGs
5010 of floating point modes below. */
5011 temp = force_reg (imode, temp);
5012
5013 /* If the bitpos is within the "result mode" lowpart, the operation
5014 can be implemented with a single bitwise AND. Otherwise, we need
5015 a right shift and an AND. */
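/* E.g. for IEEE SFmode, signbit_ro == 31; with a 32-bit result mode
   the sign is extracted by the single operation
     temp & 0x80000000
   whereas a sign bit outside the result mode's lowpart takes the
   shift-and-mask path. */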
5016
5017 if (bitpos < GET_MODE_BITSIZE (rmode))
5018 {
5019 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
5020
5021 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5022 temp = gen_lowpart (rmode, temp);
5023 temp = expand_binop (rmode, and_optab, temp,
5024 immed_wide_int_const (mask, rmode),
5025 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5026 }
5027 else
5028 {
5029 /* Perform a logical right shift to place the signbit in the least
5030 significant bit, then truncate the result to the desired mode
5031 and mask just this bit. */
5032 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
5033 temp = gen_lowpart (rmode, temp);
5034 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5035 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5036 }
5037
5038 return temp;
5039 }
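
/* A hedged sketch of what the shift-and-mask path above computes, assuming
   a little-endian target with IEEE double (bitpos == 63) and a 32-bit
   RMODE; the memcpy stands in for the gen_lowpart / operand_subword_force
   reinterpretation:

     int signbit_sketch (double x)
     {
       unsigned long long bits;
       __builtin_memcpy (&bits, &x, sizeof bits);
       return (int) (bits >> 63) & 1;
     }
*/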
5040
5041 /* Expand fork or exec calls. TARGET is the desired target of the
5042 call. EXP is the call. FN is the
5043 declaration of the actual function. IGNORE is nonzero if the
5044 value is to be ignored. */
5045
5046 static rtx
5047 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5048 {
5049 tree id, decl;
5050 tree call;
5051
5052 /* If we are not profiling, just call the function. */
5053 if (!profile_arc_flag)
5054 return NULL_RTX;
5055
5056 /* Otherwise call the wrapper. This should be equivalent for the rest of
5057 the compiler, so the code does not diverge, and the wrapper may run the
5058 code necessary for keeping the profiling sane. */
5059
5060 switch (DECL_FUNCTION_CODE (fn))
5061 {
5062 case BUILT_IN_FORK:
5063 id = get_identifier ("__gcov_fork");
5064 break;
5065
5066 case BUILT_IN_EXECL:
5067 id = get_identifier ("__gcov_execl");
5068 break;
5069
5070 case BUILT_IN_EXECV:
5071 id = get_identifier ("__gcov_execv");
5072 break;
5073
5074 case BUILT_IN_EXECLP:
5075 id = get_identifier ("__gcov_execlp");
5076 break;
5077
5078 case BUILT_IN_EXECLE:
5079 id = get_identifier ("__gcov_execle");
5080 break;
5081
5082 case BUILT_IN_EXECVP:
5083 id = get_identifier ("__gcov_execvp");
5084 break;
5085
5086 case BUILT_IN_EXECVE:
5087 id = get_identifier ("__gcov_execve");
5088 break;
5089
5090 default:
5091 gcc_unreachable ();
5092 }
5093
5094 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5095 FUNCTION_DECL, id, TREE_TYPE (fn));
5096 DECL_EXTERNAL (decl) = 1;
5097 TREE_PUBLIC (decl) = 1;
5098 DECL_ARTIFICIAL (decl) = 1;
5099 TREE_NOTHROW (decl) = 1;
5100 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5101 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5102 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5103 return expand_call (call, target, ignore);
5104 }
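
/* For instance, under -fprofile-arcs a call to fork () is rewritten here
   into a call to __gcov_fork (). The intent is roughly (a sketch; the
   wrappers themselves live in libgcov, not in this file):

     pid_t __gcov_fork (void)
     {
       __gcov_flush ();  // dump and reset counters before forking
       return fork ();
     }

   and similarly the exec wrappers dump profile data before the process
   image is replaced. */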
5105
5106
5107 \f
5108 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5109 the pointer in these functions is void*, the tree optimizers may remove
5110 casts. The mode computed in expand_builtin isn't reliable either, due
5111 to __sync_bool_compare_and_swap.
5112
5113 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5114 group of builtins. This gives us log2 of the mode size. */
5115
5116 static inline machine_mode
5117 get_builtin_sync_mode (int fcode_diff)
5118 {
5119 /* The size is not negotiable, so ask not to get BLKmode in return
5120 if the target indicates that a smaller size would be better. */
5121 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5122 }
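
/* Example: for BUILT_IN_SYNC_FETCH_AND_ADD_4, FCODE_DIFF is
   BUILT_IN_SYNC_FETCH_AND_ADD_4 - BUILT_IN_SYNC_FETCH_AND_ADD_1 == 2,
   so this returns mode_for_size (8 << 2, MODE_INT, 0): the 32-bit
   integer mode, SImode on most targets. */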
5123
5124 /* Expand the memory expression LOC and return the appropriate memory operand
5125 for the builtin_sync operations. */
5126
5127 static rtx
5128 get_builtin_sync_mem (tree loc, machine_mode mode)
5129 {
5130 rtx addr, mem;
5131
5132 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5133 addr = convert_memory_address (Pmode, addr);
5134
5135 /* Note that we explicitly do not want any alias information for this
5136 memory, so that we kill all other live memories. Otherwise we don't
5137 satisfy the full barrier semantics of the intrinsic. */
5138 mem = validize_mem (gen_rtx_MEM (mode, addr));
5139
5140 /* The alignment needs to be at least that of the mode. */
5141 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5142 get_pointer_alignment (loc)));
5143 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5144 MEM_VOLATILE_P (mem) = 1;
5145
5146 return mem;
5147 }
5148
5149 /* Make sure an argument is in the right mode.
5150 EXP is the tree argument.
5151 MODE is the mode it should be in. */
5152
5153 static rtx
5154 expand_expr_force_mode (tree exp, machine_mode mode)
5155 {
5156 rtx val;
5157 machine_mode old_mode;
5158
5159 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5160 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5161 of CONST_INTs, where we know the old_mode only from the call argument. */
5162
5163 old_mode = GET_MODE (val);
5164 if (old_mode == VOIDmode)
5165 old_mode = TYPE_MODE (TREE_TYPE (exp));
5166 val = convert_modes (mode, old_mode, val, 1);
5167 return val;
5168 }
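
/* E.g. a char argument promoted to SImode by the ABI is converted back to
   QImode here; a CONST_INT carries VOIDmode, so its original mode can only
   be recovered from TREE_TYPE (exp). */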
5169
5170
5171 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5172 EXP is the CALL_EXPR. CODE is the rtx code
5173 that corresponds to the arithmetic or logical operation from the name;
5174 an exception here is that NOT actually means NAND. TARGET is an optional
5175 place for us to store the results; AFTER is true if this is the
5176 fetch_and_xxx form. */
5177
5178 static rtx
5179 expand_builtin_sync_operation (machine_mode mode, tree exp,
5180 enum rtx_code code, bool after,
5181 rtx target)
5182 {
5183 rtx val, mem;
5184 location_t loc = EXPR_LOCATION (exp);
5185
5186 if (code == NOT && warn_sync_nand)
5187 {
5188 tree fndecl = get_callee_fndecl (exp);
5189 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5190
5191 static bool warned_f_a_n, warned_n_a_f;
5192
5193 switch (fcode)
5194 {
5195 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5196 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5197 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5198 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5199 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5200 if (warned_f_a_n)
5201 break;
5202
5203 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5204 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5205 warned_f_a_n = true;
5206 break;
5207
5208 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5209 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5210 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5211 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5212 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5213 if (warned_n_a_f)
5214 break;
5215
5216 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5217 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5218 warned_n_a_f = true;
5219 break;
5220
5221 default:
5222 gcc_unreachable ();
5223 }
5224 }
5225
5226 /* Expand the operands. */
5227 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5228 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5229
5230 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
5231 after);
5232 }
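
/* A reminder of the GCC 4.4 semantic change warned about above (a sketch
   of the documented behavior, not code from this file): before 4.4,

     __sync_fetch_and_nand (p, v);   // performed *p = ~*p & v

   whereas 4.4 and later perform the C11-style

     *p = ~(*p & v);

   returning the old value (fetch_and_nand) or the new one
   (nand_and_fetch). */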
5233
5234 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5235 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5236 true if this is the boolean form. TARGET is a place for us to store the
5237 results; this is NOT optional if IS_BOOL is true. */
5238
5239 static rtx
5240 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
5241 bool is_bool, rtx target)
5242 {
5243 rtx old_val, new_val, mem;
5244 rtx *pbool, *poval;
5245
5246 /* Expand the operands. */
5247 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5248 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5249 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5250
5251 pbool = poval = NULL;
5252 if (target != const0_rtx)
5253 {
5254 if (is_bool)
5255 pbool = &target;
5256 else
5257 poval = &target;
5258 }
5259 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5260 false, MEMMODEL_SYNC_SEQ_CST,
5261 MEMMODEL_SYNC_SEQ_CST))
5262 return NULL_RTX;
5263
5264 return target;
5265 }
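
/* Hypothetical caller code for the two forms handled here:

     int  old = __sync_val_compare_and_swap (&x, expected, desired);
     bool ok  = __sync_bool_compare_and_swap (&x, expected, desired);

   The val form returns the prior contents of x whether or not the swap
   happened; the bool form returns whether it happened. Both act as full
   barriers, hence MEMMODEL_SYNC_SEQ_CST above. */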
5266
5267 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5268 general form is actually an atomic exchange, and some targets only
5269 support a reduced form with the second argument being a constant 1.
5270 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5271 the results. */
5272
5273 static rtx
5274 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
5275 rtx target)
5276 {
5277 rtx val, mem;
5278
5279 /* Expand the operands. */
5280 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5281 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5282
5283 return expand_sync_lock_test_and_set (target, mem, val);
5284 }
5285
5286 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5287
5288 static void
5289 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
5290 {
5291 rtx mem;
5292
5293 /* Expand the operands. */
5294 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5295
5296 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
5297 }
5298
5299 /* Given an integer representing an ``enum memmodel'', verify its
5300 correctness and return the memory model enum. */
5301
5302 static enum memmodel
5303 get_memmodel (tree exp)
5304 {
5305 rtx op;
5306 unsigned HOST_WIDE_INT val;
5307
5308 /* If the parameter is not a constant, it's a run time value so we'll just
5309 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5310 if (TREE_CODE (exp) != INTEGER_CST)
5311 return MEMMODEL_SEQ_CST;
5312
5313 op = expand_normal (exp);
5314
5315 val = INTVAL (op);
5316 if (targetm.memmodel_check)
5317 val = targetm.memmodel_check (val);
5318 else if (val & ~MEMMODEL_MASK)
5319 {
5320 warning (OPT_Winvalid_memory_model,
5321 "Unknown architecture specifier in memory model to builtin.");
5322 return MEMMODEL_SEQ_CST;
5323 }
5324
5325 /* Should never see a user-explicit SYNC memory model, so >= LAST works. */
5326 if (memmodel_base (val) >= MEMMODEL_LAST)
5327 {
5328 warning (OPT_Winvalid_memory_model,
5329 "invalid memory model argument to builtin");
5330 return MEMMODEL_SEQ_CST;
5331 }
5332
5333 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
5334 be conservative and promote consume to acquire. */
5335 if (val == MEMMODEL_CONSUME)
5336 val = MEMMODEL_ACQUIRE;
5337
5338 return (enum memmodel) val;
5339 }
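
/* Example of the mapping performed here, assuming the usual C11 encoding
   (MEMMODEL_RELAXED == 0 ... MEMMODEL_SEQ_CST == 5):

     __atomic_load_n (p, __ATOMIC_CONSUME)  -> promoted to MEMMODEL_ACQUIRE
                                               (PR 59448)
     __atomic_load_n (p, runtime_model)     -> MEMMODEL_SEQ_CST
     a constant with bits set outside MEMMODEL_MASK -> warning, then
                                               MEMMODEL_SEQ_CST unless a
                                               target hook accepts them. */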
5340
5341 /* Expand the __atomic_exchange intrinsic:
5342 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5343 EXP is the CALL_EXPR.
5344 TARGET is an optional place for us to store the results. */
5345
5346 static rtx
5347 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
5348 {
5349 rtx val, mem;
5350 enum memmodel model;
5351
5352 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5353
5354 if (!flag_inline_atomics)
5355 return NULL_RTX;
5356
5357 /* Expand the operands. */
5358 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5359 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5360
5361 return expand_atomic_exchange (target, mem, val, model);
5362 }
5363
5364 /* Expand the __atomic_compare_exchange intrinsic:
5365 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5366 TYPE desired, BOOL weak,
5367 enum memmodel success,
5368 enum memmodel failure)
5369 EXP is the CALL_EXPR.
5370 TARGET is an optional place for us to store the results. */
5371
5372 static rtx
5373 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
5374 rtx target)
5375 {
5376 rtx expect, desired, mem, oldval;
5377 rtx_code_label *label;
5378 enum memmodel success, failure;
5379 tree weak;
5380 bool is_weak;
5381
5382 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5383 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5384
5385 if (failure > success)
5386 {
5387 warning (OPT_Winvalid_memory_model,
5388 "failure memory model cannot be stronger than success memory "
5389 "model for %<__atomic_compare_exchange%>");
5390 success = MEMMODEL_SEQ_CST;
5391 }
5392
5393 if (is_mm_release (failure) || is_mm_acq_rel (failure))
5394 {
5395 warning (OPT_Winvalid_memory_model,
5396 "invalid failure memory model for "
5397 "%<__atomic_compare_exchange%>");
5398 failure = MEMMODEL_SEQ_CST;
5399 success = MEMMODEL_SEQ_CST;
5400 }
5401
5402
5403 if (!flag_inline_atomics)
5404 return NULL_RTX;
5405
5406 /* Expand the operands. */
5407 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5408
5409 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5410 expect = convert_memory_address (Pmode, expect);
5411 expect = gen_rtx_MEM (mode, expect);
5412 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5413
5414 weak = CALL_EXPR_ARG (exp, 3);
5415 is_weak = false;
5416 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
5417 is_weak = true;
5418
5419 if (target == const0_rtx)
5420 target = NULL;
5421
5422 /* Lest the rtl backend create a race condition with an improper store
5423 to memory, always create a new pseudo for OLDVAL. */
5424 oldval = NULL;
5425
5426 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
5427 is_weak, success, failure))
5428 return NULL_RTX;
5429
5430 /* Conditionally store back to EXPECT, lest we create a race condition
5431 with an improper store to memory. */
5432 /* ??? With a rearrangement of atomics at the gimple level, we can handle
5433 the normal case where EXPECT is totally private, i.e. a register. At
5434 which point the store can be unconditional. */
5435 label = gen_label_rtx ();
5436 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
5437 GET_MODE (target), 1, label);
5438 emit_move_insn (expect, oldval);
5439 emit_label (label);
5440
5441 return target;
5442 }
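
/* The emitted sequence is roughly equivalent to this sketch:

     bool ok = CAS (mem, *expect, desired, is_weak, success, failure);
     if (!ok)
       *expect = oldval;   // store back only on failure
     return ok;

   matching the C11 compare_exchange contract that EXPECT is updated with
   the observed value only when the exchange fails. */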
5443
5444 /* Expand the __atomic_load intrinsic:
5445 TYPE __atomic_load (TYPE *object, enum memmodel)
5446 EXP is the CALL_EXPR.
5447 TARGET is an optional place for us to store the results. */
5448
5449 static rtx
5450 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
5451 {
5452 rtx mem;
5453 enum memmodel model;
5454
5455 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5456 if (is_mm_release (model) || is_mm_acq_rel (model))
5457 {
5458 warning (OPT_Winvalid_memory_model,
5459 "invalid memory model for %<__atomic_load%>");
5460 model = MEMMODEL_SEQ_CST;
5461 }
5462
5463 if (!flag_inline_atomics)
5464 return NULL_RTX;
5465
5466 /* Expand the operand. */
5467 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5468
5469 return expand_atomic_load (target, mem, model);
5470 }
5471
5472
5473 /* Expand the __atomic_store intrinsic:
5474 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5475 EXP is the CALL_EXPR.
5476 TARGET is an optional place for us to store the results. */
5477
5478 static rtx
5479 expand_builtin_atomic_store (machine_mode mode, tree exp)
5480 {
5481 rtx mem, val;
5482 enum memmodel model;
5483
5484 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5485 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
5486 || is_mm_release (model)))
5487 {
5488 warning (OPT_Winvalid_memory_model,
5489 "invalid memory model for %<__atomic_store%>");
5490 model = MEMMODEL_SEQ_CST;
5491 }
5492
5493 if (!flag_inline_atomics)
5494 return NULL_RTX;
5495
5496 /* Expand the operands. */
5497 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5498 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5499
5500 return expand_atomic_store (mem, val, model, false);
5501 }
5502
5503 /* Expand the __atomic_fetch_XXX intrinsic:
5504 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5505 EXP is the CALL_EXPR.
5506 TARGET is an optional place for us to store the results.
5507 CODE is the operation: PLUS, MINUS, AND, XOR, or IOR (NOT means NAND).
5508 FETCH_AFTER is true if returning the result of the operation.
5509 FETCH_AFTER is false if returning the value before the operation.
5510 IGNORE is true if the result is not used.
5511 EXT_CALL is the correct builtin for an external call if this cannot be
5512 resolved to an instruction sequence. */
5513
5514 static rtx
5515 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
5516 enum rtx_code code, bool fetch_after,
5517 bool ignore, enum built_in_function ext_call)
5518 {
5519 rtx val, mem, ret;
5520 enum memmodel model;
5521 tree fndecl;
5522 tree addr;
5523
5524 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5525
5526 /* Expand the operands. */
5527 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5528 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5529
5530 /* Only try generating instructions if inlining is turned on. */
5531 if (flag_inline_atomics)
5532 {
5533 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
5534 if (ret)
5535 return ret;
5536 }
5537
5538 /* Return if a different routine isn't needed for the library call. */
5539 if (ext_call == BUILT_IN_NONE)
5540 return NULL_RTX;
5541
5542 /* Change the call to the specified function. */
5543 fndecl = get_callee_fndecl (exp);
5544 addr = CALL_EXPR_FN (exp);
5545 STRIP_NOPS (addr);
5546
5547 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
5548 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
5549
5550 /* Expand the call here so we can emit trailing code. */
5551 ret = expand_call (exp, target, ignore);
5552
5553 /* Replace the original function just in case it matters. */
5554 TREE_OPERAND (addr, 0) = fndecl;
5555
5556 /* Then issue the arithmetic correction to return the right result. */
5557 if (!ignore)
5558 {
5559 if (code == NOT)
5560 {
5561 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
5562 OPTAB_LIB_WIDEN);
5563 ret = expand_simple_unop (mode, NOT, ret, target, true);
5564 }
5565 else
5566 ret = expand_simple_binop (mode, code, ret, val, target, true,
5567 OPTAB_LIB_WIDEN);
5568 }
5569 return ret;
5570 }
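
/* Example of the trailing correction: if __atomic_add_fetch_4 cannot be
   inlined and EXT_CALL is BUILT_IN_ATOMIC_FETCH_ADD_4, the library call
   returns the value before the addition, so the code above re-applies it:

     ret = __atomic_fetch_add_4 (p, v, model);   // old value
     ret = ret + v;                              // the add_fetch result

   For NAND the correction is ret = ~(ret & v). */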
5571
5572 /* Expand an atomic clear operation.
5573 void __atomic_clear (BOOL *obj, enum memmodel)
5574 EXP is the call expression. */
5575
5576 static rtx
5577 expand_builtin_atomic_clear (tree exp)
5578 {
5579 machine_mode mode;
5580 rtx mem, ret;
5581 enum memmodel model;
5582
5583 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5584 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5585 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5586
5587 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
5588 {
5589 warning (OPT_Winvalid_memory_model,
5590 "invalid memory model for %<__atomic_store%>");
5591 model = MEMMODEL_SEQ_CST;
5592 }
5593
5594 /* Try issuing an atomic store, falling back to __sync_lock_release
5595 where the target provides it. This can only fail if the bool type
5596 is larger than a word size. Unlikely, but handle it anyway: emit a
5597 plain store and assume a single threaded model, since there is no
5598 atomic support in that case and no barriers are required. */
5599 ret = expand_atomic_store (mem, const0_rtx, model, true);
5600 if (!ret)
5601 emit_move_insn (mem, const0_rtx);
5602 return const0_rtx;
5603 }
5604
5605 /* Expand an atomic test_and_set operation.
5606 bool __atomic_test_and_set (BOOL *obj, enum memmodel)
5607 EXP is the call expression. */
5608
5609 static rtx
5610 expand_builtin_atomic_test_and_set (tree exp, rtx target)
5611 {
5612 rtx mem;
5613 enum memmodel model;
5614 machine_mode mode;
5615
5616 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5617 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5618 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5619
5620 return expand_atomic_test_and_set (target, mem, model);
5621 }
5622
5623
5624 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
5625 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
5626
5627 static tree
5628 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
5629 {
5630 int size;
5631 machine_mode mode;
5632 unsigned int mode_align, type_align;
5633
5634 if (TREE_CODE (arg0) != INTEGER_CST)
5635 return NULL_TREE;
5636
5637 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
5638 mode = mode_for_size (size, MODE_INT, 0);
5639 mode_align = GET_MODE_ALIGNMENT (mode);
5640
5641 if (TREE_CODE (arg1) == INTEGER_CST)
5642 {
5643 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
5644
5645 /* Either this argument is null, or it's a fake pointer encoding
5646 the alignment of the object. */
5647 val = val & -val;
5648 val *= BITS_PER_UNIT;
5649
5650 if (val == 0 || mode_align < val)
5651 type_align = mode_align;
5652 else
5653 type_align = val;
5654 }
5655 else
5656 {
5657 tree ttype = TREE_TYPE (arg1);
5658
5659 /* This function is usually invoked and folded immediately by the front
5660 end before anything else has a chance to look at it. The pointer
5661 parameter at this point is usually cast to a void *, so check for that
5662 and look past the cast. */
5663 if (CONVERT_EXPR_P (arg1) && POINTER_TYPE_P (ttype)
5664 && VOID_TYPE_P (TREE_TYPE (ttype)))
5665 arg1 = TREE_OPERAND (arg1, 0);
5666
5667 ttype = TREE_TYPE (arg1);
5668 gcc_assert (POINTER_TYPE_P (ttype));
5669
5670 /* Get the underlying type of the object. */
5671 ttype = TREE_TYPE (ttype);
5672 type_align = TYPE_ALIGN (ttype);
5673 }
5674
5675 /* If the object has smaller alignment, the lock free routines cannot
5676 be used. */
5677 if (type_align < mode_align)
5678 return boolean_false_node;
5679
5680 /* Check if a compare_and_swap pattern exists for the mode which represents
5681 the required size. The pattern is not allowed to fail, so the existence
5682 of the pattern indicates support is present. */
5683 if (can_compare_and_swap_p (mode, true))
5684 return boolean_true_node;
5685 else
5686 return boolean_false_node;
5687 }
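
/* Example of the fake-pointer convention (hypothetical folded calls):

     __atomic_always_lock_free (4, 0);            // NULL: typical alignment
     __atomic_always_lock_free (4, (void *) 2);   // 2-byte-aligned object

   In the second call, val & -val isolates the lowest set bit (2), giving a
   16-bit alignment assumption; on a target where the 32-bit mode requires
   32-bit alignment the result is therefore boolean_false_node. */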
5688
5689 /* Return true if the parameters to call EXP represent an object which will
5690 always generate lock free instructions. The first argument represents the
5691 size of the object, and the second parameter is a pointer to the object
5692 itself. If NULL is passed for the object, then the result is based on
5693 typical alignment for an object of the specified size. Otherwise return
5694 false. */
5695
5696 static rtx
5697 expand_builtin_atomic_always_lock_free (tree exp)
5698 {
5699 tree size;
5700 tree arg0 = CALL_EXPR_ARG (exp, 0);
5701 tree arg1 = CALL_EXPR_ARG (exp, 1);
5702
5703 if (TREE_CODE (arg0) != INTEGER_CST)
5704 {
5705 error ("non-constant argument 1 to __atomic_always_lock_free");
5706 return const0_rtx;
5707 }
5708
5709 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
5710 if (size == boolean_true_node)
5711 return const1_rtx;
5712 return const0_rtx;
5713 }
5714
5715 /* Return one or zero if it can be determined that object ARG1 of size ARG0
5716 is lock free on this architecture. */
5717
5718 static tree
5719 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
5720 {
5721 if (!flag_inline_atomics)
5722 return NULL_TREE;
5723
5724 /* If it isn't always lock free, don't generate a result. */
5725 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
5726 return boolean_true_node;
5727
5728 return NULL_TREE;
5729 }
5730
5731 /* Return true if the parameters to call EXP represent an object which will
5732 always generate lock free instructions. The first argument represents the
5733 size of the object, and the second parameter is a pointer to the object
5734 itself. If NULL is passed for the object, then the result is based on
5735 typical alignment for an object of the specified size. Otherwise return
5736 NULL_RTX. */
5737
5738 static rtx
5739 expand_builtin_atomic_is_lock_free (tree exp)
5740 {
5741 tree size;
5742 tree arg0 = CALL_EXPR_ARG (exp, 0);
5743 tree arg1 = CALL_EXPR_ARG (exp, 1);
5744
5745 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5746 {
5747 error ("non-integer argument 1 to __atomic_is_lock_free");
5748 return NULL_RTX;
5749 }
5750
5751 if (!flag_inline_atomics)
5752 return NULL_RTX;
5753
5754 /* If the value is known at compile time, return the RTX for it. */
5755 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
5756 if (size == boolean_true_node)
5757 return const1_rtx;
5758
5759 return NULL_RTX;
5760 }
5761
5762 /* Expand the __atomic_thread_fence intrinsic:
5763 void __atomic_thread_fence (enum memmodel)
5764 EXP is the CALL_EXPR. */
5765
5766 static void
5767 expand_builtin_atomic_thread_fence (tree exp)
5768 {
5769 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5770 expand_mem_thread_fence (model);
5771 }
5772
5773 /* Expand the __atomic_signal_fence intrinsic:
5774 void __atomic_signal_fence (enum memmodel)
5775 EXP is the CALL_EXPR. */
5776
5777 static void
5778 expand_builtin_atomic_signal_fence (tree exp)
5779 {
5780 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5781 expand_mem_signal_fence (model);
5782 }
5783
5784 /* Expand the __sync_synchronize intrinsic. */
5785
5786 static void
5787 expand_builtin_sync_synchronize (void)
5788 {
5789 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
5790 }
5791
5792 static rtx
5793 expand_builtin_thread_pointer (tree exp, rtx target)
5794 {
5795 enum insn_code icode;
5796 if (!validate_arglist (exp, VOID_TYPE))
5797 return const0_rtx;
5798 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
5799 if (icode != CODE_FOR_nothing)
5800 {
5801 struct expand_operand op;
5802 /* If the target is not suitable, create a new target. */
5803 if (target == NULL_RTX
5804 || !REG_P (target)
5805 || GET_MODE (target) != Pmode)
5806 target = gen_reg_rtx (Pmode);
5807 create_output_operand (&op, target, Pmode);
5808 expand_insn (icode, 1, &op);
5809 return target;
5810 }
5811 error ("__builtin_thread_pointer is not supported on this target");
5812 return const0_rtx;
5813 }
5814
5815 static void
5816 expand_builtin_set_thread_pointer (tree exp)
5817 {
5818 enum insn_code icode;
5819 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5820 return;
5821 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
5822 if (icode != CODE_FOR_nothing)
5823 {
5824 struct expand_operand op;
5825 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
5826 Pmode, EXPAND_NORMAL);
5827 create_input_operand (&op, val, Pmode);
5828 expand_insn (icode, 1, &op);
5829 return;
5830 }
5831 error ("__builtin_set_thread_pointer is not supported on this target");
5832 }
5833
5834 \f
5835 /* Emit code to restore the stack pointer from the value saved in VAR. */
5836
5837 static void
5838 expand_stack_restore (tree var)
5839 {
5840 rtx_insn *prev;
5841 rtx sa = expand_normal (var);
5842
5843 sa = convert_memory_address (Pmode, sa);
5844
5845 prev = get_last_insn ();
5846 emit_stack_restore (SAVE_BLOCK, sa);
5847
5848 record_new_stack_level ();
5849
5850 fixup_args_size_notes (prev, get_last_insn (), 0);
5851 }
5852
5853 /* Emit code to save the current value of the stack pointer. */
5854
5855 static rtx
5856 expand_stack_save (void)
5857 {
5858 rtx ret = NULL_RTX;
5859
5860 emit_stack_save (SAVE_BLOCK, &ret);
5861 return ret;
5862 }
5863
5864
5865 /* Expand an expression EXP that calls a built-in function,
5866 with result going to TARGET if that's convenient
5867 (and in mode MODE if that's convenient).
5868 SUBTARGET may be used as the target for computing one of EXP's operands.
5869 IGNORE is nonzero if the value is to be ignored. */
5870
5871 rtx
5872 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
5873 int ignore)
5874 {
5875 tree fndecl = get_callee_fndecl (exp);
5876 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5877 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5878 int flags;
5879
5880 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5881 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5882
5883 /* When ASan is enabled, we don't want to expand some memory/string
5884 builtins, relying on libsanitizer's hooks instead. This allows us to
5885 avoid redundant checks and be sure that a possible overflow will be
5886 detected by ASan.
5887
5888 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
5889 return expand_call (exp, target, ignore);
5890
5891 /* When not optimizing, generate calls to library functions for a certain
5892 set of builtins. */
5893 if (!optimize
5894 && !called_as_built_in (fndecl)
5895 && fcode != BUILT_IN_FORK
5896 && fcode != BUILT_IN_EXECL
5897 && fcode != BUILT_IN_EXECV
5898 && fcode != BUILT_IN_EXECLP
5899 && fcode != BUILT_IN_EXECLE
5900 && fcode != BUILT_IN_EXECVP
5901 && fcode != BUILT_IN_EXECVE
5902 && fcode != BUILT_IN_ALLOCA
5903 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
5904 && fcode != BUILT_IN_FREE
5905 && fcode != BUILT_IN_CHKP_SET_PTR_BOUNDS
5906 && fcode != BUILT_IN_CHKP_INIT_PTR_BOUNDS
5907 && fcode != BUILT_IN_CHKP_NULL_PTR_BOUNDS
5908 && fcode != BUILT_IN_CHKP_COPY_PTR_BOUNDS
5909 && fcode != BUILT_IN_CHKP_NARROW_PTR_BOUNDS
5910 && fcode != BUILT_IN_CHKP_STORE_PTR_BOUNDS
5911 && fcode != BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
5912 && fcode != BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
5913 && fcode != BUILT_IN_CHKP_CHECK_PTR_BOUNDS
5914 && fcode != BUILT_IN_CHKP_GET_PTR_LBOUND
5915 && fcode != BUILT_IN_CHKP_GET_PTR_UBOUND
5916 && fcode != BUILT_IN_CHKP_BNDRET)
5917 return expand_call (exp, target, ignore);
5918
5919 /* The built-in function expanders test for target == const0_rtx
5920 to determine whether the function's result will be ignored. */
5921 if (ignore)
5922 target = const0_rtx;
5923
5924 /* If the result of a pure or const built-in function is ignored, and
5925 none of its arguments are volatile, we can avoid expanding the
5926 built-in call and just evaluate the arguments for side-effects. */
5927 if (target == const0_rtx
5928 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
5929 && !(flags & ECF_LOOPING_CONST_OR_PURE))
5930 {
5931 bool volatilep = false;
5932 tree arg;
5933 call_expr_arg_iterator iter;
5934
5935 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5936 if (TREE_THIS_VOLATILE (arg))
5937 {
5938 volatilep = true;
5939 break;
5940 }
5941
5942 if (! volatilep)
5943 {
5944 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5945 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5946 return const0_rtx;
5947 }
5948 }
5949
5950 /* expand_builtin_with_bounds is supposed to be used for
5951 instrumented builtin calls. */
5952 gcc_assert (!CALL_WITH_BOUNDS_P (exp));
5953
5954 switch (fcode)
5955 {
5956 CASE_FLT_FN (BUILT_IN_FABS):
5957 case BUILT_IN_FABSD32:
5958 case BUILT_IN_FABSD64:
5959 case BUILT_IN_FABSD128:
5960 target = expand_builtin_fabs (exp, target, subtarget);
5961 if (target)
5962 return target;
5963 break;
5964
5965 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5966 target = expand_builtin_copysign (exp, target, subtarget);
5967 if (target)
5968 return target;
5969 break;
5970
5971 /* Just do a normal library call if we were unable to fold
5972 the values. */
5973 CASE_FLT_FN (BUILT_IN_CABS):
5974 break;
5975
5976 CASE_FLT_FN (BUILT_IN_EXP):
5977 CASE_FLT_FN (BUILT_IN_EXP10):
5978 CASE_FLT_FN (BUILT_IN_POW10):
5979 CASE_FLT_FN (BUILT_IN_EXP2):
5980 CASE_FLT_FN (BUILT_IN_EXPM1):
5981 CASE_FLT_FN (BUILT_IN_LOGB):
5982 CASE_FLT_FN (BUILT_IN_LOG):
5983 CASE_FLT_FN (BUILT_IN_LOG10):
5984 CASE_FLT_FN (BUILT_IN_LOG2):
5985 CASE_FLT_FN (BUILT_IN_LOG1P):
5986 CASE_FLT_FN (BUILT_IN_TAN):
5987 CASE_FLT_FN (BUILT_IN_ASIN):
5988 CASE_FLT_FN (BUILT_IN_ACOS):
5989 CASE_FLT_FN (BUILT_IN_ATAN):
5990 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
5991 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5992 because of possible accuracy problems. */
5993 if (! flag_unsafe_math_optimizations)
5994 break;
5995 CASE_FLT_FN (BUILT_IN_SQRT):
5996 CASE_FLT_FN (BUILT_IN_FLOOR):
5997 CASE_FLT_FN (BUILT_IN_CEIL):
5998 CASE_FLT_FN (BUILT_IN_TRUNC):
5999 CASE_FLT_FN (BUILT_IN_ROUND):
6000 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6001 CASE_FLT_FN (BUILT_IN_RINT):
6002 target = expand_builtin_mathfn (exp, target, subtarget);
6003 if (target)
6004 return target;
6005 break;
6006
6007 CASE_FLT_FN (BUILT_IN_FMA):
6008 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
6009 if (target)
6010 return target;
6011 break;
6012
6013 CASE_FLT_FN (BUILT_IN_ILOGB):
6014 if (! flag_unsafe_math_optimizations)
6015 break;
6016 CASE_FLT_FN (BUILT_IN_ISINF):
6017 CASE_FLT_FN (BUILT_IN_FINITE):
6018 case BUILT_IN_ISFINITE:
6019 case BUILT_IN_ISNORMAL:
6020 target = expand_builtin_interclass_mathfn (exp, target);
6021 if (target)
6022 return target;
6023 break;
6024
6025 CASE_FLT_FN (BUILT_IN_ICEIL):
6026 CASE_FLT_FN (BUILT_IN_LCEIL):
6027 CASE_FLT_FN (BUILT_IN_LLCEIL):
6028 CASE_FLT_FN (BUILT_IN_LFLOOR):
6029 CASE_FLT_FN (BUILT_IN_IFLOOR):
6030 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6031 target = expand_builtin_int_roundingfn (exp, target);
6032 if (target)
6033 return target;
6034 break;
6035
6036 CASE_FLT_FN (BUILT_IN_IRINT):
6037 CASE_FLT_FN (BUILT_IN_LRINT):
6038 CASE_FLT_FN (BUILT_IN_LLRINT):
6039 CASE_FLT_FN (BUILT_IN_IROUND):
6040 CASE_FLT_FN (BUILT_IN_LROUND):
6041 CASE_FLT_FN (BUILT_IN_LLROUND):
6042 target = expand_builtin_int_roundingfn_2 (exp, target);
6043 if (target)
6044 return target;
6045 break;
6046
6047 CASE_FLT_FN (BUILT_IN_POWI):
6048 target = expand_builtin_powi (exp, target);
6049 if (target)
6050 return target;
6051 break;
6052
6053 CASE_FLT_FN (BUILT_IN_ATAN2):
6054 CASE_FLT_FN (BUILT_IN_LDEXP):
6055 CASE_FLT_FN (BUILT_IN_SCALB):
6056 CASE_FLT_FN (BUILT_IN_SCALBN):
6057 CASE_FLT_FN (BUILT_IN_SCALBLN):
6058 if (! flag_unsafe_math_optimizations)
6059 break;
6060
6061 CASE_FLT_FN (BUILT_IN_FMOD):
6062 CASE_FLT_FN (BUILT_IN_REMAINDER):
6063 CASE_FLT_FN (BUILT_IN_DREM):
6064 CASE_FLT_FN (BUILT_IN_POW):
6065 target = expand_builtin_mathfn_2 (exp, target, subtarget);
6066 if (target)
6067 return target;
6068 break;
6069
6070 CASE_FLT_FN (BUILT_IN_CEXPI):
6071 target = expand_builtin_cexpi (exp, target);
6072 gcc_assert (target);
6073 return target;
6074
6075 CASE_FLT_FN (BUILT_IN_SIN):
6076 CASE_FLT_FN (BUILT_IN_COS):
6077 if (! flag_unsafe_math_optimizations)
6078 break;
6079 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6080 if (target)
6081 return target;
6082 break;
6083
6084 CASE_FLT_FN (BUILT_IN_SINCOS):
6085 if (! flag_unsafe_math_optimizations)
6086 break;
6087 target = expand_builtin_sincos (exp);
6088 if (target)
6089 return target;
6090 break;
6091
6092 case BUILT_IN_APPLY_ARGS:
6093 return expand_builtin_apply_args ();
6094
6095 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6096 FUNCTION with a copy of the parameters described by
6097 ARGUMENTS, and ARGSIZE. It returns a block of memory
6098 allocated on the stack into which is stored all the registers
6099 that might possibly be used for returning the result of a
6100 function. ARGUMENTS is the value returned by
6101 __builtin_apply_args. ARGSIZE is the number of bytes of
6102 arguments that must be copied. ??? How should this value be
6103 computed? We'll also need a safe worst case value for varargs
6104 functions. */
6105 case BUILT_IN_APPLY:
6106 if (!validate_arglist (exp, POINTER_TYPE,
6107 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6108 && !validate_arglist (exp, REFERENCE_TYPE,
6109 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6110 return const0_rtx;
6111 else
6112 {
6113 rtx ops[3];
6114
6115 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6116 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6117 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6118
6119 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6120 }
6121
6122 /* __builtin_return (RESULT) causes the function to return the
6123 value described by RESULT. RESULT is address of the block of
6124 memory returned by __builtin_apply. */
6125 case BUILT_IN_RETURN:
6126 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6127 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6128 return const0_rtx;
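
/* A typical (hypothetical) use of the apply/return machinery is a
   transparent wrapper such as

     double wrapped (double);
     double wrapper (double x)
     {
       void *args = __builtin_apply_args ();
       __builtin_return (__builtin_apply ((void (*) ()) wrapped, args, 16));
     }

   where 16 is a caller-chosen upper bound on the bytes of stack arguments
   to copy. */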
6129
6130 case BUILT_IN_SAVEREGS:
6131 return expand_builtin_saveregs ();
6132
6133 case BUILT_IN_VA_ARG_PACK:
6134 /* All valid uses of __builtin_va_arg_pack () are removed during
6135 inlining. */
6136 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6137 return const0_rtx;
6138
6139 case BUILT_IN_VA_ARG_PACK_LEN:
6140 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6141 inlining. */
6142 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6143 return const0_rtx;
6144
6145 /* Return the address of the first anonymous stack arg. */
6146 case BUILT_IN_NEXT_ARG:
6147 if (fold_builtin_next_arg (exp, false))
6148 return const0_rtx;
6149 return expand_builtin_next_arg ();
6150
6151 case BUILT_IN_CLEAR_CACHE:
6152 target = expand_builtin___clear_cache (exp);
6153 if (target)
6154 return target;
6155 break;
6156
6157 case BUILT_IN_CLASSIFY_TYPE:
6158 return expand_builtin_classify_type (exp);
6159
6160 case BUILT_IN_CONSTANT_P:
6161 return const0_rtx;
6162
6163 case BUILT_IN_FRAME_ADDRESS:
6164 case BUILT_IN_RETURN_ADDRESS:
6165 return expand_builtin_frame_address (fndecl, exp);
6166
6167 /* Returns the address of the area where the structure is returned.
6168 0 otherwise. */
6169 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6170 if (call_expr_nargs (exp) != 0
6171 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6172 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6173 return const0_rtx;
6174 else
6175 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6176
6177 case BUILT_IN_ALLOCA:
6178 case BUILT_IN_ALLOCA_WITH_ALIGN:
6179 /* If the allocation stems from the declaration of a variable-sized
6180 object, it cannot accumulate. */
6181 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
6182 if (target)
6183 return target;
6184 break;
6185
6186 case BUILT_IN_STACK_SAVE:
6187 return expand_stack_save ();
6188
6189 case BUILT_IN_STACK_RESTORE:
6190 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6191 return const0_rtx;
6192
6193 case BUILT_IN_BSWAP16:
6194 case BUILT_IN_BSWAP32:
6195 case BUILT_IN_BSWAP64:
6196 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
6197 if (target)
6198 return target;
6199 break;
6200
6201 CASE_INT_FN (BUILT_IN_FFS):
6202 target = expand_builtin_unop (target_mode, exp, target,
6203 subtarget, ffs_optab);
6204 if (target)
6205 return target;
6206 break;
6207
6208 CASE_INT_FN (BUILT_IN_CLZ):
6209 target = expand_builtin_unop (target_mode, exp, target,
6210 subtarget, clz_optab);
6211 if (target)
6212 return target;
6213 break;
6214
6215 CASE_INT_FN (BUILT_IN_CTZ):
6216 target = expand_builtin_unop (target_mode, exp, target,
6217 subtarget, ctz_optab);
6218 if (target)
6219 return target;
6220 break;
6221
6222 CASE_INT_FN (BUILT_IN_CLRSB):
6223 target = expand_builtin_unop (target_mode, exp, target,
6224 subtarget, clrsb_optab);
6225 if (target)
6226 return target;
6227 break;
6228
6229 CASE_INT_FN (BUILT_IN_POPCOUNT):
6230 target = expand_builtin_unop (target_mode, exp, target,
6231 subtarget, popcount_optab);
6232 if (target)
6233 return target;
6234 break;
6235
6236 CASE_INT_FN (BUILT_IN_PARITY):
6237 target = expand_builtin_unop (target_mode, exp, target,
6238 subtarget, parity_optab);
6239 if (target)
6240 return target;
6241 break;
6242
6243 case BUILT_IN_STRLEN:
6244 target = expand_builtin_strlen (exp, target, target_mode);
6245 if (target)
6246 return target;
6247 break;
6248
6249 case BUILT_IN_STRCPY:
6250 target = expand_builtin_strcpy (exp, target);
6251 if (target)
6252 return target;
6253 break;
6254
6255 case BUILT_IN_STRNCPY:
6256 target = expand_builtin_strncpy (exp, target);
6257 if (target)
6258 return target;
6259 break;
6260
6261 case BUILT_IN_STPCPY:
6262 target = expand_builtin_stpcpy (exp, target, mode);
6263 if (target)
6264 return target;
6265 break;
6266
6267 case BUILT_IN_MEMCPY:
6268 target = expand_builtin_memcpy (exp, target);
6269 if (target)
6270 return target;
6271 break;
6272
6273 case BUILT_IN_MEMPCPY:
6274 target = expand_builtin_mempcpy (exp, target, mode);
6275 if (target)
6276 return target;
6277 break;
6278
6279 case BUILT_IN_MEMSET:
6280 target = expand_builtin_memset (exp, target, mode);
6281 if (target)
6282 return target;
6283 break;
6284
6285 case BUILT_IN_BZERO:
6286 target = expand_builtin_bzero (exp);
6287 if (target)
6288 return target;
6289 break;
6290
6291 case BUILT_IN_STRCMP:
6292 target = expand_builtin_strcmp (exp, target);
6293 if (target)
6294 return target;
6295 break;
6296
6297 case BUILT_IN_STRNCMP:
6298 target = expand_builtin_strncmp (exp, target, mode);
6299 if (target)
6300 return target;
6301 break;
6302
6303 case BUILT_IN_BCMP:
6304 case BUILT_IN_MEMCMP:
6305 target = expand_builtin_memcmp (exp, target);
6306 if (target)
6307 return target;
6308 break;
6309
6310 case BUILT_IN_SETJMP:
6311 /* This should have been lowered to the builtins below. */
6312 gcc_unreachable ();
6313
6314 case BUILT_IN_SETJMP_SETUP:
6315 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6316 and the receiver label. */
6317 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6318 {
6319 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6320 VOIDmode, EXPAND_NORMAL);
6321 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6322 rtx_insn *label_r = label_rtx (label);
6323
6324 /* This is copied from the handling of non-local gotos. */
6325 expand_builtin_setjmp_setup (buf_addr, label_r);
6326 nonlocal_goto_handler_labels
6327 = gen_rtx_INSN_LIST (VOIDmode, label_r,
6328 nonlocal_goto_handler_labels);
6329 /* ??? Do not let expand_label treat us as such since we would
6330 not want to be both on the list of non-local labels and on
6331 the list of forced labels. */
6332 FORCED_LABEL (label) = 0;
6333 return const0_rtx;
6334 }
6335 break;
6336
6337 case BUILT_IN_SETJMP_RECEIVER:
6338 /* __builtin_setjmp_receiver is passed the receiver label. */
6339 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6340 {
6341 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6342 rtx_insn *label_r = label_rtx (label);
6343
6344 expand_builtin_setjmp_receiver (label_r);
6345 return const0_rtx;
6346 }
6347 break;
6348
6349 /* __builtin_longjmp is passed a pointer to an array of five words.
6350 It's similar to the C library longjmp function but works with
6351 __builtin_setjmp above. */
6352 case BUILT_IN_LONGJMP:
6353 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6354 {
6355 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6356 VOIDmode, EXPAND_NORMAL);
6357 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6358
6359 if (value != const1_rtx)
6360 {
6361 error ("%<__builtin_longjmp%> second argument must be 1");
6362 return const0_rtx;
6363 }
6364
6365 expand_builtin_longjmp (buf_addr, value);
6366 return const0_rtx;
6367 }
6368 break;
6369
6370 case BUILT_IN_NONLOCAL_GOTO:
6371 target = expand_builtin_nonlocal_goto (exp);
6372 if (target)
6373 return target;
6374 break;
6375
6376 /* This updates the setjmp buffer that is its argument with the value
6377 of the current stack pointer. */
6378 case BUILT_IN_UPDATE_SETJMP_BUF:
6379 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6380 {
6381 rtx buf_addr
6382 = expand_normal (CALL_EXPR_ARG (exp, 0));
6383
6384 expand_builtin_update_setjmp_buf (buf_addr);
6385 return const0_rtx;
6386 }
6387 break;
6388
6389 case BUILT_IN_TRAP:
6390 expand_builtin_trap ();
6391 return const0_rtx;
6392
6393 case BUILT_IN_UNREACHABLE:
6394 expand_builtin_unreachable ();
6395 return const0_rtx;
6396
6397 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6398 case BUILT_IN_SIGNBITD32:
6399 case BUILT_IN_SIGNBITD64:
6400 case BUILT_IN_SIGNBITD128:
6401 target = expand_builtin_signbit (exp, target);
6402 if (target)
6403 return target;
6404 break;
6405
6406 /* Various hooks for the DWARF 2 __throw routine. */
6407 case BUILT_IN_UNWIND_INIT:
6408 expand_builtin_unwind_init ();
6409 return const0_rtx;
6410 case BUILT_IN_DWARF_CFA:
6411 return virtual_cfa_rtx;
6412 #ifdef DWARF2_UNWIND_INFO
6413 case BUILT_IN_DWARF_SP_COLUMN:
6414 return expand_builtin_dwarf_sp_column ();
6415 case BUILT_IN_INIT_DWARF_REG_SIZES:
6416 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6417 return const0_rtx;
6418 #endif
6419 case BUILT_IN_FROB_RETURN_ADDR:
6420 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6421 case BUILT_IN_EXTRACT_RETURN_ADDR:
6422 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6423 case BUILT_IN_EH_RETURN:
6424 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6425 CALL_EXPR_ARG (exp, 1));
6426 return const0_rtx;
6427 case BUILT_IN_EH_RETURN_DATA_REGNO:
6428 return expand_builtin_eh_return_data_regno (exp);
6429 case BUILT_IN_EXTEND_POINTER:
6430 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6431 case BUILT_IN_EH_POINTER:
6432 return expand_builtin_eh_pointer (exp);
6433 case BUILT_IN_EH_FILTER:
6434 return expand_builtin_eh_filter (exp);
6435 case BUILT_IN_EH_COPY_VALUES:
6436 return expand_builtin_eh_copy_values (exp);
6437
6438 case BUILT_IN_VA_START:
6439 return expand_builtin_va_start (exp);
6440 case BUILT_IN_VA_END:
6441 return expand_builtin_va_end (exp);
6442 case BUILT_IN_VA_COPY:
6443 return expand_builtin_va_copy (exp);
6444 case BUILT_IN_EXPECT:
6445 return expand_builtin_expect (exp, target);
6446 case BUILT_IN_ASSUME_ALIGNED:
6447 return expand_builtin_assume_aligned (exp, target);
6448 case BUILT_IN_PREFETCH:
6449 expand_builtin_prefetch (exp);
6450 return const0_rtx;
6451
6452 case BUILT_IN_INIT_TRAMPOLINE:
6453 return expand_builtin_init_trampoline (exp, true);
6454 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
6455 return expand_builtin_init_trampoline (exp, false);
6456 case BUILT_IN_ADJUST_TRAMPOLINE:
6457 return expand_builtin_adjust_trampoline (exp);
6458
6459 case BUILT_IN_FORK:
6460 case BUILT_IN_EXECL:
6461 case BUILT_IN_EXECV:
6462 case BUILT_IN_EXECLP:
6463 case BUILT_IN_EXECLE:
6464 case BUILT_IN_EXECVP:
6465 case BUILT_IN_EXECVE:
6466 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6467 if (target)
6468 return target;
6469 break;
6470
6471 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
6472 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
6473 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
6474 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
6475 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
6476 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
6477 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
6478 if (target)
6479 return target;
6480 break;
6481
6482 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
6483 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
6484 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
6485 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
6486 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
6487 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
6488 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
6489 if (target)
6490 return target;
6491 break;
6492
6493 case BUILT_IN_SYNC_FETCH_AND_OR_1:
6494 case BUILT_IN_SYNC_FETCH_AND_OR_2:
6495 case BUILT_IN_SYNC_FETCH_AND_OR_4:
6496 case BUILT_IN_SYNC_FETCH_AND_OR_8:
6497 case BUILT_IN_SYNC_FETCH_AND_OR_16:
6498 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
6499 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
6500 if (target)
6501 return target;
6502 break;
6503
6504 case BUILT_IN_SYNC_FETCH_AND_AND_1:
6505 case BUILT_IN_SYNC_FETCH_AND_AND_2:
6506 case BUILT_IN_SYNC_FETCH_AND_AND_4:
6507 case BUILT_IN_SYNC_FETCH_AND_AND_8:
6508 case BUILT_IN_SYNC_FETCH_AND_AND_16:
6509 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
6510 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
6511 if (target)
6512 return target;
6513 break;
6514
6515 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
6516 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
6517 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
6518 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
6519 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
6520 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
6521 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
6522 if (target)
6523 return target;
6524 break;
6525
6526 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6527 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6528 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6529 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6530 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6531 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
6532 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
6533 if (target)
6534 return target;
6535 break;
6536
6537 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
6538 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
6539 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
6540 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
6541 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
6542 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
6543 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
6544 if (target)
6545 return target;
6546 break;
6547
6548 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
6549 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
6550 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
6551 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
6552 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
6553 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
6554 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
6555 if (target)
6556 return target;
6557 break;
6558
6559 case BUILT_IN_SYNC_OR_AND_FETCH_1:
6560 case BUILT_IN_SYNC_OR_AND_FETCH_2:
6561 case BUILT_IN_SYNC_OR_AND_FETCH_4:
6562 case BUILT_IN_SYNC_OR_AND_FETCH_8:
6563 case BUILT_IN_SYNC_OR_AND_FETCH_16:
6564 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
6565 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
6566 if (target)
6567 return target;
6568 break;
6569
6570 case BUILT_IN_SYNC_AND_AND_FETCH_1:
6571 case BUILT_IN_SYNC_AND_AND_FETCH_2:
6572 case BUILT_IN_SYNC_AND_AND_FETCH_4:
6573 case BUILT_IN_SYNC_AND_AND_FETCH_8:
6574 case BUILT_IN_SYNC_AND_AND_FETCH_16:
6575 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
6576 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
6577 if (target)
6578 return target;
6579 break;
6580
6581 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
6582 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
6583 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
6584 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
6585 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
6586 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
6587 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
6588 if (target)
6589 return target;
6590 break;
6591
6592 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6593 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6594 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6595 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6596 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6597 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
6598 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
6599 if (target)
6600 return target;
6601 break;
6602
6603 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
6604 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
6605 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
6606 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
6607 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
6608 if (mode == VOIDmode)
6609 mode = TYPE_MODE (boolean_type_node);
6610 if (!target || !register_operand (target, mode))
6611 target = gen_reg_rtx (mode);
6612
6613 mode = get_builtin_sync_mode
6614 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
6615 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6616 if (target)
6617 return target;
6618 break;
6619
6620 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
6621 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
6622 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
6623 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
6624 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
6625 mode = get_builtin_sync_mode
6626 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
6627 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6628 if (target)
6629 return target;
6630 break;
6631
6632 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
6633 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
6634 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
6635 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
6636 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
6637 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
6638 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
6639 if (target)
6640 return target;
6641 break;
6642
6643 case BUILT_IN_SYNC_LOCK_RELEASE_1:
6644 case BUILT_IN_SYNC_LOCK_RELEASE_2:
6645 case BUILT_IN_SYNC_LOCK_RELEASE_4:
6646 case BUILT_IN_SYNC_LOCK_RELEASE_8:
6647 case BUILT_IN_SYNC_LOCK_RELEASE_16:
6648 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
6649 expand_builtin_sync_lock_release (mode, exp);
6650 return const0_rtx;
6651
6652 case BUILT_IN_SYNC_SYNCHRONIZE:
6653 expand_builtin_sync_synchronize ();
6654 return const0_rtx;
6655
6656 case BUILT_IN_ATOMIC_EXCHANGE_1:
6657 case BUILT_IN_ATOMIC_EXCHANGE_2:
6658 case BUILT_IN_ATOMIC_EXCHANGE_4:
6659 case BUILT_IN_ATOMIC_EXCHANGE_8:
6660 case BUILT_IN_ATOMIC_EXCHANGE_16:
6661 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
6662 target = expand_builtin_atomic_exchange (mode, exp, target);
6663 if (target)
6664 return target;
6665 break;
6666
6667 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
6668 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
6669 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
6670 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
6671 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
6672 {
6673 unsigned int nargs, z;
6674 vec<tree, va_gc> *vec;
6675
6676 mode =
6677 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
6678 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
6679 if (target)
6680 return target;
6681
6682 /* If this is turned into an external library call, the weak parameter
6683 must be dropped to match the expected parameter list. */
6684 nargs = call_expr_nargs (exp);
6685 vec_alloc (vec, nargs - 1);
6686 for (z = 0; z < 3; z++)
6687 vec->quick_push (CALL_EXPR_ARG (exp, z));
6688 /* Skip the boolean weak parameter. */
6689 for (z = 4; z < 6; z++)
6690 vec->quick_push (CALL_EXPR_ARG (exp, z));
6691 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
6692 break;
6693 }
6694
6695 case BUILT_IN_ATOMIC_LOAD_1:
6696 case BUILT_IN_ATOMIC_LOAD_2:
6697 case BUILT_IN_ATOMIC_LOAD_4:
6698 case BUILT_IN_ATOMIC_LOAD_8:
6699 case BUILT_IN_ATOMIC_LOAD_16:
6700 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
6701 target = expand_builtin_atomic_load (mode, exp, target);
6702 if (target)
6703 return target;
6704 break;
6705
6706 case BUILT_IN_ATOMIC_STORE_1:
6707 case BUILT_IN_ATOMIC_STORE_2:
6708 case BUILT_IN_ATOMIC_STORE_4:
6709 case BUILT_IN_ATOMIC_STORE_8:
6710 case BUILT_IN_ATOMIC_STORE_16:
6711 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
6712 target = expand_builtin_atomic_store (mode, exp);
6713 if (target)
6714 return const0_rtx;
6715 break;
6716
6717 case BUILT_IN_ATOMIC_ADD_FETCH_1:
6718 case BUILT_IN_ATOMIC_ADD_FETCH_2:
6719 case BUILT_IN_ATOMIC_ADD_FETCH_4:
6720 case BUILT_IN_ATOMIC_ADD_FETCH_8:
6721 case BUILT_IN_ATOMIC_ADD_FETCH_16:
6722 {
6723 enum built_in_function lib;
6724 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
6725 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
6726 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
6727 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
6728 ignore, lib);
6729 if (target)
6730 return target;
6731 break;
6732 }
6733 case BUILT_IN_ATOMIC_SUB_FETCH_1:
6734 case BUILT_IN_ATOMIC_SUB_FETCH_2:
6735 case BUILT_IN_ATOMIC_SUB_FETCH_4:
6736 case BUILT_IN_ATOMIC_SUB_FETCH_8:
6737 case BUILT_IN_ATOMIC_SUB_FETCH_16:
6738 {
6739 enum built_in_function lib;
6740 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
6741 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
6742 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
6743 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
6744 ignore, lib);
6745 if (target)
6746 return target;
6747 break;
6748 }
6749 case BUILT_IN_ATOMIC_AND_FETCH_1:
6750 case BUILT_IN_ATOMIC_AND_FETCH_2:
6751 case BUILT_IN_ATOMIC_AND_FETCH_4:
6752 case BUILT_IN_ATOMIC_AND_FETCH_8:
6753 case BUILT_IN_ATOMIC_AND_FETCH_16:
6754 {
6755 enum built_in_function lib;
6756 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
6757 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
6758 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
6759 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
6760 ignore, lib);
6761 if (target)
6762 return target;
6763 break;
6764 }
6765 case BUILT_IN_ATOMIC_NAND_FETCH_1:
6766 case BUILT_IN_ATOMIC_NAND_FETCH_2:
6767 case BUILT_IN_ATOMIC_NAND_FETCH_4:
6768 case BUILT_IN_ATOMIC_NAND_FETCH_8:
6769 case BUILT_IN_ATOMIC_NAND_FETCH_16:
6770 {
6771 enum built_in_function lib;
6772 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
6773 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
6774 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
6775 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
6776 ignore, lib);
6777 if (target)
6778 return target;
6779 break;
6780 }
6781 case BUILT_IN_ATOMIC_XOR_FETCH_1:
6782 case BUILT_IN_ATOMIC_XOR_FETCH_2:
6783 case BUILT_IN_ATOMIC_XOR_FETCH_4:
6784 case BUILT_IN_ATOMIC_XOR_FETCH_8:
6785 case BUILT_IN_ATOMIC_XOR_FETCH_16:
6786 {
6787 enum built_in_function lib;
6788 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
6789 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
6790 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
6791 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
6792 ignore, lib);
6793 if (target)
6794 return target;
6795 break;
6796 }
6797 case BUILT_IN_ATOMIC_OR_FETCH_1:
6798 case BUILT_IN_ATOMIC_OR_FETCH_2:
6799 case BUILT_IN_ATOMIC_OR_FETCH_4:
6800 case BUILT_IN_ATOMIC_OR_FETCH_8:
6801 case BUILT_IN_ATOMIC_OR_FETCH_16:
6802 {
6803 enum built_in_function lib;
6804 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
6805 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
6806 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
6807 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
6808 ignore, lib);
6809 if (target)
6810 return target;
6811 break;
6812 }
6813 case BUILT_IN_ATOMIC_FETCH_ADD_1:
6814 case BUILT_IN_ATOMIC_FETCH_ADD_2:
6815 case BUILT_IN_ATOMIC_FETCH_ADD_4:
6816 case BUILT_IN_ATOMIC_FETCH_ADD_8:
6817 case BUILT_IN_ATOMIC_FETCH_ADD_16:
6818 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
6819 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
6820 ignore, BUILT_IN_NONE);
6821 if (target)
6822 return target;
6823 break;
6824
6825 case BUILT_IN_ATOMIC_FETCH_SUB_1:
6826 case BUILT_IN_ATOMIC_FETCH_SUB_2:
6827 case BUILT_IN_ATOMIC_FETCH_SUB_4:
6828 case BUILT_IN_ATOMIC_FETCH_SUB_8:
6829 case BUILT_IN_ATOMIC_FETCH_SUB_16:
6830 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
6831 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
6832 ignore, BUILT_IN_NONE);
6833 if (target)
6834 return target;
6835 break;
6836
6837 case BUILT_IN_ATOMIC_FETCH_AND_1:
6838 case BUILT_IN_ATOMIC_FETCH_AND_2:
6839 case BUILT_IN_ATOMIC_FETCH_AND_4:
6840 case BUILT_IN_ATOMIC_FETCH_AND_8:
6841 case BUILT_IN_ATOMIC_FETCH_AND_16:
6842 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
6843 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
6844 ignore, BUILT_IN_NONE);
6845 if (target)
6846 return target;
6847 break;
6848
6849 case BUILT_IN_ATOMIC_FETCH_NAND_1:
6850 case BUILT_IN_ATOMIC_FETCH_NAND_2:
6851 case BUILT_IN_ATOMIC_FETCH_NAND_4:
6852 case BUILT_IN_ATOMIC_FETCH_NAND_8:
6853 case BUILT_IN_ATOMIC_FETCH_NAND_16:
6854 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
6855 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
6856 ignore, BUILT_IN_NONE);
6857 if (target)
6858 return target;
6859 break;
6860
6861 case BUILT_IN_ATOMIC_FETCH_XOR_1:
6862 case BUILT_IN_ATOMIC_FETCH_XOR_2:
6863 case BUILT_IN_ATOMIC_FETCH_XOR_4:
6864 case BUILT_IN_ATOMIC_FETCH_XOR_8:
6865 case BUILT_IN_ATOMIC_FETCH_XOR_16:
6866 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
6867 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
6868 ignore, BUILT_IN_NONE);
6869 if (target)
6870 return target;
6871 break;
6872
6873 case BUILT_IN_ATOMIC_FETCH_OR_1:
6874 case BUILT_IN_ATOMIC_FETCH_OR_2:
6875 case BUILT_IN_ATOMIC_FETCH_OR_4:
6876 case BUILT_IN_ATOMIC_FETCH_OR_8:
6877 case BUILT_IN_ATOMIC_FETCH_OR_16:
6878 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
6879 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
6880 ignore, BUILT_IN_NONE);
6881 if (target)
6882 return target;
6883 break;
6884
6885 case BUILT_IN_ATOMIC_TEST_AND_SET:
6886 return expand_builtin_atomic_test_and_set (exp, target);
6887
6888 case BUILT_IN_ATOMIC_CLEAR:
6889 return expand_builtin_atomic_clear (exp);
6890
6891 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
6892 return expand_builtin_atomic_always_lock_free (exp);
6893
6894 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
6895 target = expand_builtin_atomic_is_lock_free (exp);
6896 if (target)
6897 return target;
6898 break;
6899
6900 case BUILT_IN_ATOMIC_THREAD_FENCE:
6901 expand_builtin_atomic_thread_fence (exp);
6902 return const0_rtx;
6903
6904 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
6905 expand_builtin_atomic_signal_fence (exp);
6906 return const0_rtx;
6907
6908 case BUILT_IN_OBJECT_SIZE:
6909 return expand_builtin_object_size (exp);
6910
6911 case BUILT_IN_MEMCPY_CHK:
6912 case BUILT_IN_MEMPCPY_CHK:
6913 case BUILT_IN_MEMMOVE_CHK:
6914 case BUILT_IN_MEMSET_CHK:
6915 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6916 if (target)
6917 return target;
6918 break;
6919
6920 case BUILT_IN_STRCPY_CHK:
6921 case BUILT_IN_STPCPY_CHK:
6922 case BUILT_IN_STRNCPY_CHK:
6923 case BUILT_IN_STPNCPY_CHK:
6924 case BUILT_IN_STRCAT_CHK:
6925 case BUILT_IN_STRNCAT_CHK:
6926 case BUILT_IN_SNPRINTF_CHK:
6927 case BUILT_IN_VSNPRINTF_CHK:
6928 maybe_emit_chk_warning (exp, fcode);
6929 break;
6930
6931 case BUILT_IN_SPRINTF_CHK:
6932 case BUILT_IN_VSPRINTF_CHK:
6933 maybe_emit_sprintf_chk_warning (exp, fcode);
6934 break;
6935
6936 case BUILT_IN_FREE:
6937 if (warn_free_nonheap_object)
6938 maybe_emit_free_warning (exp);
6939 break;
6940
6941 case BUILT_IN_THREAD_POINTER:
6942 return expand_builtin_thread_pointer (exp, target);
6943
6944 case BUILT_IN_SET_THREAD_POINTER:
6945 expand_builtin_set_thread_pointer (exp);
6946 return const0_rtx;
6947
6948 case BUILT_IN_CILK_DETACH:
6949 expand_builtin_cilk_detach (exp);
6950 return const0_rtx;
6951
6952 case BUILT_IN_CILK_POP_FRAME:
6953 expand_builtin_cilk_pop_frame (exp);
6954 return const0_rtx;
6955
6956 case BUILT_IN_CHKP_INIT_PTR_BOUNDS:
6957 case BUILT_IN_CHKP_NULL_PTR_BOUNDS:
6958 case BUILT_IN_CHKP_COPY_PTR_BOUNDS:
6959 case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS:
6960 case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS:
6961 case BUILT_IN_CHKP_CHECK_PTR_BOUNDS:
6962 case BUILT_IN_CHKP_SET_PTR_BOUNDS:
6963 case BUILT_IN_CHKP_NARROW_PTR_BOUNDS:
6964 case BUILT_IN_CHKP_STORE_PTR_BOUNDS:
6965 case BUILT_IN_CHKP_GET_PTR_LBOUND:
6966 case BUILT_IN_CHKP_GET_PTR_UBOUND:
6967 	/* We allow calls to user CHKP builtins even when Pointer Bounds
6968 	   Checker is off; expand them to their trivial values.  */
6969 if (!chkp_function_instrumented_p (current_function_decl))
6970 {
6971 if (fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS
6972 || fcode == BUILT_IN_CHKP_NARROW_PTR_BOUNDS
6973 || fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
6974 || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
6975 || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
6976 return expand_normal (CALL_EXPR_ARG (exp, 0));
6977 else if (fcode == BUILT_IN_CHKP_GET_PTR_LBOUND)
6978 return expand_normal (size_zero_node);
6979 else if (fcode == BUILT_IN_CHKP_GET_PTR_UBOUND)
6980 return expand_normal (size_int (-1));
6981 else
6982 return const0_rtx;
6983 }
6984 /* FALLTHROUGH */
6985
6986 case BUILT_IN_CHKP_BNDMK:
6987 case BUILT_IN_CHKP_BNDSTX:
6988 case BUILT_IN_CHKP_BNDCL:
6989 case BUILT_IN_CHKP_BNDCU:
6990 case BUILT_IN_CHKP_BNDLDX:
6991 case BUILT_IN_CHKP_BNDRET:
6992 case BUILT_IN_CHKP_INTERSECT:
6993 case BUILT_IN_CHKP_NARROW:
6994 case BUILT_IN_CHKP_EXTRACT_LOWER:
6995 case BUILT_IN_CHKP_EXTRACT_UPPER:
6996 	/* A software implementation of Pointer Bounds Checker is not yet
6997 	   available; target support is required.  */
6998 error ("Your target platform does not support -fcheck-pointer-bounds");
6999 break;
7000
7001 case BUILT_IN_ACC_ON_DEVICE:
7002 /* Do library call, if we failed to expand the builtin when
7003 folding. */
7004 break;
7005
7006 default: /* just do library call, if unknown builtin */
7007 break;
7008 }
7009
7010 /* The switch statement above can drop through to cause the function
7011 to be called normally. */
7012 return expand_call (exp, target, ignore);
7013 }
7014
7015 /* Similar to expand_builtin but is used for instrumented calls. */
7016
7017 rtx
7018 expand_builtin_with_bounds (tree exp, rtx target,
7019 rtx subtarget ATTRIBUTE_UNUSED,
7020 machine_mode mode, int ignore)
7021 {
7022 tree fndecl = get_callee_fndecl (exp);
7023 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7024
7025 gcc_assert (CALL_WITH_BOUNDS_P (exp));
7026
7027 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7028 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7029
7030 gcc_assert (fcode > BEGIN_CHKP_BUILTINS
7031 && fcode < END_CHKP_BUILTINS);
7032
7033 switch (fcode)
7034 {
7035 case BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP:
7036 target = expand_builtin_memcpy_with_bounds (exp, target);
7037 if (target)
7038 return target;
7039 break;
7040
7041 case BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP:
7042 target = expand_builtin_mempcpy_with_bounds (exp, target, mode);
7043 if (target)
7044 return target;
7045 break;
7046
7047 case BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP:
7048 target = expand_builtin_memset_with_bounds (exp, target, mode);
7049 if (target)
7050 return target;
7051 break;
7052
7053 default:
7054 break;
7055 }
7056
7057 /* The switch statement above can drop through to cause the function
7058 to be called normally. */
7059 return expand_call (exp, target, ignore);
7060 }
7061
7062 /* Determine whether a tree node represents a call to a built-in
7063 function. If the tree T is a call to a built-in function with
7064 the right number of arguments of the appropriate types, return
7065 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7066 Otherwise the return value is END_BUILTINS. */
7067
7068 enum built_in_function
7069 builtin_mathfn_code (const_tree t)
7070 {
7071 const_tree fndecl, arg, parmlist;
7072 const_tree argtype, parmtype;
7073 const_call_expr_arg_iterator iter;
7074
7075 if (TREE_CODE (t) != CALL_EXPR
7076 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
7077 return END_BUILTINS;
7078
7079 fndecl = get_callee_fndecl (t);
7080 if (fndecl == NULL_TREE
7081 || TREE_CODE (fndecl) != FUNCTION_DECL
7082 || ! DECL_BUILT_IN (fndecl)
7083 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7084 return END_BUILTINS;
7085
7086 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
7087 init_const_call_expr_arg_iterator (t, &iter);
7088 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
7089 {
7090 /* If a function doesn't take a variable number of arguments,
7091 the last element in the list will have type `void'. */
7092 parmtype = TREE_VALUE (parmlist);
7093 if (VOID_TYPE_P (parmtype))
7094 {
7095 if (more_const_call_expr_args_p (&iter))
7096 return END_BUILTINS;
7097 return DECL_FUNCTION_CODE (fndecl);
7098 }
7099
7100 if (! more_const_call_expr_args_p (&iter))
7101 return END_BUILTINS;
7102
7103 arg = next_const_call_expr_arg (&iter);
7104 argtype = TREE_TYPE (arg);
7105
7106 if (SCALAR_FLOAT_TYPE_P (parmtype))
7107 {
7108 if (! SCALAR_FLOAT_TYPE_P (argtype))
7109 return END_BUILTINS;
7110 }
7111 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7112 {
7113 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7114 return END_BUILTINS;
7115 }
7116 else if (POINTER_TYPE_P (parmtype))
7117 {
7118 if (! POINTER_TYPE_P (argtype))
7119 return END_BUILTINS;
7120 }
7121 else if (INTEGRAL_TYPE_P (parmtype))
7122 {
7123 if (! INTEGRAL_TYPE_P (argtype))
7124 return END_BUILTINS;
7125 }
7126 else
7127 return END_BUILTINS;
7128 }
7129
7130 /* Variable-length argument list. */
7131 return DECL_FUNCTION_CODE (fndecl);
7132 }
7133
7134 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7135 evaluate to a constant. */
7136
7137 static tree
7138 fold_builtin_constant_p (tree arg)
7139 {
7140 /* We return 1 for a numeric type that's known to be a constant
7141 value at compile-time or for an aggregate type that's a
7142 literal constant. */
7143 STRIP_NOPS (arg);
7144
7145   /* If we know this is a constant, return the constant one.  */
7146 if (CONSTANT_CLASS_P (arg)
7147 || (TREE_CODE (arg) == CONSTRUCTOR
7148 && TREE_CONSTANT (arg)))
7149 return integer_one_node;
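  /* The address of a string literal is likewise a compile-time constant,
     either taken directly or as the address of its first element,
     e.g. &"abc"[0].  */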
7150 if (TREE_CODE (arg) == ADDR_EXPR)
7151 {
7152 tree op = TREE_OPERAND (arg, 0);
7153 if (TREE_CODE (op) == STRING_CST
7154 || (TREE_CODE (op) == ARRAY_REF
7155 && integer_zerop (TREE_OPERAND (op, 1))
7156 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7157 return integer_one_node;
7158 }
7159
7160 /* If this expression has side effects, show we don't know it to be a
7161      constant.  Likewise if it's a pointer or aggregate type, since in
7162      those cases we only want literals; those are only optimized
7163      when generating RTL, not later.
7164 And finally, if we are compiling an initializer, not code, we
7165 need to return a definite result now; there's not going to be any
7166 more optimization done. */
7167 if (TREE_SIDE_EFFECTS (arg)
7168 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7169 || POINTER_TYPE_P (TREE_TYPE (arg))
7170 || cfun == 0
7171 || folding_initializer
7172 || force_folding_builtin_constant_p)
7173 return integer_zero_node;
7174
7175 return NULL_TREE;
7176 }
7177
7178 /* Create builtin_expect with PRED and EXPECTED (and, when non-null,
7179    PREDICTOR) as its arguments and return it as a truthvalue.  */
7180
7181 static tree
7182 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
7183 tree predictor)
7184 {
7185 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7186
7187 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
7188 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7189 ret_type = TREE_TYPE (TREE_TYPE (fn));
7190 pred_type = TREE_VALUE (arg_types);
7191 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7192
7193 pred = fold_convert_loc (loc, pred_type, pred);
7194 expected = fold_convert_loc (loc, expected_type, expected);
7195 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
7196 predictor);
7197
7198 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7199 build_int_cst (ret_type, 0));
7200 }
7201
7202 /* Fold a call to builtin_expect with arguments ARG0, ARG1 and ARG2.  Return
7203 NULL_TREE if no simplification is possible. */
7204
7205 tree
7206 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
7207 {
7208 tree inner, fndecl, inner_arg0;
7209 enum tree_code code;
7210
7211 /* Distribute the expected value over short-circuiting operators.
7212 See through the cast from truthvalue_type_node to long. */
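  /* E.g. __builtin_expect (a && b, 0) is rewritten below as
     __builtin_expect (a, 0) && __builtin_expect (b, 0).  */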
7213 inner_arg0 = arg0;
7214 while (CONVERT_EXPR_P (inner_arg0)
7215 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
7216 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
7217 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
7218
7219 /* If this is a builtin_expect within a builtin_expect keep the
7220 inner one. See through a comparison against a constant. It
7221      might have been added to create a truthvalue.  */
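  /* E.g. for __builtin_expect (__builtin_expect (x, 1) != 0, 1) the
     outer call folds away and the inner one is kept.  */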
7222 inner = inner_arg0;
7223
7224 if (COMPARISON_CLASS_P (inner)
7225 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7226 inner = TREE_OPERAND (inner, 0);
7227
7228 if (TREE_CODE (inner) == CALL_EXPR
7229 && (fndecl = get_callee_fndecl (inner))
7230 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7231 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7232 return arg0;
7233
7234 inner = inner_arg0;
7235 code = TREE_CODE (inner);
7236 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7237 {
7238 tree op0 = TREE_OPERAND (inner, 0);
7239 tree op1 = TREE_OPERAND (inner, 1);
7240
7241 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
7242 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
7243 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7244
7245 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
7246 }
7247
7248 /* If the argument isn't invariant then there's nothing else we can do. */
7249 if (!TREE_CONSTANT (inner_arg0))
7250 return NULL_TREE;
7251
7252 /* If we expect that a comparison against the argument will fold to
7253 a constant return the constant. In practice, this means a true
7254 constant or the address of a non-weak symbol. */
7255 inner = inner_arg0;
7256 STRIP_NOPS (inner);
7257 if (TREE_CODE (inner) == ADDR_EXPR)
7258 {
7259 do
7260 {
7261 inner = TREE_OPERAND (inner, 0);
7262 }
7263 while (TREE_CODE (inner) == COMPONENT_REF
7264 || TREE_CODE (inner) == ARRAY_REF);
7265 if ((TREE_CODE (inner) == VAR_DECL
7266 || TREE_CODE (inner) == FUNCTION_DECL)
7267 && DECL_WEAK (inner))
7268 return NULL_TREE;
7269 }
7270
7271 /* Otherwise, ARG0 already has the proper type for the return value. */
7272 return arg0;
7273 }
7274
7275 /* Fold a call to __builtin_classify_type with argument ARG. */
7276
7277 static tree
7278 fold_builtin_classify_type (tree arg)
7279 {
7280 if (arg == 0)
7281 return build_int_cst (integer_type_node, no_type_class);
7282
7283 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
7284 }
7285
7286 /* Fold a call to __builtin_strlen with argument ARG. */
7287
7288 static tree
7289 fold_builtin_strlen (location_t loc, tree type, tree arg)
7290 {
7291 if (!validate_arg (arg, POINTER_TYPE))
7292 return NULL_TREE;
7293 else
7294 {
7295 tree len = c_strlen (arg, 0);
7296
7297 if (len)
7298 return fold_convert_loc (loc, type, len);
7299
7300 return NULL_TREE;
7301 }
7302 }
7303
7304 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7305
7306 static tree
7307 fold_builtin_inf (location_t loc, tree type, int warn)
7308 {
7309 REAL_VALUE_TYPE real;
7310
7311 /* __builtin_inff is intended to be usable to define INFINITY on all
7312 targets. If an infinity is not available, INFINITY expands "to a
7313 positive constant of type float that overflows at translation
7314 time", footnote "In this case, using INFINITY will violate the
7315 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7316 Thus we pedwarn to ensure this constraint violation is
7317 diagnosed. */
7318 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7319 pedwarn (loc, 0, "target format does not support infinity");
7320
7321 real_inf (&real);
7322 return build_real (type, real);
7323 }
7324
7325 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
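/* QUIET selects a quiet NaN for nan and a signaling one for nans; the
   string ARG, when real_nan can parse it, supplies the NaN significand.  */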
7326
7327 static tree
7328 fold_builtin_nan (tree arg, tree type, int quiet)
7329 {
7330 REAL_VALUE_TYPE real;
7331 const char *str;
7332
7333 if (!validate_arg (arg, POINTER_TYPE))
7334 return NULL_TREE;
7335 str = c_getstr (arg);
7336 if (!str)
7337 return NULL_TREE;
7338
7339 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7340 return NULL_TREE;
7341
7342 return build_real (type, real);
7343 }
7344
7345 /* Return true if the floating point expression T has an integer value.
7346 We also allow +Inf, -Inf and NaN to be considered integer values. */
7347
7348 static bool
7349 integer_valued_real_p (tree t)
7350 {
7351 switch (TREE_CODE (t))
7352 {
7353 case FLOAT_EXPR:
7354 return true;
7355
7356 case ABS_EXPR:
7357 case SAVE_EXPR:
7358 return integer_valued_real_p (TREE_OPERAND (t, 0));
7359
7360 case COMPOUND_EXPR:
7361 case MODIFY_EXPR:
7362 case BIND_EXPR:
7363 return integer_valued_real_p (TREE_OPERAND (t, 1));
7364
7365 case PLUS_EXPR:
7366 case MINUS_EXPR:
7367 case MULT_EXPR:
7368 case MIN_EXPR:
7369 case MAX_EXPR:
7370 return integer_valued_real_p (TREE_OPERAND (t, 0))
7371 && integer_valued_real_p (TREE_OPERAND (t, 1));
7372
7373 case COND_EXPR:
7374 return integer_valued_real_p (TREE_OPERAND (t, 1))
7375 && integer_valued_real_p (TREE_OPERAND (t, 2));
7376
7377 case REAL_CST:
7378 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
7379
7380 CASE_CONVERT:
7381 {
7382 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7383 if (TREE_CODE (type) == INTEGER_TYPE)
7384 return true;
7385 if (TREE_CODE (type) == REAL_TYPE)
7386 return integer_valued_real_p (TREE_OPERAND (t, 0));
7387 break;
7388 }
7389
7390 case CALL_EXPR:
7391 switch (builtin_mathfn_code (t))
7392 {
7393 CASE_FLT_FN (BUILT_IN_CEIL):
7394 CASE_FLT_FN (BUILT_IN_FLOOR):
7395 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7396 CASE_FLT_FN (BUILT_IN_RINT):
7397 CASE_FLT_FN (BUILT_IN_ROUND):
7398 CASE_FLT_FN (BUILT_IN_TRUNC):
7399 return true;
7400
7401 CASE_FLT_FN (BUILT_IN_FMIN):
7402 CASE_FLT_FN (BUILT_IN_FMAX):
7403 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7404 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
7405
7406 default:
7407 break;
7408 }
7409 break;
7410
7411 default:
7412 break;
7413 }
7414 return false;
7415 }
7416
7417 /* FNDECL is assumed to be a builtin where truncation can be propagated
7418    across (for instance floor((double)f) == (double)floorf (f)).
7419 Do the transformation for a call with argument ARG. */
7420
7421 static tree
7422 fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
7423 {
7424 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7425
7426 if (!validate_arg (arg, REAL_TYPE))
7427 return NULL_TREE;
7428
7429 /* Integer rounding functions are idempotent. */
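  /* E.g. trunc (trunc (x)) folds to the inner trunc (x).  */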
7430 if (fcode == builtin_mathfn_code (arg))
7431 return arg;
7432
7433 /* If argument is already integer valued, and we don't need to worry
7434 about setting errno, there's no need to perform rounding. */
7435 if (! flag_errno_math && integer_valued_real_p (arg))
7436 return arg;
7437
7438 if (optimize)
7439 {
7440 tree arg0 = strip_float_extensions (arg);
7441 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7442 tree newtype = TREE_TYPE (arg0);
7443 tree decl;
7444
7445 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7446 && (decl = mathfn_built_in (newtype, fcode)))
7447 return fold_convert_loc (loc, ftype,
7448 build_call_expr_loc (loc, decl, 1,
7449 fold_convert_loc (loc,
7450 newtype,
7451 arg0)));
7452 }
7453 return NULL_TREE;
7454 }
7455
7456 /* FNDECL is assumed to be a builtin which can narrow the FP type of
7457 the argument, for instance lround((double)f) -> lroundf (f).
7458 Do the transformation for a call with argument ARG. */
7459
7460 static tree
7461 fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
7462 {
7463 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7464
7465 if (!validate_arg (arg, REAL_TYPE))
7466 return NULL_TREE;
7467
7468 /* If argument is already integer valued, and we don't need to worry
7469 about setting errno, there's no need to perform rounding. */
7470 if (! flag_errno_math && integer_valued_real_p (arg))
7471 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7472 TREE_TYPE (TREE_TYPE (fndecl)), arg);
7473
7474 if (optimize)
7475 {
7476 tree ftype = TREE_TYPE (arg);
7477 tree arg0 = strip_float_extensions (arg);
7478 tree newtype = TREE_TYPE (arg0);
7479 tree decl;
7480
7481 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7482 && (decl = mathfn_built_in (newtype, fcode)))
7483 return build_call_expr_loc (loc, decl, 1,
7484 fold_convert_loc (loc, newtype, arg0));
7485 }
7486
7487 /* Canonicalize iround (x) to lround (x) on ILP32 targets where
7488 sizeof (int) == sizeof (long). */
7489 if (TYPE_PRECISION (integer_type_node)
7490 == TYPE_PRECISION (long_integer_type_node))
7491 {
7492 tree newfn = NULL_TREE;
7493 switch (fcode)
7494 {
7495 CASE_FLT_FN (BUILT_IN_ICEIL):
7496 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7497 break;
7498
7499 CASE_FLT_FN (BUILT_IN_IFLOOR):
7500 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7501 break;
7502
7503 CASE_FLT_FN (BUILT_IN_IROUND):
7504 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7505 break;
7506
7507 CASE_FLT_FN (BUILT_IN_IRINT):
7508 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7509 break;
7510
7511 default:
7512 break;
7513 }
7514
7515 if (newfn)
7516 {
7517 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7518 return fold_convert_loc (loc,
7519 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7520 }
7521 }
7522
7523 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7524 sizeof (long long) == sizeof (long). */
7525 if (TYPE_PRECISION (long_long_integer_type_node)
7526 == TYPE_PRECISION (long_integer_type_node))
7527 {
7528 tree newfn = NULL_TREE;
7529 switch (fcode)
7530 {
7531 CASE_FLT_FN (BUILT_IN_LLCEIL):
7532 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7533 break;
7534
7535 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7536 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7537 break;
7538
7539 CASE_FLT_FN (BUILT_IN_LLROUND):
7540 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7541 break;
7542
7543 CASE_FLT_FN (BUILT_IN_LLRINT):
7544 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7545 break;
7546
7547 default:
7548 break;
7549 }
7550
7551 if (newfn)
7552 {
7553 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7554 return fold_convert_loc (loc,
7555 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7556 }
7557 }
7558
7559 return NULL_TREE;
7560 }
7561
7562 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7563 return type. Return NULL_TREE if no simplification can be made. */
7564
7565 static tree
7566 fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
7567 {
7568 tree res;
7569
7570 if (!validate_arg (arg, COMPLEX_TYPE)
7571 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7572 return NULL_TREE;
7573
7574 /* Calculate the result when the argument is a constant. */
7575 if (TREE_CODE (arg) == COMPLEX_CST
7576 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7577 type, mpfr_hypot)))
7578 return res;
7579
7580 if (TREE_CODE (arg) == COMPLEX_EXPR)
7581 {
7582 tree real = TREE_OPERAND (arg, 0);
7583 tree imag = TREE_OPERAND (arg, 1);
7584
7585 /* If either part is zero, cabs is fabs of the other. */
7586 if (real_zerop (real))
7587 return fold_build1_loc (loc, ABS_EXPR, type, imag);
7588 if (real_zerop (imag))
7589 return fold_build1_loc (loc, ABS_EXPR, type, real);
7590
7591 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7592 if (flag_unsafe_math_optimizations
7593 && operand_equal_p (real, imag, OEP_PURE_SAME))
7594 {
7595 const REAL_VALUE_TYPE sqrt2_trunc
7596 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7597 STRIP_NOPS (real);
7598 return fold_build2_loc (loc, MULT_EXPR, type,
7599 fold_build1_loc (loc, ABS_EXPR, type, real),
7600 build_real (type, sqrt2_trunc));
7601 }
7602 }
7603
7604 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7605 if (TREE_CODE (arg) == NEGATE_EXPR
7606 || TREE_CODE (arg) == CONJ_EXPR)
7607 return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));
7608
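  /* Otherwise cabs (z) can be expanded inline as sqrt (x*x + y*y),
     where x and y are the real and imaginary parts of z.  */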
7609 /* Don't do this when optimizing for size. */
7610 if (flag_unsafe_math_optimizations
7611 && optimize && optimize_function_for_speed_p (cfun))
7612 {
7613 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7614
7615 if (sqrtfn != NULL_TREE)
7616 {
7617 tree rpart, ipart, result;
7618
7619 arg = builtin_save_expr (arg);
7620
7621 rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
7622 ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);
7623
7624 rpart = builtin_save_expr (rpart);
7625 ipart = builtin_save_expr (ipart);
7626
7627 result = fold_build2_loc (loc, PLUS_EXPR, type,
7628 fold_build2_loc (loc, MULT_EXPR, type,
7629 rpart, rpart),
7630 fold_build2_loc (loc, MULT_EXPR, type,
7631 ipart, ipart));
7632
7633 return build_call_expr_loc (loc, sqrtfn, 1, result);
7634 }
7635 }
7636
7637 return NULL_TREE;
7638 }
7639
7640 /* Build a complex (inf +- 0i) for the result of cproj. TYPE is the
7641 complex tree type of the result. If NEG is true, the imaginary
7642 zero is negative. */
7643
7644 static tree
7645 build_complex_cproj (tree type, bool neg)
7646 {
7647 REAL_VALUE_TYPE rinf, rzero = dconst0;
7648
7649 real_inf (&rinf);
7650 rzero.sign = neg;
7651 return build_complex (type, build_real (TREE_TYPE (type), rinf),
7652 build_real (TREE_TYPE (type), rzero));
7653 }
7654
7655 /* Fold call to builtin cproj, cprojf or cprojl with argument ARG. TYPE is the
7656 return type. Return NULL_TREE if no simplification can be made. */
7657
7658 static tree
7659 fold_builtin_cproj (location_t loc, tree arg, tree type)
7660 {
7661 if (!validate_arg (arg, COMPLEX_TYPE)
7662 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7663 return NULL_TREE;
7664
7665 /* If there are no infinities, return arg. */
7666 if (! HONOR_INFINITIES (type))
7667 return non_lvalue_loc (loc, arg);
7668
7669 /* Calculate the result when the argument is a constant. */
7670 if (TREE_CODE (arg) == COMPLEX_CST)
7671 {
7672 const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
7673 const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
7674
7675 if (real_isinf (real) || real_isinf (imag))
7676 return build_complex_cproj (type, imag->sign);
7677 else
7678 return arg;
7679 }
7680 else if (TREE_CODE (arg) == COMPLEX_EXPR)
7681 {
7682 tree real = TREE_OPERAND (arg, 0);
7683 tree imag = TREE_OPERAND (arg, 1);
7684
7685 STRIP_NOPS (real);
7686 STRIP_NOPS (imag);
7687
7688 /* If the real part is inf and the imag part is known to be
7689 nonnegative, return (inf + 0i). Remember side-effects are
7690 possible in the imag part. */
7691 if (TREE_CODE (real) == REAL_CST
7692 && real_isinf (TREE_REAL_CST_PTR (real))
7693 && tree_expr_nonnegative_p (imag))
7694 return omit_one_operand_loc (loc, type,
7695 build_complex_cproj (type, false),
7696 arg);
7697
7698 /* If the imag part is inf, return (inf+I*copysign(0,imag)).
7699 Remember side-effects are possible in the real part. */
7700 if (TREE_CODE (imag) == REAL_CST
7701 && real_isinf (TREE_REAL_CST_PTR (imag)))
7702 return
7703 omit_one_operand_loc (loc, type,
7704 build_complex_cproj (type, TREE_REAL_CST_PTR
7705 (imag)->sign), arg);
7706 }
7707
7708 return NULL_TREE;
7709 }
7710
7711 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7712 Return NULL_TREE if no simplification can be made. */
7713
7714 static tree
7715 fold_builtin_sqrt (location_t loc, tree arg, tree type)
7716 {
7717
7718 enum built_in_function fcode;
7719 tree res;
7720
7721 if (!validate_arg (arg, REAL_TYPE))
7722 return NULL_TREE;
7723
7724 /* Calculate the result when the argument is a constant. */
7725 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7726 return res;
7727
7728 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7729 fcode = builtin_mathfn_code (arg);
7730 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7731 {
7732 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7733 arg = fold_build2_loc (loc, MULT_EXPR, type,
7734 CALL_EXPR_ARG (arg, 0),
7735 build_real (type, dconsthalf));
7736 return build_call_expr_loc (loc, expfn, 1, arg);
7737 }
7738
7739 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7740 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7741 {
7742 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7743
7744 if (powfn)
7745 {
7746 tree arg0 = CALL_EXPR_ARG (arg, 0);
7747 tree tree_root;
7748 /* The inner root was either sqrt or cbrt. */
7749 /* This was a conditional expression but it triggered a bug
7750 in Sun C 5.5. */
7751 REAL_VALUE_TYPE dconstroot;
7752 if (BUILTIN_SQRT_P (fcode))
7753 dconstroot = dconsthalf;
7754 else
7755 dconstroot = dconst_third ();
7756
7757 /* Adjust for the outer root. */
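	  /* Decrementing the binary exponent halves the value:
	     1/2 becomes 1/4 and 1/3 becomes 1/6.  */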
7758 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7759 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7760 tree_root = build_real (type, dconstroot);
7761 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7762 }
7763 }
7764
7765 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7766 if (flag_unsafe_math_optimizations
7767 && (fcode == BUILT_IN_POW
7768 || fcode == BUILT_IN_POWF
7769 || fcode == BUILT_IN_POWL))
7770 {
7771 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7772 tree arg0 = CALL_EXPR_ARG (arg, 0);
7773 tree arg1 = CALL_EXPR_ARG (arg, 1);
7774 tree narg1;
7775 if (!tree_expr_nonnegative_p (arg0))
7776 arg0 = build1 (ABS_EXPR, type, arg0);
7777 narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7778 build_real (type, dconsthalf));
7779 return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
7780 }
7781
7782 return NULL_TREE;
7783 }
7784
7785 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7786 Return NULL_TREE if no simplification can be made. */
7787
7788 static tree
7789 fold_builtin_cbrt (location_t loc, tree arg, tree type)
7790 {
7791 const enum built_in_function fcode = builtin_mathfn_code (arg);
7792 tree res;
7793
7794 if (!validate_arg (arg, REAL_TYPE))
7795 return NULL_TREE;
7796
7797 /* Calculate the result when the argument is a constant. */
7798 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7799 return res;
7800
7801 if (flag_unsafe_math_optimizations)
7802 {
7803 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7804 if (BUILTIN_EXPONENT_P (fcode))
7805 {
7806 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7807 const REAL_VALUE_TYPE third_trunc =
7808 real_value_truncate (TYPE_MODE (type), dconst_third ());
7809 arg = fold_build2_loc (loc, MULT_EXPR, type,
7810 CALL_EXPR_ARG (arg, 0),
7811 build_real (type, third_trunc));
7812 return build_call_expr_loc (loc, expfn, 1, arg);
7813 }
7814
7815 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7816 if (BUILTIN_SQRT_P (fcode))
7817 {
7818 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7819
7820 if (powfn)
7821 {
7822 tree arg0 = CALL_EXPR_ARG (arg, 0);
7823 tree tree_root;
7824 REAL_VALUE_TYPE dconstroot = dconst_third ();
7825
7826 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7827 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7828 tree_root = build_real (type, dconstroot);
7829 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7830 }
7831 }
7832
7833 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7834 if (BUILTIN_CBRT_P (fcode))
7835 {
7836 tree arg0 = CALL_EXPR_ARG (arg, 0);
7837 if (tree_expr_nonnegative_p (arg0))
7838 {
7839 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7840
7841 if (powfn)
7842 {
7843 tree tree_root;
7844 REAL_VALUE_TYPE dconstroot;
7845
7846 real_arithmetic (&dconstroot, MULT_EXPR,
7847 dconst_third_ptr (), dconst_third_ptr ());
7848 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7849 tree_root = build_real (type, dconstroot);
7850 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7851 }
7852 }
7853 }
7854
7855 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7856 if (fcode == BUILT_IN_POW
7857 || fcode == BUILT_IN_POWF
7858 || fcode == BUILT_IN_POWL)
7859 {
7860 tree arg00 = CALL_EXPR_ARG (arg, 0);
7861 tree arg01 = CALL_EXPR_ARG (arg, 1);
7862 if (tree_expr_nonnegative_p (arg00))
7863 {
7864 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7865 const REAL_VALUE_TYPE dconstroot
7866 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7867 tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
7868 build_real (type, dconstroot));
7869 return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
7870 }
7871 }
7872 }
7873 return NULL_TREE;
7874 }
7875
7876 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7877 TYPE is the type of the return value. Return NULL_TREE if no
7878 simplification can be made. */
7879
7880 static tree
7881 fold_builtin_cos (location_t loc,
7882 tree arg, tree type, tree fndecl)
7883 {
7884 tree res, narg;
7885
7886 if (!validate_arg (arg, REAL_TYPE))
7887 return NULL_TREE;
7888
7889 /* Calculate the result when the argument is a constant. */
7890 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7891 return res;
7892
7893 /* Optimize cos(-x) into cos (x). */
7894 if ((narg = fold_strip_sign_ops (arg)))
7895 return build_call_expr_loc (loc, fndecl, 1, narg);
7896
7897 return NULL_TREE;
7898 }
7899
7900 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7901 Return NULL_TREE if no simplification can be made. */
7902
7903 static tree
7904 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
7905 {
7906 if (validate_arg (arg, REAL_TYPE))
7907 {
7908 tree res, narg;
7909
7910 /* Calculate the result when the argument is a constant. */
7911 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7912 return res;
7913
7914 /* Optimize cosh(-x) into cosh (x). */
7915 if ((narg = fold_strip_sign_ops (arg)))
7916 return build_call_expr_loc (loc, fndecl, 1, narg);
7917 }
7918
7919 return NULL_TREE;
7920 }
7921
7922 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7923 argument ARG. TYPE is the type of the return value. Return
7924 NULL_TREE if no simplification can be made. */
7925
7926 static tree
7927 fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
7928 bool hyper)
7929 {
7930 if (validate_arg (arg, COMPLEX_TYPE)
7931 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7932 {
7933 tree tmp;
7934
7935 /* Calculate the result when the argument is a constant. */
7936 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
7937 return tmp;
7938
7939 /* Optimize fn(-x) into fn(x). */
7940 if ((tmp = fold_strip_sign_ops (arg)))
7941 return build_call_expr_loc (loc, fndecl, 1, tmp);
7942 }
7943
7944 return NULL_TREE;
7945 }
7946
7947 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7948 Return NULL_TREE if no simplification can be made. */
7949
7950 static tree
7951 fold_builtin_tan (tree arg, tree type)
7952 {
7953 enum built_in_function fcode;
7954 tree res;
7955
7956 if (!validate_arg (arg, REAL_TYPE))
7957 return NULL_TREE;
7958
7959 /* Calculate the result when the argument is a constant. */
7960 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7961 return res;
7962
7963 /* Optimize tan(atan(x)) = x. */
7964 fcode = builtin_mathfn_code (arg);
7965 if (flag_unsafe_math_optimizations
7966 && (fcode == BUILT_IN_ATAN
7967 || fcode == BUILT_IN_ATANF
7968 || fcode == BUILT_IN_ATANL))
7969 return CALL_EXPR_ARG (arg, 0);
7970
7971 return NULL_TREE;
7972 }
7973
7974 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7975 NULL_TREE if no simplification can be made. */
7976
7977 static tree
7978 fold_builtin_sincos (location_t loc,
7979 tree arg0, tree arg1, tree arg2)
7980 {
7981 tree type;
7982 tree res, fn, call;
7983
7984 if (!validate_arg (arg0, REAL_TYPE)
7985 || !validate_arg (arg1, POINTER_TYPE)
7986 || !validate_arg (arg2, POINTER_TYPE))
7987 return NULL_TREE;
7988
7989 type = TREE_TYPE (arg0);
7990
7991 /* Calculate the result when the argument is a constant. */
7992 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7993 return res;
7994
7995 /* Canonicalize sincos to cexpi. */
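  /* cexpi (x) computes cos (x) + I*sin (x), so at the source level
     sincos (x, &s, &c) becomes tmp = cexpi (x), s = __imag__ tmp,
     c = __real__ tmp.  */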
7996 if (!targetm.libc_has_function (function_c99_math_complex))
7997 return NULL_TREE;
7998 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7999 if (!fn)
8000 return NULL_TREE;
8001
8002 call = build_call_expr_loc (loc, fn, 1, arg0);
8003 call = builtin_save_expr (call);
8004
8005 return build2 (COMPOUND_EXPR, void_type_node,
8006 build2 (MODIFY_EXPR, void_type_node,
8007 build_fold_indirect_ref_loc (loc, arg1),
8008 build1 (IMAGPART_EXPR, type, call)),
8009 build2 (MODIFY_EXPR, void_type_node,
8010 build_fold_indirect_ref_loc (loc, arg2),
8011 build1 (REALPART_EXPR, type, call)));
8012 }
8013
8014 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
8015 NULL_TREE if no simplification can be made. */
8016
8017 static tree
8018 fold_builtin_cexp (location_t loc, tree arg0, tree type)
8019 {
8020 tree rtype;
8021 tree realp, imagp, ifn;
8022 tree res;
8023
8024 if (!validate_arg (arg0, COMPLEX_TYPE)
8025 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
8026 return NULL_TREE;
8027
8028 /* Calculate the result when the argument is a constant. */
8029 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
8030 return res;
8031
8032 rtype = TREE_TYPE (TREE_TYPE (arg0));
8033
8034   /* If we can figure out the real part of arg0 and it is constant zero,
8035      fold to cexpi.  */
8036 if (!targetm.libc_has_function (function_c99_math_complex))
8037 return NULL_TREE;
8038 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
8039 if (!ifn)
8040 return NULL_TREE;
8041
8042 if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
8043 && real_zerop (realp))
8044 {
8045 tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
8046 return build_call_expr_loc (loc, ifn, 1, narg);
8047 }
8048
8049   /* If we can easily decompose the real and imaginary parts, split cexp
8050      into exp (r) * cexpi (i).  */
8051 if (flag_unsafe_math_optimizations
8052 && realp)
8053 {
8054 tree rfn, rcall, icall;
8055
8056 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
8057 if (!rfn)
8058 return NULL_TREE;
8059
8060 imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
8061 if (!imagp)
8062 return NULL_TREE;
8063
8064 icall = build_call_expr_loc (loc, ifn, 1, imagp);
8065 icall = builtin_save_expr (icall);
8066 rcall = build_call_expr_loc (loc, rfn, 1, realp);
8067 rcall = builtin_save_expr (rcall);
8068 return fold_build2_loc (loc, COMPLEX_EXPR, type,
8069 fold_build2_loc (loc, MULT_EXPR, rtype,
8070 rcall,
8071 fold_build1_loc (loc, REALPART_EXPR,
8072 rtype, icall)),
8073 fold_build2_loc (loc, MULT_EXPR, rtype,
8074 rcall,
8075 fold_build1_loc (loc, IMAGPART_EXPR,
8076 rtype, icall)));
8077 }
8078
8079 return NULL_TREE;
8080 }
8081
8082 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
8083 Return NULL_TREE if no simplification can be made. */
8084
8085 static tree
8086 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
8087 {
8088 if (!validate_arg (arg, REAL_TYPE))
8089 return NULL_TREE;
8090
8091 /* Optimize trunc of constant value. */
8092 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8093 {
8094 REAL_VALUE_TYPE r, x;
8095 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8096
8097 x = TREE_REAL_CST (arg);
8098 real_trunc (&r, TYPE_MODE (type), &x);
8099 return build_real (type, r);
8100 }
8101
8102 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8103 }
8104
8105 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
8106 Return NULL_TREE if no simplification can be made. */
8107
8108 static tree
8109 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
8110 {
8111 if (!validate_arg (arg, REAL_TYPE))
8112 return NULL_TREE;
8113
8114 /* Optimize floor of constant value. */
8115 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8116 {
8117 REAL_VALUE_TYPE x;
8118
8119 x = TREE_REAL_CST (arg);
8120 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8121 {
8122 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8123 REAL_VALUE_TYPE r;
8124
8125 real_floor (&r, TYPE_MODE (type), &x);
8126 return build_real (type, r);
8127 }
8128 }
8129
8130 /* Fold floor (x) where x is nonnegative to trunc (x). */
8131 if (tree_expr_nonnegative_p (arg))
8132 {
8133 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
8134 if (truncfn)
8135 return build_call_expr_loc (loc, truncfn, 1, arg);
8136 }
8137
8138 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8139 }
8140
8141 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
8142 Return NULL_TREE if no simplification can be made. */
8143
8144 static tree
8145 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
8146 {
8147 if (!validate_arg (arg, REAL_TYPE))
8148 return NULL_TREE;
8149
8150 /* Optimize ceil of constant value. */
8151 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8152 {
8153 REAL_VALUE_TYPE x;
8154
8155 x = TREE_REAL_CST (arg);
8156 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8157 {
8158 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8159 REAL_VALUE_TYPE r;
8160
8161 real_ceil (&r, TYPE_MODE (type), &x);
8162 return build_real (type, r);
8163 }
8164 }
8165
8166 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8167 }
8168
8169 /* Fold function call to builtin round, roundf or roundl with argument ARG.
8170 Return NULL_TREE if no simplification can be made. */
8171
8172 static tree
8173 fold_builtin_round (location_t loc, tree fndecl, tree arg)
8174 {
8175 if (!validate_arg (arg, REAL_TYPE))
8176 return NULL_TREE;
8177
8178 /* Optimize round of constant value. */
8179 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8180 {
8181 REAL_VALUE_TYPE x;
8182
8183 x = TREE_REAL_CST (arg);
8184 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8185 {
8186 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8187 REAL_VALUE_TYPE r;
8188
8189 real_round (&r, TYPE_MODE (type), &x);
8190 return build_real (type, r);
8191 }
8192 }
8193
8194 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8195 }
8196
8197 /* Fold function call to builtin lround, lroundf or lroundl (or the
8198 corresponding long long versions) and other rounding functions. ARG
8199 is the argument to the call. Return NULL_TREE if no simplification
8200 can be made. */
8201
8202 static tree
8203 fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
8204 {
8205 if (!validate_arg (arg, REAL_TYPE))
8206 return NULL_TREE;
8207
8208 /* Optimize lround of constant value. */
8209 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8210 {
8211 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
8212
8213 if (real_isfinite (&x))
8214 {
8215 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
8216 tree ftype = TREE_TYPE (arg);
8217 REAL_VALUE_TYPE r;
8218 bool fail = false;
8219
8220 switch (DECL_FUNCTION_CODE (fndecl))
8221 {
8222 CASE_FLT_FN (BUILT_IN_IFLOOR):
8223 CASE_FLT_FN (BUILT_IN_LFLOOR):
8224 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8225 real_floor (&r, TYPE_MODE (ftype), &x);
8226 break;
8227
8228 CASE_FLT_FN (BUILT_IN_ICEIL):
8229 CASE_FLT_FN (BUILT_IN_LCEIL):
8230 CASE_FLT_FN (BUILT_IN_LLCEIL):
8231 real_ceil (&r, TYPE_MODE (ftype), &x);
8232 break;
8233
8234 CASE_FLT_FN (BUILT_IN_IROUND):
8235 CASE_FLT_FN (BUILT_IN_LROUND):
8236 CASE_FLT_FN (BUILT_IN_LLROUND):
8237 real_round (&r, TYPE_MODE (ftype), &x);
8238 break;
8239
8240 default:
8241 gcc_unreachable ();
8242 }
8243
8244 wide_int val = real_to_integer (&r, &fail, TYPE_PRECISION (itype));
8245 if (!fail)
8246 return wide_int_to_tree (itype, val);
8247 }
8248 }
8249
8250 switch (DECL_FUNCTION_CODE (fndecl))
8251 {
8252 CASE_FLT_FN (BUILT_IN_LFLOOR):
8253 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8254 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
8255 if (tree_expr_nonnegative_p (arg))
8256 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
8257 TREE_TYPE (TREE_TYPE (fndecl)), arg);
8258 break;
8259 default:;
8260 }
8261
8262 return fold_fixed_mathfn (loc, fndecl, arg);
8263 }
8264
8265 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
8266    and their long and long long variants (e.g. ffsl and ffsll).  ARG is
8267 the argument to the call. Return NULL_TREE if no simplification can
8268 be made. */
8269
8270 static tree
8271 fold_builtin_bitop (tree fndecl, tree arg)
8272 {
8273 if (!validate_arg (arg, INTEGER_TYPE))
8274 return NULL_TREE;
8275
8276 /* Optimize for constant argument. */
8277 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8278 {
8279 tree type = TREE_TYPE (arg);
8280 int result;
8281
8282 switch (DECL_FUNCTION_CODE (fndecl))
8283 {
8284 CASE_INT_FN (BUILT_IN_FFS):
8285 result = wi::ffs (arg);
8286 break;
8287
8288 CASE_INT_FN (BUILT_IN_CLZ):
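	  /* clz (0) is undefined; use the target-defined value when
	     CLZ_DEFINED_VALUE_AT_ZERO supplies one (the macro sets RESULT
	     itself), otherwise fall back to the type precision.  */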
8289 if (wi::ne_p (arg, 0))
8290 result = wi::clz (arg);
8291 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8292 result = TYPE_PRECISION (type);
8293 break;
8294
8295 CASE_INT_FN (BUILT_IN_CTZ):
8296 if (wi::ne_p (arg, 0))
8297 result = wi::ctz (arg);
8298 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8299 result = TYPE_PRECISION (type);
8300 break;
8301
8302 CASE_INT_FN (BUILT_IN_CLRSB):
8303 result = wi::clrsb (arg);
8304 break;
8305
8306 CASE_INT_FN (BUILT_IN_POPCOUNT):
8307 result = wi::popcount (arg);
8308 break;
8309
8310 CASE_INT_FN (BUILT_IN_PARITY):
8311 result = wi::parity (arg);
8312 break;
8313
8314 default:
8315 gcc_unreachable ();
8316 }
8317
8318 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
8319 }
8320
8321 return NULL_TREE;
8322 }
8323
8324 /* Fold a call to __builtin_bswap16, __builtin_bswap32 or __builtin_bswap64.
8325    Return NULL_TREE if no simplification can be made.  */
8326 static tree
8327 fold_builtin_bswap (tree fndecl, tree arg)
8328 {
8329 if (! validate_arg (arg, INTEGER_TYPE))
8330 return NULL_TREE;
8331
8332 /* Optimize constant value. */
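      /* E.g. __builtin_bswap32 (0x12345678) folds to 0x78563412.  */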
8333 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8334 {
8335 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8336
8337 switch (DECL_FUNCTION_CODE (fndecl))
8338 {
8339 case BUILT_IN_BSWAP16:
8340 case BUILT_IN_BSWAP32:
8341 case BUILT_IN_BSWAP64:
8342 {
8343 signop sgn = TYPE_SIGN (type);
8344 tree result =
8345 wide_int_to_tree (type,
8346 wide_int::from (arg, TYPE_PRECISION (type),
8347 sgn).bswap ());
8348 return result;
8349 }
8350 default:
8351 gcc_unreachable ();
8352 }
8353 }
8354
8355 return NULL_TREE;
8356 }
8357
8358 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8359 NULL_TREE if no simplification can be made. */
8360
8361 static tree
8362 fold_builtin_hypot (location_t loc, tree fndecl,
8363 tree arg0, tree arg1, tree type)
8364 {
8365 tree res, narg0, narg1;
8366
8367 if (!validate_arg (arg0, REAL_TYPE)
8368 || !validate_arg (arg1, REAL_TYPE))
8369 return NULL_TREE;
8370
8371 /* Calculate the result when the argument is a constant. */
8372 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8373 return res;
8374
8375 /* If either argument to hypot has a negate or abs, strip that off.
8376 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8377 narg0 = fold_strip_sign_ops (arg0);
8378 narg1 = fold_strip_sign_ops (arg1);
8379 if (narg0 || narg1)
8380 {
8381 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
8382 narg1 ? narg1 : arg1);
8383 }
8384
8385 /* If either argument is zero, hypot is fabs of the other. */
8386 if (real_zerop (arg0))
8387 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
8388 else if (real_zerop (arg1))
8389 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
8390
8391 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8392 if (flag_unsafe_math_optimizations
8393 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8394 {
8395 const REAL_VALUE_TYPE sqrt2_trunc
8396 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
8397 return fold_build2_loc (loc, MULT_EXPR, type,
8398 fold_build1_loc (loc, ABS_EXPR, type, arg0),
8399 build_real (type, sqrt2_trunc));
8400 }
8401
8402 return NULL_TREE;
8403 }
8404
8405
8406 /* Fold a builtin function call to pow, powf, or powl. Return
8407 NULL_TREE if no simplification can be made. */
8408 static tree
8409 fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
8410 {
8411 tree res;
8412
8413 if (!validate_arg (arg0, REAL_TYPE)
8414 || !validate_arg (arg1, REAL_TYPE))
8415 return NULL_TREE;
8416
8417 /* Calculate the result when the argument is a constant. */
8418 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8419 return res;
8420
8421 /* Optimize pow(1.0,y) = 1.0. */
8422 if (real_onep (arg0))
8423 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8424
8425 if (TREE_CODE (arg1) == REAL_CST
8426 && !TREE_OVERFLOW (arg1))
8427 {
8428 REAL_VALUE_TYPE cint;
8429 REAL_VALUE_TYPE c;
8430 HOST_WIDE_INT n;
8431
8432 c = TREE_REAL_CST (arg1);
8433
8434 /* Optimize pow(x,0.0) = 1.0. */
8435 if (real_equal (&c, &dconst0))
8436 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8437 arg0);
8438
8439 /* Optimize pow(x,1.0) = x. */
8440 if (real_equal (&c, &dconst1))
8441 return arg0;
8442
8443 /* Optimize pow(x,-1.0) = 1.0/x. */
8444 if (real_equal (&c, &dconstm1))
8445 return fold_build2_loc (loc, RDIV_EXPR, type,
8446 build_real (type, dconst1), arg0);
8447
8448 /* Optimize pow(x,0.5) = sqrt(x). */
8449 if (flag_unsafe_math_optimizations
8450 && real_equal (&c, &dconsthalf))
8451 {
8452 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8453
8454 if (sqrtfn != NULL_TREE)
8455 return build_call_expr_loc (loc, sqrtfn, 1, arg0);
8456 }
8457
8458 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8459 if (flag_unsafe_math_optimizations)
8460 {
8461 const REAL_VALUE_TYPE dconstroot
8462 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8463
8464 if (real_equal (&c, &dconstroot))
8465 {
8466 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8467 if (cbrtfn != NULL_TREE)
8468 return build_call_expr_loc (loc, cbrtfn, 1, arg0);
8469 }
8470 }
8471
8472 /* Check for an integer exponent. */
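      /* Convert C to the integer N and back; the exponent is integral
	 iff the round trip reproduces C exactly.  */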
8473 n = real_to_integer (&c);
8474 real_from_integer (&cint, VOIDmode, n, SIGNED);
8475 if (real_identical (&c, &cint))
8476 {
8477 /* Attempt to evaluate pow at compile-time, unless this should
8478 raise an exception. */
8479 if (TREE_CODE (arg0) == REAL_CST
8480 && !TREE_OVERFLOW (arg0)
8481 && (n > 0
8482 || (!flag_trapping_math && !flag_errno_math)
8483 || !real_equal (&TREE_REAL_CST (arg0), &dconst0)))
8484 {
8485 REAL_VALUE_TYPE x;
8486 bool inexact;
8487
8488 x = TREE_REAL_CST (arg0);
8489 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8490 if (flag_unsafe_math_optimizations || !inexact)
8491 return build_real (type, x);
8492 }
8493
8494 /* Strip sign ops from even integer powers. */
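	  /* E.g. pow (-x, 2.0) and pow (fabs (x), 2.0) both fold
	     to pow (x, 2.0).  */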
8495 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8496 {
8497 tree narg0 = fold_strip_sign_ops (arg0);
8498 if (narg0)
8499 return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
8500 }
8501 }
8502 }
8503
8504 if (flag_unsafe_math_optimizations)
8505 {
8506 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8507
8508 /* Optimize pow(expN(x),y) = expN(x*y). */
8509 if (BUILTIN_EXPONENT_P (fcode))
8510 {
8511 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8512 tree arg = CALL_EXPR_ARG (arg0, 0);
8513 arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
8514 return build_call_expr_loc (loc, expfn, 1, arg);
8515 }
8516
8517 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8518 if (BUILTIN_SQRT_P (fcode))
8519 {
8520 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8521 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8522 build_real (type, dconsthalf));
8523 return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
8524 }
8525
8526 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8527 if (BUILTIN_CBRT_P (fcode))
8528 {
8529 tree arg = CALL_EXPR_ARG (arg0, 0);
8530 if (tree_expr_nonnegative_p (arg))
8531 {
8532 const REAL_VALUE_TYPE dconstroot
8533 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8534 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8535 build_real (type, dconstroot));
8536 return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
8537 }
8538 }
8539
8540 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
8541 if (fcode == BUILT_IN_POW
8542 || fcode == BUILT_IN_POWF
8543 || fcode == BUILT_IN_POWL)
8544 {
8545 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8546 if (tree_expr_nonnegative_p (arg00))
8547 {
8548 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8549 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
8550 return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
8551 }
8552 }
8553 }
8554
8555 return NULL_TREE;
8556 }
8557
8558 /* Fold a builtin function call to powi, powif, or powil with arguments
8559    ARG0 and ARG1.  Return NULL_TREE if no simplification can be made.  */
8560 static tree
8561 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
8562 tree arg0, tree arg1, tree type)
8563 {
8564 if (!validate_arg (arg0, REAL_TYPE)
8565 || !validate_arg (arg1, INTEGER_TYPE))
8566 return NULL_TREE;
8567
8568   /* Optimize powi(1.0,y) = 1.0.  */
8569 if (real_onep (arg0))
8570 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8571
8572 if (tree_fits_shwi_p (arg1))
8573 {
8574 HOST_WIDE_INT c = tree_to_shwi (arg1);
8575
8576 /* Evaluate powi at compile-time. */
8577 if (TREE_CODE (arg0) == REAL_CST
8578 && !TREE_OVERFLOW (arg0))
8579 {
8580 REAL_VALUE_TYPE x;
8581 x = TREE_REAL_CST (arg0);
8582 real_powi (&x, TYPE_MODE (type), &x, c);
8583 return build_real (type, x);
8584 }
8585
8586       /* Optimize powi(x,0) = 1.0.  */
8587 if (c == 0)
8588 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8589 arg0);
8590
8591       /* Optimize powi(x,1) = x.  */
8592 if (c == 1)
8593 return arg0;
8594
8595       /* Optimize powi(x,-1) = 1.0/x.  */
8596 if (c == -1)
8597 return fold_build2_loc (loc, RDIV_EXPR, type,
8598 build_real (type, dconst1), arg0);
8599 }
8600
8601 return NULL_TREE;
8602 }
8603
8604 /* A subroutine of fold_builtin to fold the various exponent
8605 functions. Return NULL_TREE if no simplification can be made.
8606 FUNC is the corresponding MPFR exponent function. */
8607
8608 static tree
8609 fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
8610 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8611 {
8612 if (validate_arg (arg, REAL_TYPE))
8613 {
8614 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8615 tree res;
8616
8617 /* Calculate the result when the argument is a constant. */
8618 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8619 return res;
8620
8621 /* Optimize expN(logN(x)) = x. */
8622 if (flag_unsafe_math_optimizations)
8623 {
8624 const enum built_in_function fcode = builtin_mathfn_code (arg);
8625
8626 if ((func == mpfr_exp
8627 && (fcode == BUILT_IN_LOG
8628 || fcode == BUILT_IN_LOGF
8629 || fcode == BUILT_IN_LOGL))
8630 || (func == mpfr_exp2
8631 && (fcode == BUILT_IN_LOG2
8632 || fcode == BUILT_IN_LOG2F
8633 || fcode == BUILT_IN_LOG2L))
8634 || (func == mpfr_exp10
8635 && (fcode == BUILT_IN_LOG10
8636 || fcode == BUILT_IN_LOG10F
8637 || fcode == BUILT_IN_LOG10L)))
8638 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8639 }
8640 }
8641
8642 return NULL_TREE;
8643 }
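/* Illustrative examples (editorial sketch): a constant argument is
   evaluated through MPFR, e.g. exp (1.0) folds to the value of e in the
   target type's precision, and under -funsafe-math-optimizations
     exp (log (x)) -> x
     exp2 (log2 (x)) -> x
     exp10 (log10 (x)) -> x.  */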
8644
8645 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8646 arguments to the call, and TYPE is its return type.
8647 Return NULL_TREE if no simplification can be made. */
8648
8649 static tree
8650 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
8651 {
8652 if (!validate_arg (arg1, POINTER_TYPE)
8653 || !validate_arg (arg2, INTEGER_TYPE)
8654 || !validate_arg (len, INTEGER_TYPE))
8655 return NULL_TREE;
8656 else
8657 {
8658 const char *p1;
8659
8660 if (TREE_CODE (arg2) != INTEGER_CST
8661 || !tree_fits_uhwi_p (len))
8662 return NULL_TREE;
8663
8664 p1 = c_getstr (arg1);
8665 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
8666 {
8667 char c;
8668 const char *r;
8669 tree tem;
8670
8671 if (target_char_cast (arg2, &c))
8672 return NULL_TREE;
8673
8674 r = (const char *) memchr (p1, c, tree_to_uhwi (len));
8675
8676 if (r == NULL)
8677 return build_int_cst (TREE_TYPE (arg1), 0);
8678
8679 tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
8680 return fold_convert_loc (loc, type, tem);
8681 }
8682 return NULL_TREE;
8683 }
8684 }
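/* Illustrative examples (editorial sketch; operands hypothetical): with a
   constant string and a constant length within the string's size,
     memchr ("hello", 'l', 6) -> "hello" + 2
     memchr ("hello", 'z', 6) -> (char *) 0.  */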
8685
8686 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8687 Return NULL_TREE if no simplification can be made. */
8688
8689 static tree
8690 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8691 {
8692 const char *p1, *p2;
8693
8694 if (!validate_arg (arg1, POINTER_TYPE)
8695 || !validate_arg (arg2, POINTER_TYPE)
8696 || !validate_arg (len, INTEGER_TYPE))
8697 return NULL_TREE;
8698
8699 /* If the LEN parameter is zero, return zero. */
8700 if (integer_zerop (len))
8701 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8702 arg1, arg2);
8703
8704 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8705 if (operand_equal_p (arg1, arg2, 0))
8706 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8707
8708 p1 = c_getstr (arg1);
8709 p2 = c_getstr (arg2);
8710
8711 /* If all arguments are constant, and the value of len is not greater
8712 than the lengths of arg1 and arg2, evaluate at compile-time. */
8713 if (tree_fits_uhwi_p (len) && p1 && p2
8714 && compare_tree_int (len, strlen (p1) + 1) <= 0
8715 && compare_tree_int (len, strlen (p2) + 1) <= 0)
8716 {
8717 const int r = memcmp (p1, p2, tree_to_uhwi (len));
8718
8719 if (r > 0)
8720 return integer_one_node;
8721 else if (r < 0)
8722 return integer_minus_one_node;
8723 else
8724 return integer_zero_node;
8725 }
8726
8727 /* If the len parameter is one, return an expression corresponding to
8728 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8729 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8730 {
8731 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8732 tree cst_uchar_ptr_node
8733 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8734
8735 tree ind1
8736 = fold_convert_loc (loc, integer_type_node,
8737 build1 (INDIRECT_REF, cst_uchar_node,
8738 fold_convert_loc (loc,
8739 cst_uchar_ptr_node,
8740 arg1)));
8741 tree ind2
8742 = fold_convert_loc (loc, integer_type_node,
8743 build1 (INDIRECT_REF, cst_uchar_node,
8744 fold_convert_loc (loc,
8745 cst_uchar_ptr_node,
8746 arg2)));
8747 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8748 }
8749
8750 return NULL_TREE;
8751 }
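/* Illustrative examples (editorial sketch; operands hypothetical):
     memcmp (a, b, 0) -> 0 (a and b still evaluated for side effects)
     memcmp (a, a, n) -> 0
     memcmp ("abc", "abd", 3) -> -1 (compile time; normalized to -1/0/1)
     memcmp (a, b, 1) -> *(const unsigned char *) a
			 - *(const unsigned char *) b.  */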
8752
8753 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
8754 Return NULL_TREE if no simplification can be made. */
8755
8756 static tree
8757 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
8758 {
8759 const char *p1, *p2;
8760
8761 if (!validate_arg (arg1, POINTER_TYPE)
8762 || !validate_arg (arg2, POINTER_TYPE))
8763 return NULL_TREE;
8764
8765 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8766 if (operand_equal_p (arg1, arg2, 0))
8767 return integer_zero_node;
8768
8769 p1 = c_getstr (arg1);
8770 p2 = c_getstr (arg2);
8771
8772 if (p1 && p2)
8773 {
8774 const int i = strcmp (p1, p2);
8775 if (i < 0)
8776 return integer_minus_one_node;
8777 else if (i > 0)
8778 return integer_one_node;
8779 else
8780 return integer_zero_node;
8781 }
8782
8783 /* If the second arg is "", return *(const unsigned char*)arg1. */
8784 if (p2 && *p2 == '\0')
8785 {
8786 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8787 tree cst_uchar_ptr_node
8788 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8789
8790 return fold_convert_loc (loc, integer_type_node,
8791 build1 (INDIRECT_REF, cst_uchar_node,
8792 fold_convert_loc (loc,
8793 cst_uchar_ptr_node,
8794 arg1)));
8795 }
8796
8797 /* If the first arg is "", return -*(const unsigned char*)arg2. */
8798 if (p1 && *p1 == '\0')
8799 {
8800 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8801 tree cst_uchar_ptr_node
8802 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8803
8804 tree temp
8805 = fold_convert_loc (loc, integer_type_node,
8806 build1 (INDIRECT_REF, cst_uchar_node,
8807 fold_convert_loc (loc,
8808 cst_uchar_ptr_node,
8809 arg2)));
8810 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8811 }
8812
8813 return NULL_TREE;
8814 }
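/* Illustrative examples (editorial sketch; operands hypothetical):
     strcmp (s, s) -> 0
     strcmp ("abc", "abd") -> -1 (compile time; normalized to -1/0/1)
     strcmp (s, "") -> *(const unsigned char *) s
     strcmp ("", t) -> -*(const unsigned char *) t.  */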
8815
8816 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
8817 Return NULL_TREE if no simplification can be made. */
8818
8819 static tree
8820 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
8821 {
8822 const char *p1, *p2;
8823
8824 if (!validate_arg (arg1, POINTER_TYPE)
8825 || !validate_arg (arg2, POINTER_TYPE)
8826 || !validate_arg (len, INTEGER_TYPE))
8827 return NULL_TREE;
8828
8829 /* If the LEN parameter is zero, return zero. */
8830 if (integer_zerop (len))
8831 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8832 arg1, arg2);
8833
8834 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8835 if (operand_equal_p (arg1, arg2, 0))
8836 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8837
8838 p1 = c_getstr (arg1);
8839 p2 = c_getstr (arg2);
8840
8841 if (tree_fits_uhwi_p (len) && p1 && p2)
8842 {
8843 const int i = strncmp (p1, p2, tree_to_uhwi (len));
8844 if (i > 0)
8845 return integer_one_node;
8846 else if (i < 0)
8847 return integer_minus_one_node;
8848 else
8849 return integer_zero_node;
8850 }
8851
8852 /* If the second arg is "", and the length is greater than zero,
8853 return *(const unsigned char*)arg1. */
8854 if (p2 && *p2 == '\0'
8855 && TREE_CODE (len) == INTEGER_CST
8856 && tree_int_cst_sgn (len) == 1)
8857 {
8858 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8859 tree cst_uchar_ptr_node
8860 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8861
8862 return fold_convert_loc (loc, integer_type_node,
8863 build1 (INDIRECT_REF, cst_uchar_node,
8864 fold_convert_loc (loc,
8865 cst_uchar_ptr_node,
8866 arg1)));
8867 }
8868
8869 /* If the first arg is "", and the length is greater than zero,
8870 return -*(const unsigned char*)arg2. */
8871 if (p1 && *p1 == '\0'
8872 && TREE_CODE (len) == INTEGER_CST
8873 && tree_int_cst_sgn (len) == 1)
8874 {
8875 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8876 tree cst_uchar_ptr_node
8877 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8878
8879 tree temp = fold_convert_loc (loc, integer_type_node,
8880 build1 (INDIRECT_REF, cst_uchar_node,
8881 fold_convert_loc (loc,
8882 cst_uchar_ptr_node,
8883 arg2)));
8884 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8885 }
8886
8887 /* If the len parameter is one, return an expression corresponding to
8888 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8889 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8890 {
8891 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8892 tree cst_uchar_ptr_node
8893 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8894
8895 tree ind1 = fold_convert_loc (loc, integer_type_node,
8896 build1 (INDIRECT_REF, cst_uchar_node,
8897 fold_convert_loc (loc,
8898 cst_uchar_ptr_node,
8899 arg1)));
8900 tree ind2 = fold_convert_loc (loc, integer_type_node,
8901 build1 (INDIRECT_REF, cst_uchar_node,
8902 fold_convert_loc (loc,
8903 cst_uchar_ptr_node,
8904 arg2)));
8905 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8906 }
8907
8908 return NULL_TREE;
8909 }
8910
8911 /* Fold function call to builtin signbit, signbitf or signbitl with argument
8912 ARG. Return NULL_TREE if no simplification can be made. */
8913
8914 static tree
8915 fold_builtin_signbit (location_t loc, tree arg, tree type)
8916 {
8917 if (!validate_arg (arg, REAL_TYPE))
8918 return NULL_TREE;
8919
8920 /* If ARG is a compile-time constant, determine the result. */
8921 if (TREE_CODE (arg) == REAL_CST
8922 && !TREE_OVERFLOW (arg))
8923 {
8924 REAL_VALUE_TYPE c;
8925
8926 c = TREE_REAL_CST (arg);
8927 return (REAL_VALUE_NEGATIVE (c)
8928 ? build_one_cst (type)
8929 : build_zero_cst (type));
8930 }
8931
8932 /* If ARG is non-negative, the result is always zero. */
8933 if (tree_expr_nonnegative_p (arg))
8934 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8935
8936 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
8937 if (!HONOR_SIGNED_ZEROS (arg))
8938 return fold_convert (type,
8939 fold_build2_loc (loc, LT_EXPR, boolean_type_node, arg,
8940 build_real (TREE_TYPE (arg), dconst0)));
8941
8942 return NULL_TREE;
8943 }
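/* Illustrative examples (editorial sketch):
     signbit (-3.0) -> 1
     signbit (fabs (x)) -> 0 (argument known nonnegative)
     signbit (x) -> x < 0.0, when the format has no signed zeros.  */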
8944
8945 /* Fold function call to builtin copysign, copysignf or copysignl with
8946 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
8947 be made. */
8948
8949 static tree
8950 fold_builtin_copysign (location_t loc, tree fndecl,
8951 tree arg1, tree arg2, tree type)
8952 {
8953 tree tem;
8954
8955 if (!validate_arg (arg1, REAL_TYPE)
8956 || !validate_arg (arg2, REAL_TYPE))
8957 return NULL_TREE;
8958
8959 /* copysign(X,X) is X. */
8960 if (operand_equal_p (arg1, arg2, 0))
8961 return fold_convert_loc (loc, type, arg1);
8962
8963 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
8964 if (TREE_CODE (arg1) == REAL_CST
8965 && TREE_CODE (arg2) == REAL_CST
8966 && !TREE_OVERFLOW (arg1)
8967 && !TREE_OVERFLOW (arg2))
8968 {
8969 REAL_VALUE_TYPE c1, c2;
8970
8971 c1 = TREE_REAL_CST (arg1);
8972 c2 = TREE_REAL_CST (arg2);
8973 /* c1.sign := c2.sign. */
8974 real_copysign (&c1, &c2);
8975 return build_real (type, c1);
8976 }
8977
8978 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
8979 Remember to evaluate Y for side-effects. */
8980 if (tree_expr_nonnegative_p (arg2))
8981 return omit_one_operand_loc (loc, type,
8982 fold_build1_loc (loc, ABS_EXPR, type, arg1),
8983 arg2);
8984
8985 /* Strip sign changing operations for the first argument. */
8986 tem = fold_strip_sign_ops (arg1);
8987 if (tem)
8988 return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
8989
8990 return NULL_TREE;
8991 }
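/* Illustrative examples (editorial sketch; operands hypothetical):
     copysign (x, x) -> x
     copysign (-2.0, 3.0) -> 2.0 (evaluated at compile time)
     copysign (x, fabs (y)) -> fabs (x) (sign source nonnegative)
     copysign (-x, y) -> copysign (x, y) (sign ops stripped from arg 1).  */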
8992
8993 /* Fold a call to builtin isascii with argument ARG. */
8994
8995 static tree
8996 fold_builtin_isascii (location_t loc, tree arg)
8997 {
8998 if (!validate_arg (arg, INTEGER_TYPE))
8999 return NULL_TREE;
9000 else
9001 {
9002 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9003 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9004 build_int_cst (integer_type_node,
9005 ~ (unsigned HOST_WIDE_INT) 0x7f));
9006 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
9007 arg, integer_zero_node);
9008 }
9009 }
9010
9011 /* Fold a call to builtin toascii with argument ARG. */
9012
9013 static tree
9014 fold_builtin_toascii (location_t loc, tree arg)
9015 {
9016 if (!validate_arg (arg, INTEGER_TYPE))
9017 return NULL_TREE;
9018
9019 /* Transform toascii(c) -> (c & 0x7f). */
9020 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
9021 build_int_cst (integer_type_node, 0x7f));
9022 }
9023
9024 /* Fold a call to builtin isdigit with argument ARG. */
9025
9026 static tree
9027 fold_builtin_isdigit (location_t loc, tree arg)
9028 {
9029 if (!validate_arg (arg, INTEGER_TYPE))
9030 return NULL_TREE;
9031 else
9032 {
9033 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9034 /* According to the C standard, isdigit is unaffected by locale.
9035 However, it definitely is affected by the target character set. */
9036 unsigned HOST_WIDE_INT target_digit0
9037 = lang_hooks.to_target_charset ('0');
9038
9039 if (target_digit0 == 0)
9040 return NULL_TREE;
9041
9042 arg = fold_convert_loc (loc, unsigned_type_node, arg);
9043 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
9044 build_int_cst (unsigned_type_node, target_digit0));
9045 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
9046 build_int_cst (unsigned_type_node, 9));
9047 }
9048 }
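/* Illustrative example (editorial sketch): on a target whose character
   set maps '0' to 48, as ASCII does, the fold above turns
     isdigit (c)
   into the equivalent of
     (unsigned) c - 48 <= 9.  */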
9049
9050 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9051
9052 static tree
9053 fold_builtin_fabs (location_t loc, tree arg, tree type)
9054 {
9055 if (!validate_arg (arg, REAL_TYPE))
9056 return NULL_TREE;
9057
9058 arg = fold_convert_loc (loc, type, arg);
9059 if (TREE_CODE (arg) == REAL_CST)
9060 return fold_abs_const (arg, type);
9061 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9062 }
9063
9064 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9065
9066 static tree
9067 fold_builtin_abs (location_t loc, tree arg, tree type)
9068 {
9069 if (!validate_arg (arg, INTEGER_TYPE))
9070 return NULL_TREE;
9071
9072 arg = fold_convert_loc (loc, type, arg);
9073 if (TREE_CODE (arg) == INTEGER_CST)
9074 return fold_abs_const (arg, type);
9075 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9076 }
9077
9078 /* Fold a fma operation with arguments ARG[012]. */
9079
9080 tree
9081 fold_fma (location_t loc ATTRIBUTE_UNUSED,
9082 tree type, tree arg0, tree arg1, tree arg2)
9083 {
9084 if (TREE_CODE (arg0) == REAL_CST
9085 && TREE_CODE (arg1) == REAL_CST
9086 && TREE_CODE (arg2) == REAL_CST)
9087 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
9088
9089 return NULL_TREE;
9090 }
9091
9092 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
9093
9094 static tree
9095 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
9096 {
9097 if (validate_arg (arg0, REAL_TYPE)
9098 && validate_arg (arg1, REAL_TYPE)
9099 && validate_arg (arg2, REAL_TYPE))
9100 {
9101 tree tem = fold_fma (loc, type, arg0, arg1, arg2);
9102 if (tem)
9103 return tem;
9104
9105 /* ??? Only expand to FMA_EXPR if it's directly supported. */
9106 if (optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
9107 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
9108 }
9109 return NULL_TREE;
9110 }
9111
9112 /* Fold a call to builtin fmin or fmax. */
9113
9114 static tree
9115 fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
9116 tree type, bool max)
9117 {
9118 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9119 {
9120 /* Calculate the result when the argument is a constant. */
9121 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9122
9123 if (res)
9124 return res;
9125
9126 /* If either argument is NaN, return the other one. Avoid the
9127 transformation if we get (and honor) a signalling NaN. Using
9128 omit_one_operand() ensures we create a non-lvalue. */
9129 if (TREE_CODE (arg0) == REAL_CST
9130 && real_isnan (&TREE_REAL_CST (arg0))
9131 && (! HONOR_SNANS (arg0)
9132 || ! TREE_REAL_CST (arg0).signalling))
9133 return omit_one_operand_loc (loc, type, arg1, arg0);
9134 if (TREE_CODE (arg1) == REAL_CST
9135 && real_isnan (&TREE_REAL_CST (arg1))
9136 && (! HONOR_SNANS (arg1)
9137 || ! TREE_REAL_CST (arg1).signalling))
9138 return omit_one_operand_loc (loc, type, arg0, arg1);
9139
9140 /* Transform fmin/fmax(x,x) -> x. */
9141 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9142 return omit_one_operand_loc (loc, type, arg0, arg1);
9143
9144 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9145 functions to return the numeric arg if the other one is NaN.
9146 These tree codes don't honor that, so only transform if
9147 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9148 handled, so we don't have to worry about it either. */
9149 if (flag_finite_math_only)
9150 return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
9151 fold_convert_loc (loc, type, arg0),
9152 fold_convert_loc (loc, type, arg1));
9153 }
9154 return NULL_TREE;
9155 }
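/* Illustrative examples (editorial sketch):
     fmax (x, x) -> x
     fmax (x, NaN) -> x (for a quiet constant NaN)
     fmax (x, y) -> MAX_EXPR <x, y>, under -ffinite-math-only.  */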
9156
9157 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9158
9159 static tree
9160 fold_builtin_carg (location_t loc, tree arg, tree type)
9161 {
9162 if (validate_arg (arg, COMPLEX_TYPE)
9163 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9164 {
9165 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9166
9167 if (atan2_fn)
9168 {
9169 tree new_arg = builtin_save_expr (arg);
9170 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9171 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9172 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9173 }
9174 }
9175
9176 return NULL_TREE;
9177 }
9178
9179 /* Fold a call to builtin logb/ilogb. */
9180
9181 static tree
9182 fold_builtin_logb (location_t loc, tree arg, tree rettype)
9183 {
9184 if (! validate_arg (arg, REAL_TYPE))
9185 return NULL_TREE;
9186
9187 STRIP_NOPS (arg);
9188
9189 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9190 {
9191 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9192
9193 switch (value->cl)
9194 {
9195 case rvc_nan:
9196 case rvc_inf:
9197 /* If arg is Inf or NaN and we're logb, return it. */
9198 if (TREE_CODE (rettype) == REAL_TYPE)
9199 {
9200 /* For logb(-Inf) we have to return +Inf. */
9201 if (real_isinf (value) && real_isneg (value))
9202 {
9203 REAL_VALUE_TYPE tem;
9204 real_inf (&tem);
9205 return build_real (rettype, tem);
9206 }
9207 return fold_convert_loc (loc, rettype, arg);
9208 }
9209 /* Fall through... */
9210 case rvc_zero:
9211 /* For logb, zero may set errno and/or raise an exception; for
9212 ilogb we don't know FP_ILOGB0. */
9213 return NULL_TREE;
9214 case rvc_normal:
9215 /* For normal numbers, proceed iff radix == 2. In GCC,
9216 normalized significands are in the range [0.5, 1.0). We
9217 want the exponent as if they were [1.0, 2.0) so get the
9218 exponent and subtract 1. */
9219 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9220 return fold_convert_loc (loc, rettype,
9221 build_int_cst (integer_type_node,
9222 REAL_EXP (value)-1));
9223 break;
9224 }
9225 }
9226
9227 return NULL_TREE;
9228 }
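/* Illustrative example (editorial sketch): 8.0 is represented internally
   as 0.5 * 2**4, so REAL_EXP is 4 and the fold above gives
     logb (8.0) -> 3.0
     ilogb (8.0) -> 3.  */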
9229
9230 /* Fold a call to builtin significand, if radix == 2. */
9231
9232 static tree
9233 fold_builtin_significand (location_t loc, tree arg, tree rettype)
9234 {
9235 if (! validate_arg (arg, REAL_TYPE))
9236 return NULL_TREE;
9237
9238 STRIP_NOPS (arg);
9239
9240 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9241 {
9242 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9243
9244 switch (value->cl)
9245 {
9246 case rvc_zero:
9247 case rvc_nan:
9248 case rvc_inf:
9249 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9250 return fold_convert_loc (loc, rettype, arg);
9251 case rvc_normal:
9252 /* For normal numbers, proceed iff radix == 2. */
9253 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9254 {
9255 REAL_VALUE_TYPE result = *value;
9256 /* In GCC, normalized significands are in the range [0.5,
9257 1.0). We want them to be [1.0, 2.0) so set the
9258 exponent to 1. */
9259 SET_REAL_EXP (&result, 1);
9260 return build_real (rettype, result);
9261 }
9262 break;
9263 }
9264 }
9265
9266 return NULL_TREE;
9267 }
9268
9269 /* Fold a call to builtin frexp; we can assume the base is 2. */
9270
9271 static tree
9272 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
9273 {
9274 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9275 return NULL_TREE;
9276
9277 STRIP_NOPS (arg0);
9278
9279 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9280 return NULL_TREE;
9281
9282 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9283
9284 /* Proceed if a valid pointer type was passed in. */
9285 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9286 {
9287 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9288 tree frac, exp;
9289
9290 switch (value->cl)
9291 {
9292 case rvc_zero:
9293 /* For +-0, return (*exp = 0, +-0). */
9294 exp = integer_zero_node;
9295 frac = arg0;
9296 break;
9297 case rvc_nan:
9298 case rvc_inf:
9299 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9300 return omit_one_operand_loc (loc, rettype, arg0, arg1);
9301 case rvc_normal:
9302 {
9303 /* Since the frexp function always expects base 2, and in
9304 GCC normalized significands are already in the range
9305 [0.5, 1.0), we have exactly what frexp wants. */
9306 REAL_VALUE_TYPE frac_rvt = *value;
9307 SET_REAL_EXP (&frac_rvt, 0);
9308 frac = build_real (rettype, frac_rvt);
9309 exp = build_int_cst (integer_type_node, REAL_EXP (value));
9310 }
9311 break;
9312 default:
9313 gcc_unreachable ();
9314 }
9315
9316 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
9317 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
9318 TREE_SIDE_EFFECTS (arg1) = 1;
9319 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
9320 }
9321
9322 return NULL_TREE;
9323 }
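/* Illustrative example (editorial sketch): since 8.0 = 0.5 * 2**4 and
   the internal significand is already in [0.5, 1.0), the fold above
   produces
     frexp (8.0, &e) -> (*e = 4, 0.5)
   as a COMPOUND_EXPR.  */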
9324
9325 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9326 then we can assume the base is two. If it's false, then we have to
9327 check the mode of the TYPE parameter in certain cases. */
9328
9329 static tree
9330 fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
9331 tree type, bool ldexp)
9332 {
9333 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9334 {
9335 STRIP_NOPS (arg0);
9336 STRIP_NOPS (arg1);
9337
9338 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9339 if (real_zerop (arg0) || integer_zerop (arg1)
9340 || (TREE_CODE (arg0) == REAL_CST
9341 && !real_isfinite (&TREE_REAL_CST (arg0))))
9342 return omit_one_operand_loc (loc, type, arg0, arg1);
9343
9344 /* If both arguments are constant, then try to evaluate it. */
9345 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9346 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9347 && tree_fits_shwi_p (arg1))
9348 {
9349 /* Bound the maximum adjustment to twice the range of the
9350 mode's valid exponents. Use abs to ensure the range is
9351 positive as a sanity check. */
9352 const long max_exp_adj = 2 *
9353 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9354 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9355
9356 /* Get the user-requested adjustment. */
9357 const HOST_WIDE_INT req_exp_adj = tree_to_shwi (arg1);
9358
9359 /* The requested adjustment must be inside this range. This
9360 is a preliminary cap to avoid things like overflow; we
9361 may still fail to compute the result for other reasons. */
9362 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9363 {
9364 REAL_VALUE_TYPE initial_result;
9365
9366 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9367
9368 /* Ensure we didn't overflow. */
9369 if (! real_isinf (&initial_result))
9370 {
9371 const REAL_VALUE_TYPE trunc_result
9372 = real_value_truncate (TYPE_MODE (type), initial_result);
9373
9374 /* Only proceed if the target mode can hold the
9375 resulting value. */
9376 if (real_equal (&initial_result, &trunc_result))
9377 return build_real (type, trunc_result);
9378 }
9379 }
9380 }
9381 }
9382
9383 return NULL_TREE;
9384 }
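/* Illustrative examples (editorial sketch):
     ldexp (x, 0) -> x
     ldexp (0.0, n) -> 0.0
     ldexp (1.0, 10) -> 1024.0 (compile time; exponent within range).  */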
9385
9386 /* Fold a call to builtin modf. */
9387
9388 static tree
9389 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9390 {
9391 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9392 return NULL_TREE;
9393
9394 STRIP_NOPS (arg0);
9395
9396 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9397 return NULL_TREE;
9398
9399 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9400
9401 /* Proceed if a valid pointer type was passed in. */
9402 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9403 {
9404 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9405 REAL_VALUE_TYPE trunc, frac;
9406
9407 switch (value->cl)
9408 {
9409 case rvc_nan:
9410 case rvc_zero:
9411 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9412 trunc = frac = *value;
9413 break;
9414 case rvc_inf:
9415 /* For +-Inf, return (*arg1 = arg0, +-0). */
9416 frac = dconst0;
9417 frac.sign = value->sign;
9418 trunc = *value;
9419 break;
9420 case rvc_normal:
9421 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9422 real_trunc (&trunc, VOIDmode, value);
9423 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9424 /* If the original number was negative and already
9425 integral, then the fractional part is -0.0. */
9426 if (value->sign && frac.cl == rvc_zero)
9427 frac.sign = value->sign;
9428 break;
9429 }
9430
9431 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9432 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9433 build_real (rettype, trunc));
9434 TREE_SIDE_EFFECTS (arg1) = 1;
9435 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9436 build_real (rettype, frac));
9437 }
9438
9439 return NULL_TREE;
9440 }
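/* Illustrative examples (editorial sketch):
     modf (2.5, &i) -> (*i = 2.0, 0.5)
     modf (-2.0, &i) -> (*i = -2.0, -0.0)
     modf (-Inf, &i) -> (*i = -Inf, -0.0).  */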
9441
9442 /* Given a location LOC, an interclass builtin function decl FNDECL
9443 and its single argument ARG, return a folded expression computing
9444 the same, or NULL_TREE if we either couldn't or didn't want to fold
9445 (the latter happens if there's an RTL instruction available). */
9446
9447 static tree
9448 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9449 {
9450 machine_mode mode;
9451
9452 if (!validate_arg (arg, REAL_TYPE))
9453 return NULL_TREE;
9454
9455 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9456 return NULL_TREE;
9457
9458 mode = TYPE_MODE (TREE_TYPE (arg));
9459
9460 /* If there is no optab, try generic code. */
9461 switch (DECL_FUNCTION_CODE (fndecl))
9462 {
9463 tree result;
9464
9465 CASE_FLT_FN (BUILT_IN_ISINF):
9466 {
9467 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9468 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9469 tree const type = TREE_TYPE (arg);
9470 REAL_VALUE_TYPE r;
9471 char buf[128];
9472
9473 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9474 real_from_string (&r, buf);
9475 result = build_call_expr (isgr_fn, 2,
9476 fold_build1_loc (loc, ABS_EXPR, type, arg),
9477 build_real (type, r));
9478 return result;
9479 }
9480 CASE_FLT_FN (BUILT_IN_FINITE):
9481 case BUILT_IN_ISFINITE:
9482 {
9483 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9484 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9485 tree const type = TREE_TYPE (arg);
9486 REAL_VALUE_TYPE r;
9487 char buf[128];
9488
9489 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9490 real_from_string (&r, buf);
9491 result = build_call_expr (isle_fn, 2,
9492 fold_build1_loc (loc, ABS_EXPR, type, arg),
9493 build_real (type, r));
9494 /*result = fold_build2_loc (loc, UNGT_EXPR,
9495 TREE_TYPE (TREE_TYPE (fndecl)),
9496 fold_build1_loc (loc, ABS_EXPR, type, arg),
9497 build_real (type, r));
9498 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9499 TREE_TYPE (TREE_TYPE (fndecl)),
9500 result);*/
9501 return result;
9502 }
9503 case BUILT_IN_ISNORMAL:
9504 {
9505 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9506 islessequal(fabs(x),DBL_MAX). */
9507 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9508 tree const isge_fn = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
9509 tree const type = TREE_TYPE (arg);
9510 REAL_VALUE_TYPE rmax, rmin;
9511 char buf[128];
9512
9513 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9514 real_from_string (&rmax, buf);
9515 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9516 real_from_string (&rmin, buf);
9517 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9518 result = build_call_expr (isle_fn, 2, arg,
9519 build_real (type, rmax));
9520 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
9521 build_call_expr (isge_fn, 2, arg,
9522 build_real (type, rmin)));
9523 return result;
9524 }
9525 default:
9526 break;
9527 }
9528
9529 return NULL_TREE;
9530 }
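/* Illustrative examples (editorial sketch): for double, get_max_float
   produces the DBL_MAX string, so roughly
     isinf (x) -> isgreater (fabs (x), DBL_MAX)
     isfinite (x) -> islessequal (fabs (x), DBL_MAX)
     isnormal (x) -> islessequal (fabs (x), DBL_MAX)
		     & isgreaterequal (fabs (x), 0x1p-1022).  */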
9531
9532 /* Fold a call to __builtin_isnan, __builtin_isinf, or __builtin_finite.
9533 ARG is the argument for the call. */
9534
9535 static tree
9536 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
9537 {
9538 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9539 REAL_VALUE_TYPE r;
9540
9541 if (!validate_arg (arg, REAL_TYPE))
9542 return NULL_TREE;
9543
9544 switch (builtin_index)
9545 {
9546 case BUILT_IN_ISINF:
9547 if (!HONOR_INFINITIES (arg))
9548 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9549
9550 if (TREE_CODE (arg) == REAL_CST)
9551 {
9552 r = TREE_REAL_CST (arg);
9553 if (real_isinf (&r))
9554 return real_compare (GT_EXPR, &r, &dconst0)
9555 ? integer_one_node : integer_minus_one_node;
9556 else
9557 return integer_zero_node;
9558 }
9559
9560 return NULL_TREE;
9561
9562 case BUILT_IN_ISINF_SIGN:
9563 {
9564 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9565 /* In a boolean context, GCC will fold the inner COND_EXPR to
9566 1. So e.g. "if (isinf_sign(x))" would be folded to just
9567 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9568 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
9569 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
9570 tree tmp = NULL_TREE;
9571
9572 arg = builtin_save_expr (arg);
9573
9574 if (signbit_fn && isinf_fn)
9575 {
9576 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9577 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
9578
9579 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9580 signbit_call, integer_zero_node);
9581 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9582 isinf_call, integer_zero_node);
9583
9584 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
9585 integer_minus_one_node, integer_one_node);
9586 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9587 isinf_call, tmp,
9588 integer_zero_node);
9589 }
9590
9591 return tmp;
9592 }
9593
9594 case BUILT_IN_ISFINITE:
9595 if (!HONOR_NANS (arg)
9596 && !HONOR_INFINITIES (arg))
9597 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9598
9599 if (TREE_CODE (arg) == REAL_CST)
9600 {
9601 r = TREE_REAL_CST (arg);
9602 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
9603 }
9604
9605 return NULL_TREE;
9606
9607 case BUILT_IN_ISNAN:
9608 if (!HONOR_NANS (arg))
9609 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9610
9611 if (TREE_CODE (arg) == REAL_CST)
9612 {
9613 r = TREE_REAL_CST (arg);
9614 return real_isnan (&r) ? integer_one_node : integer_zero_node;
9615 }
9616
9617 arg = builtin_save_expr (arg);
9618 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
9619
9620 default:
9621 gcc_unreachable ();
9622 }
9623 }
9624
9625 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9626 This builtin will generate code to return the appropriate floating
9627 point classification depending on the value of the floating point
9628 number passed in. The possible return values must be supplied as
9629 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9630 FP_NORMAL, FP_SUBNORMAL and FP_ZERO.  The ellipsis is for exactly
9631 one floating-point argument, which is "type generic". */
9632
9633 static tree
9634 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
9635 {
9636 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9637 arg, type, res, tmp;
9638 machine_mode mode;
9639 REAL_VALUE_TYPE r;
9640 char buf[128];
9641
9642 /* Verify the required arguments in the original call. */
9643 if (nargs != 6
9644 || !validate_arg (args[0], INTEGER_TYPE)
9645 || !validate_arg (args[1], INTEGER_TYPE)
9646 || !validate_arg (args[2], INTEGER_TYPE)
9647 || !validate_arg (args[3], INTEGER_TYPE)
9648 || !validate_arg (args[4], INTEGER_TYPE)
9649 || !validate_arg (args[5], REAL_TYPE))
9650 return NULL_TREE;
9651
9652 fp_nan = args[0];
9653 fp_infinite = args[1];
9654 fp_normal = args[2];
9655 fp_subnormal = args[3];
9656 fp_zero = args[4];
9657 arg = args[5];
9658 type = TREE_TYPE (arg);
9659 mode = TYPE_MODE (type);
9660 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9661
9662 /* fpclassify(x) ->
9663 isnan(x) ? FP_NAN :
9664 (fabs(x) == Inf ? FP_INFINITE :
9665 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9666 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9667
9668 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9669 build_real (type, dconst0));
9670 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9671 tmp, fp_zero, fp_subnormal);
9672
9673 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9674 real_from_string (&r, buf);
9675 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9676 arg, build_real (type, r));
9677 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
9678
9679 if (HONOR_INFINITIES (mode))
9680 {
9681 real_inf (&r);
9682 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9683 build_real (type, r));
9684 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9685 fp_infinite, res);
9686 }
9687
9688 if (HONOR_NANS (mode))
9689 {
9690 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9691 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
9692 }
9693
9694 return res;
9695 }
9696
9697 /* Fold a call to an unordered comparison function such as
9698 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9699 being called and ARG0 and ARG1 are the arguments for the call.
9700 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9701 the opposite of the desired result. UNORDERED_CODE is used
9702 for modes that can hold NaNs and ORDERED_CODE is used for
9703 the rest. */
9704
9705 static tree
9706 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9707 enum tree_code unordered_code,
9708 enum tree_code ordered_code)
9709 {
9710 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9711 enum tree_code code;
9712 tree type0, type1;
9713 enum tree_code code0, code1;
9714 tree cmp_type = NULL_TREE;
9715
9716 type0 = TREE_TYPE (arg0);
9717 type1 = TREE_TYPE (arg1);
9718
9719 code0 = TREE_CODE (type0);
9720 code1 = TREE_CODE (type1);
9721
9722 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9723 /* Choose the wider of two real types. */
9724 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9725 ? type0 : type1;
9726 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9727 cmp_type = type0;
9728 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9729 cmp_type = type1;
9730
9731 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9732 arg1 = fold_convert_loc (loc, cmp_type, arg1);
9733
9734 if (unordered_code == UNORDERED_EXPR)
9735 {
9736 if (!HONOR_NANS (arg0))
9737 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9738 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
9739 }
9740
9741 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
9742 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9743 fold_build2_loc (loc, code, type, arg0, arg1));
9744 }
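/* Illustrative example (editorial sketch): when the operands may be
   NaN,
     isgreater (x, y) -> !(x UNLE y), i.e. TRUTH_NOT of UNLE_EXPR,
   while if NaNs need not be honored this becomes !(x <= y), and
   isunordered (x, y) collapses to 0.  */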
9745
9746 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
9747 arithmetic if it can never overflow, or into internal functions that
9748 return both the result of the arithmetic and an overflow flag in
9749 a complex integer result, or some other check for overflow. */
9750
9751 static tree
9752 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
9753 tree arg0, tree arg1, tree arg2)
9754 {
9755 enum internal_fn ifn = IFN_LAST;
9756 tree type = TREE_TYPE (TREE_TYPE (arg2));
9757 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
9758 switch (fcode)
9759 {
9760 case BUILT_IN_ADD_OVERFLOW:
9761 case BUILT_IN_SADD_OVERFLOW:
9762 case BUILT_IN_SADDL_OVERFLOW:
9763 case BUILT_IN_SADDLL_OVERFLOW:
9764 case BUILT_IN_UADD_OVERFLOW:
9765 case BUILT_IN_UADDL_OVERFLOW:
9766 case BUILT_IN_UADDLL_OVERFLOW:
9767 ifn = IFN_ADD_OVERFLOW;
9768 break;
9769 case BUILT_IN_SUB_OVERFLOW:
9770 case BUILT_IN_SSUB_OVERFLOW:
9771 case BUILT_IN_SSUBL_OVERFLOW:
9772 case BUILT_IN_SSUBLL_OVERFLOW:
9773 case BUILT_IN_USUB_OVERFLOW:
9774 case BUILT_IN_USUBL_OVERFLOW:
9775 case BUILT_IN_USUBLL_OVERFLOW:
9776 ifn = IFN_SUB_OVERFLOW;
9777 break;
9778 case BUILT_IN_MUL_OVERFLOW:
9779 case BUILT_IN_SMUL_OVERFLOW:
9780 case BUILT_IN_SMULL_OVERFLOW:
9781 case BUILT_IN_SMULLL_OVERFLOW:
9782 case BUILT_IN_UMUL_OVERFLOW:
9783 case BUILT_IN_UMULL_OVERFLOW:
9784 case BUILT_IN_UMULLL_OVERFLOW:
9785 ifn = IFN_MUL_OVERFLOW;
9786 break;
9787 default:
9788 gcc_unreachable ();
9789 }
9790 tree ctype = build_complex_type (type);
9791 tree call = build_call_expr_internal_loc (loc, ifn, ctype,
9792 2, arg0, arg1);
9793 tree tgt = save_expr (call);
9794 tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
9795 tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
9796 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
9797 tree store
9798 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
9799 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
9800 }
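/* Illustrative example (editorial sketch): the fold above turns
     ovf = __builtin_add_overflow (a, b, &r);
   into roughly
     ctmp = .ADD_OVERFLOW (a, b);  (complex result: value and flag)
     r = REALPART_EXPR <ctmp>;
     ovf = (bool) IMAGPART_EXPR <ctmp>;  */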
9801
9802 /* Fold a call to built-in function FNDECL with 0 arguments.
9803 This function returns NULL_TREE if no simplification was possible. */
9804
9805 static tree
9806 fold_builtin_0 (location_t loc, tree fndecl)
9807 {
9808 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9809 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9810 switch (fcode)
9811 {
9812 CASE_FLT_FN (BUILT_IN_INF):
9813 case BUILT_IN_INFD32:
9814 case BUILT_IN_INFD64:
9815 case BUILT_IN_INFD128:
9816 return fold_builtin_inf (loc, type, true);
9817
9818 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9819 return fold_builtin_inf (loc, type, false);
9820
9821 case BUILT_IN_CLASSIFY_TYPE:
9822 return fold_builtin_classify_type (NULL_TREE);
9823
9824 default:
9825 break;
9826 }
9827 return NULL_TREE;
9828 }
9829
9830 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9831 This function returns NULL_TREE if no simplification was possible. */
9832
9833 static tree
9834 fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
9835 {
9836 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9837 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9838 switch (fcode)
9839 {
9840 case BUILT_IN_CONSTANT_P:
9841 {
9842 tree val = fold_builtin_constant_p (arg0);
9843
9844 /* Gimplification will pull the CALL_EXPR for the builtin out of
9845 an if condition. When not optimizing, we'll not CSE it back.
9846 To avoid link-error style regressions, return false now. */
9847 if (!val && !optimize)
9848 val = integer_zero_node;
9849
9850 return val;
9851 }
9852
9853 case BUILT_IN_CLASSIFY_TYPE:
9854 return fold_builtin_classify_type (arg0);
9855
9856 case BUILT_IN_STRLEN:
9857 return fold_builtin_strlen (loc, type, arg0);
9858
9859 CASE_FLT_FN (BUILT_IN_FABS):
9860 case BUILT_IN_FABSD32:
9861 case BUILT_IN_FABSD64:
9862 case BUILT_IN_FABSD128:
9863 return fold_builtin_fabs (loc, arg0, type);
9864
9865 case BUILT_IN_ABS:
9866 case BUILT_IN_LABS:
9867 case BUILT_IN_LLABS:
9868 case BUILT_IN_IMAXABS:
9869 return fold_builtin_abs (loc, arg0, type);
9870
9871 CASE_FLT_FN (BUILT_IN_CONJ):
9872 if (validate_arg (arg0, COMPLEX_TYPE)
9873 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9874 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9875 break;
9876
9877 CASE_FLT_FN (BUILT_IN_CREAL):
9878 if (validate_arg (arg0, COMPLEX_TYPE)
9879 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9880 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
9881 break;
9882
9883 CASE_FLT_FN (BUILT_IN_CIMAG):
9884 if (validate_arg (arg0, COMPLEX_TYPE)
9885 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9886 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9887 break;
9888
9889 CASE_FLT_FN (BUILT_IN_CCOS):
9890 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ false);
9891
9892 CASE_FLT_FN (BUILT_IN_CCOSH):
9893 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ true);
9894
9895 CASE_FLT_FN (BUILT_IN_CPROJ):
9896 return fold_builtin_cproj (loc, arg0, type);
9897
9898 CASE_FLT_FN (BUILT_IN_CSIN):
9899 if (validate_arg (arg0, COMPLEX_TYPE)
9900 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9901 return do_mpc_arg1 (arg0, type, mpc_sin);
9902 break;
9903
9904 CASE_FLT_FN (BUILT_IN_CSINH):
9905 if (validate_arg (arg0, COMPLEX_TYPE)
9906 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9907 return do_mpc_arg1 (arg0, type, mpc_sinh);
9908 break;
9909
9910 CASE_FLT_FN (BUILT_IN_CTAN):
9911 if (validate_arg (arg0, COMPLEX_TYPE)
9912 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9913 return do_mpc_arg1 (arg0, type, mpc_tan);
9914 break;
9915
9916 CASE_FLT_FN (BUILT_IN_CTANH):
9917 if (validate_arg (arg0, COMPLEX_TYPE)
9918 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9919 return do_mpc_arg1 (arg0, type, mpc_tanh);
9920 break;
9921
9922 CASE_FLT_FN (BUILT_IN_CLOG):
9923 if (validate_arg (arg0, COMPLEX_TYPE)
9924 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9925 return do_mpc_arg1 (arg0, type, mpc_log);
9926 break;
9927
9928 CASE_FLT_FN (BUILT_IN_CSQRT):
9929 if (validate_arg (arg0, COMPLEX_TYPE)
9930 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9931 return do_mpc_arg1 (arg0, type, mpc_sqrt);
9932 break;
9933
9934 CASE_FLT_FN (BUILT_IN_CASIN):
9935 if (validate_arg (arg0, COMPLEX_TYPE)
9936 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9937 return do_mpc_arg1 (arg0, type, mpc_asin);
9938 break;
9939
9940 CASE_FLT_FN (BUILT_IN_CACOS):
9941 if (validate_arg (arg0, COMPLEX_TYPE)
9942 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9943 return do_mpc_arg1 (arg0, type, mpc_acos);
9944 break;
9945
9946 CASE_FLT_FN (BUILT_IN_CATAN):
9947 if (validate_arg (arg0, COMPLEX_TYPE)
9948 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9949 return do_mpc_arg1 (arg0, type, mpc_atan);
9950 break;
9951
9952 CASE_FLT_FN (BUILT_IN_CASINH):
9953 if (validate_arg (arg0, COMPLEX_TYPE)
9954 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9955 return do_mpc_arg1 (arg0, type, mpc_asinh);
9956 break;
9957
9958 CASE_FLT_FN (BUILT_IN_CACOSH):
9959 if (validate_arg (arg0, COMPLEX_TYPE)
9960 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9961 return do_mpc_arg1 (arg0, type, mpc_acosh);
9962 break;
9963
9964 CASE_FLT_FN (BUILT_IN_CATANH):
9965 if (validate_arg (arg0, COMPLEX_TYPE)
9966 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9967 return do_mpc_arg1 (arg0, type, mpc_atanh);
9968 break;
9969
9970 CASE_FLT_FN (BUILT_IN_CABS):
9971 return fold_builtin_cabs (loc, arg0, type, fndecl);
9972
9973 CASE_FLT_FN (BUILT_IN_CARG):
9974 return fold_builtin_carg (loc, arg0, type);
9975
9976 CASE_FLT_FN (BUILT_IN_SQRT):
9977 return fold_builtin_sqrt (loc, arg0, type);
9978
9979 CASE_FLT_FN (BUILT_IN_CBRT):
9980 return fold_builtin_cbrt (loc, arg0, type);
9981
9982 CASE_FLT_FN (BUILT_IN_ASIN):
9983 if (validate_arg (arg0, REAL_TYPE))
9984 return do_mpfr_arg1 (arg0, type, mpfr_asin,
9985 &dconstm1, &dconst1, true);
9986 break;
9987
9988 CASE_FLT_FN (BUILT_IN_ACOS):
9989 if (validate_arg (arg0, REAL_TYPE))
9990 return do_mpfr_arg1 (arg0, type, mpfr_acos,
9991 &dconstm1, &dconst1, true);
9992 break;
9993
9994 CASE_FLT_FN (BUILT_IN_ATAN):
9995 if (validate_arg (arg0, REAL_TYPE))
9996 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
9997 break;
9998
9999 CASE_FLT_FN (BUILT_IN_ASINH):
10000 if (validate_arg (arg0, REAL_TYPE))
10001 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
10002 break;
10003
10004 CASE_FLT_FN (BUILT_IN_ACOSH):
10005 if (validate_arg (arg0, REAL_TYPE))
10006 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
10007 &dconst1, NULL, true);
10008 break;
10009
10010 CASE_FLT_FN (BUILT_IN_ATANH):
10011 if (validate_arg (arg0, REAL_TYPE))
10012 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
10013 &dconstm1, &dconst1, false);
10014 break;
10015
10016 CASE_FLT_FN (BUILT_IN_SIN):
10017 if (validate_arg (arg0, REAL_TYPE))
10018 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
10019 break;
10020
10021 CASE_FLT_FN (BUILT_IN_COS):
10022 return fold_builtin_cos (loc, arg0, type, fndecl);
10023
10024 CASE_FLT_FN (BUILT_IN_TAN):
10025 return fold_builtin_tan (arg0, type);
10026
10027 CASE_FLT_FN (BUILT_IN_CEXP):
10028 return fold_builtin_cexp (loc, arg0, type);
10029
10030 CASE_FLT_FN (BUILT_IN_CEXPI):
10031 if (validate_arg (arg0, REAL_TYPE))
10032 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
10033 break;
10034
10035 CASE_FLT_FN (BUILT_IN_SINH):
10036 if (validate_arg (arg0, REAL_TYPE))
10037 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
10038 break;
10039
10040 CASE_FLT_FN (BUILT_IN_COSH):
10041 return fold_builtin_cosh (loc, arg0, type, fndecl);
10042
10043 CASE_FLT_FN (BUILT_IN_TANH):
10044 if (validate_arg (arg0, REAL_TYPE))
10045 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
10046 break;
10047
10048 CASE_FLT_FN (BUILT_IN_ERF):
10049 if (validate_arg (arg0, REAL_TYPE))
10050 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
10051 break;
10052
10053 CASE_FLT_FN (BUILT_IN_ERFC):
10054 if (validate_arg (arg0, REAL_TYPE))
10055 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
10056 break;
10057
10058 CASE_FLT_FN (BUILT_IN_TGAMMA):
10059 if (validate_arg (arg0, REAL_TYPE))
10060 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
10061 break;
10062
10063 CASE_FLT_FN (BUILT_IN_EXP):
10064 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
10065
10066 CASE_FLT_FN (BUILT_IN_EXP2):
10067 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
10068
10069 CASE_FLT_FN (BUILT_IN_EXP10):
10070 CASE_FLT_FN (BUILT_IN_POW10):
10071 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
10072
10073 CASE_FLT_FN (BUILT_IN_EXPM1):
10074 if (validate_arg (arg0, REAL_TYPE))
10075 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
10076 break;
10077
10078 CASE_FLT_FN (BUILT_IN_LOG):
10079 if (validate_arg (arg0, REAL_TYPE))
10080 return do_mpfr_arg1 (arg0, type, mpfr_log, &dconst0, NULL, false);
10081 break;
10082
10083 CASE_FLT_FN (BUILT_IN_LOG2):
10084 if (validate_arg (arg0, REAL_TYPE))
10085 return do_mpfr_arg1 (arg0, type, mpfr_log2, &dconst0, NULL, false);
10086 break;
10087
10088 CASE_FLT_FN (BUILT_IN_LOG10):
10089 if (validate_arg (arg0, REAL_TYPE))
10090 return do_mpfr_arg1 (arg0, type, mpfr_log10, &dconst0, NULL, false);
10091 break;
10092
10093 CASE_FLT_FN (BUILT_IN_LOG1P):
10094 if (validate_arg (arg0, REAL_TYPE))
10095 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
10096 &dconstm1, NULL, false);
10097 break;
10098
10099 CASE_FLT_FN (BUILT_IN_J0):
10100 if (validate_arg (arg0, REAL_TYPE))
10101 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10102 NULL, NULL, 0);
10103 break;
10104
10105 CASE_FLT_FN (BUILT_IN_J1):
10106 if (validate_arg (arg0, REAL_TYPE))
10107 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10108 NULL, NULL, 0);
10109 break;
10110
10111 CASE_FLT_FN (BUILT_IN_Y0):
10112 if (validate_arg (arg0, REAL_TYPE))
10113 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10114 &dconst0, NULL, false);
10115 break;
10116
10117 CASE_FLT_FN (BUILT_IN_Y1):
10118 if (validate_arg (arg0, REAL_TYPE))
10119 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10120 &dconst0, NULL, false);
10121 break;
10122
10123 CASE_FLT_FN (BUILT_IN_NAN):
10124 case BUILT_IN_NAND32:
10125 case BUILT_IN_NAND64:
10126 case BUILT_IN_NAND128:
10127 return fold_builtin_nan (arg0, type, true);
10128
10129 CASE_FLT_FN (BUILT_IN_NANS):
10130 return fold_builtin_nan (arg0, type, false);
10131
10132 CASE_FLT_FN (BUILT_IN_FLOOR):
10133 return fold_builtin_floor (loc, fndecl, arg0);
10134
10135 CASE_FLT_FN (BUILT_IN_CEIL):
10136 return fold_builtin_ceil (loc, fndecl, arg0);
10137
10138 CASE_FLT_FN (BUILT_IN_TRUNC):
10139 return fold_builtin_trunc (loc, fndecl, arg0);
10140
10141 CASE_FLT_FN (BUILT_IN_ROUND):
10142 return fold_builtin_round (loc, fndecl, arg0);
10143
10144 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10145 CASE_FLT_FN (BUILT_IN_RINT):
10146 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
10147
10148 CASE_FLT_FN (BUILT_IN_ICEIL):
10149 CASE_FLT_FN (BUILT_IN_LCEIL):
10150 CASE_FLT_FN (BUILT_IN_LLCEIL):
10151 CASE_FLT_FN (BUILT_IN_LFLOOR):
10152 CASE_FLT_FN (BUILT_IN_IFLOOR):
10153 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10154 CASE_FLT_FN (BUILT_IN_IROUND):
10155 CASE_FLT_FN (BUILT_IN_LROUND):
10156 CASE_FLT_FN (BUILT_IN_LLROUND):
10157 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
10158
10159 CASE_FLT_FN (BUILT_IN_IRINT):
10160 CASE_FLT_FN (BUILT_IN_LRINT):
10161 CASE_FLT_FN (BUILT_IN_LLRINT):
10162 return fold_fixed_mathfn (loc, fndecl, arg0);
10163
10164 case BUILT_IN_BSWAP16:
10165 case BUILT_IN_BSWAP32:
10166 case BUILT_IN_BSWAP64:
10167 return fold_builtin_bswap (fndecl, arg0);
10168
10169 CASE_INT_FN (BUILT_IN_FFS):
10170 CASE_INT_FN (BUILT_IN_CLZ):
10171 CASE_INT_FN (BUILT_IN_CTZ):
10172 CASE_INT_FN (BUILT_IN_CLRSB):
10173 CASE_INT_FN (BUILT_IN_POPCOUNT):
10174 CASE_INT_FN (BUILT_IN_PARITY):
10175 return fold_builtin_bitop (fndecl, arg0);
10176
10177 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10178 return fold_builtin_signbit (loc, arg0, type);
10179
10180 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10181 return fold_builtin_significand (loc, arg0, type);
10182
10183 CASE_FLT_FN (BUILT_IN_ILOGB):
10184 CASE_FLT_FN (BUILT_IN_LOGB):
10185 return fold_builtin_logb (loc, arg0, type);
10186
10187 case BUILT_IN_ISASCII:
10188 return fold_builtin_isascii (loc, arg0);
10189
10190 case BUILT_IN_TOASCII:
10191 return fold_builtin_toascii (loc, arg0);
10192
10193 case BUILT_IN_ISDIGIT:
10194 return fold_builtin_isdigit (loc, arg0);
10195
10196 CASE_FLT_FN (BUILT_IN_FINITE):
10197 case BUILT_IN_FINITED32:
10198 case BUILT_IN_FINITED64:
10199 case BUILT_IN_FINITED128:
10200 case BUILT_IN_ISFINITE:
10201 {
10202 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10203 if (ret)
10204 return ret;
10205 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10206 }
10207
10208 CASE_FLT_FN (BUILT_IN_ISINF):
10209 case BUILT_IN_ISINFD32:
10210 case BUILT_IN_ISINFD64:
10211 case BUILT_IN_ISINFD128:
10212 {
10213 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10214 if (ret)
10215 return ret;
10216 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10217 }
10218
10219 case BUILT_IN_ISNORMAL:
10220 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10221
10222 case BUILT_IN_ISINF_SIGN:
10223 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10224
10225 CASE_FLT_FN (BUILT_IN_ISNAN):
10226 case BUILT_IN_ISNAND32:
10227 case BUILT_IN_ISNAND64:
10228 case BUILT_IN_ISNAND128:
10229 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10230
10231 case BUILT_IN_FREE:
10232 if (integer_zerop (arg0))
10233 return build_empty_stmt (loc);
10234 break;
10235
10236 default:
10237 break;
10238 }
10239
10240 return NULL_TREE;
10242 }
10243
10244 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10245 This function returns NULL_TREE if no simplification was possible. */
10246
10247 static tree
10248 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
10249 {
10250 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10251 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10252
10253 switch (fcode)
10254 {
10255 CASE_FLT_FN (BUILT_IN_JN):
10256 if (validate_arg (arg0, INTEGER_TYPE)
10257 && validate_arg (arg1, REAL_TYPE))
10258 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10259 break;
10260
10261 CASE_FLT_FN (BUILT_IN_YN):
10262 if (validate_arg (arg0, INTEGER_TYPE)
10263 && validate_arg (arg1, REAL_TYPE))
10264 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10265 &dconst0, false);
10266 break;
10267
10268 CASE_FLT_FN (BUILT_IN_DREM):
10269 CASE_FLT_FN (BUILT_IN_REMAINDER):
10270 if (validate_arg (arg0, REAL_TYPE)
10271 && validate_arg (arg1, REAL_TYPE))
10272 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10273 break;
10274
10275 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10276 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10277 if (validate_arg (arg0, REAL_TYPE)
10278 && validate_arg (arg1, POINTER_TYPE))
10279 return do_mpfr_lgamma_r (arg0, arg1, type);
10280 break;
10281
10282 CASE_FLT_FN (BUILT_IN_ATAN2):
10283 if (validate_arg (arg0, REAL_TYPE)
10284 && validate_arg (arg1, REAL_TYPE))
10285 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10286 break;
10287
10288 CASE_FLT_FN (BUILT_IN_FDIM):
10289 if (validate_arg (arg0, REAL_TYPE)
10290 && validate_arg (arg1, REAL_TYPE))
10291 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10292 break;
10293
10294 CASE_FLT_FN (BUILT_IN_HYPOT):
10295 return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
10296
10297 CASE_FLT_FN (BUILT_IN_CPOW):
10298 if (validate_arg (arg0, COMPLEX_TYPE)
10299 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10300 && validate_arg (arg1, COMPLEX_TYPE)
10301 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
10302 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
10303 break;
10304
10305 CASE_FLT_FN (BUILT_IN_LDEXP):
10306 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
10307 CASE_FLT_FN (BUILT_IN_SCALBN):
10308 CASE_FLT_FN (BUILT_IN_SCALBLN):
10309 return fold_builtin_load_exponent (loc, arg0, arg1,
10310 type, /*ldexp=*/false);
10311
10312 CASE_FLT_FN (BUILT_IN_FREXP):
10313 return fold_builtin_frexp (loc, arg0, arg1, type);
10314
10315 CASE_FLT_FN (BUILT_IN_MODF):
10316 return fold_builtin_modf (loc, arg0, arg1, type);
10317
10318 case BUILT_IN_STRSTR:
10319 return fold_builtin_strstr (loc, arg0, arg1, type);
10320
10321 case BUILT_IN_STRSPN:
10322 return fold_builtin_strspn (loc, arg0, arg1);
10323
10324 case BUILT_IN_STRCSPN:
10325 return fold_builtin_strcspn (loc, arg0, arg1);
10326
10327 case BUILT_IN_STRCHR:
10328 case BUILT_IN_INDEX:
10329 return fold_builtin_strchr (loc, arg0, arg1, type);
10330
10331 case BUILT_IN_STRRCHR:
10332 case BUILT_IN_RINDEX:
10333 return fold_builtin_strrchr (loc, arg0, arg1, type);
10334
10335 case BUILT_IN_STRCMP:
10336 return fold_builtin_strcmp (loc, arg0, arg1);
10337
10338 case BUILT_IN_STRPBRK:
10339 return fold_builtin_strpbrk (loc, arg0, arg1, type);
10340
10341 case BUILT_IN_EXPECT:
10342 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);
10343
10344 CASE_FLT_FN (BUILT_IN_POW):
10345 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
10346
10347 CASE_FLT_FN (BUILT_IN_POWI):
10348 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
10349
10350 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10351 return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
10352
10353 CASE_FLT_FN (BUILT_IN_FMIN):
10354 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
10355
10356 CASE_FLT_FN (BUILT_IN_FMAX):
10357 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
10358
10359 case BUILT_IN_ISGREATER:
10360 return fold_builtin_unordered_cmp (loc, fndecl,
10361 arg0, arg1, UNLE_EXPR, LE_EXPR);
10362 case BUILT_IN_ISGREATEREQUAL:
10363 return fold_builtin_unordered_cmp (loc, fndecl,
10364 arg0, arg1, UNLT_EXPR, LT_EXPR);
10365 case BUILT_IN_ISLESS:
10366 return fold_builtin_unordered_cmp (loc, fndecl,
10367 arg0, arg1, UNGE_EXPR, GE_EXPR);
10368 case BUILT_IN_ISLESSEQUAL:
10369 return fold_builtin_unordered_cmp (loc, fndecl,
10370 arg0, arg1, UNGT_EXPR, GT_EXPR);
10371 case BUILT_IN_ISLESSGREATER:
10372 return fold_builtin_unordered_cmp (loc, fndecl,
10373 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10374 case BUILT_IN_ISUNORDERED:
10375 return fold_builtin_unordered_cmp (loc, fndecl,
10376 arg0, arg1, UNORDERED_EXPR,
10377 NOP_EXPR);
10378
10379 /* We do the folding for va_start in the expander. */
10380 case BUILT_IN_VA_START:
10381 break;
10382
10383 case BUILT_IN_OBJECT_SIZE:
10384 return fold_builtin_object_size (arg0, arg1);
10385
10386 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
10387 return fold_builtin_atomic_always_lock_free (arg0, arg1);
10388
10389 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
10390 return fold_builtin_atomic_is_lock_free (arg0, arg1);
10391
10392 default:
10393 break;
10394 }
10395 return NULL_TREE;
10396 }
10397
10398 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10399 and ARG2.
10400 This function returns NULL_TREE if no simplification was possible. */
10401
10402 static tree
10403 fold_builtin_3 (location_t loc, tree fndecl,
10404 tree arg0, tree arg1, tree arg2)
10405 {
10406 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10407 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10408 switch (fcode)
10409 {
10410
10411 CASE_FLT_FN (BUILT_IN_SINCOS):
10412 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10413
10414 CASE_FLT_FN (BUILT_IN_FMA):
10415 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
10417
10418 CASE_FLT_FN (BUILT_IN_REMQUO):
10419 if (validate_arg (arg0, REAL_TYPE)
10420 && validate_arg (arg1, REAL_TYPE)
10421 && validate_arg (arg2, POINTER_TYPE))
10422 return do_mpfr_remquo (arg0, arg1, arg2);
10423 break;
10424
10425 case BUILT_IN_STRNCMP:
10426 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10427
10428 case BUILT_IN_MEMCHR:
10429 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10430
10431 case BUILT_IN_BCMP:
10432 case BUILT_IN_MEMCMP:
10433 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
10434
10435 case BUILT_IN_EXPECT:
10436 return fold_builtin_expect (loc, arg0, arg1, arg2);
10437
10438 case BUILT_IN_ADD_OVERFLOW:
10439 case BUILT_IN_SUB_OVERFLOW:
10440 case BUILT_IN_MUL_OVERFLOW:
10441 case BUILT_IN_SADD_OVERFLOW:
10442 case BUILT_IN_SADDL_OVERFLOW:
10443 case BUILT_IN_SADDLL_OVERFLOW:
10444 case BUILT_IN_SSUB_OVERFLOW:
10445 case BUILT_IN_SSUBL_OVERFLOW:
10446 case BUILT_IN_SSUBLL_OVERFLOW:
10447 case BUILT_IN_SMUL_OVERFLOW:
10448 case BUILT_IN_SMULL_OVERFLOW:
10449 case BUILT_IN_SMULLL_OVERFLOW:
10450 case BUILT_IN_UADD_OVERFLOW:
10451 case BUILT_IN_UADDL_OVERFLOW:
10452 case BUILT_IN_UADDLL_OVERFLOW:
10453 case BUILT_IN_USUB_OVERFLOW:
10454 case BUILT_IN_USUBL_OVERFLOW:
10455 case BUILT_IN_USUBLL_OVERFLOW:
10456 case BUILT_IN_UMUL_OVERFLOW:
10457 case BUILT_IN_UMULL_OVERFLOW:
10458 case BUILT_IN_UMULLL_OVERFLOW:
10459 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
10460
10461 default:
10462 break;
10463 }
10464 return NULL_TREE;
10465 }
10466
10467 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10468 arguments. The trailing bool parameter (formerly IGNORE, true if the
10469 result of the call is ignored) is now unused. This function returns
10470 NULL_TREE if no simplification was possible. */
10471
10472 tree
10473 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
10474 {
10475 tree ret = NULL_TREE;
10476
10477 switch (nargs)
10478 {
10479 case 0:
10480 ret = fold_builtin_0 (loc, fndecl);
10481 break;
10482 case 1:
10483 ret = fold_builtin_1 (loc, fndecl, args[0]);
10484 break;
10485 case 2:
10486 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
10487 break;
10488 case 3:
10489 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
10490 break;
10491 default:
10492 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
10493 break;
10494 }
10495 if (ret)
10496 {
10497 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10498 SET_EXPR_LOCATION (ret, loc);
10499 TREE_NO_WARNING (ret) = 1;
10500 return ret;
10501 }
10502 return NULL_TREE;
10503 }
10504
10505 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
10506 list ARGS along with N new arguments in NEWARGS. SKIP is the number
10507 of arguments in ARGS to be omitted. OLDNARGS is the number of
10508 elements in ARGS. */
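
/* For instance, with OLDNARGS == 3 existing arguments {a, b, c},
   SKIP == 1 and N == 2 new arguments {x, y}, the call built below is
   FNDECL (x, y, b, c): the new arguments come first, followed by the
   old arguments minus the first SKIP. */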
10509
10510 static tree
10511 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
10512 int skip, tree fndecl, int n, va_list newargs)
10513 {
10514 int nargs = oldnargs - skip + n;
10515 tree *buffer;
10516
10517 if (n > 0)
10518 {
10519 int i, j;
10520
10521 buffer = XALLOCAVEC (tree, nargs);
10522 for (i = 0; i < n; i++)
10523 buffer[i] = va_arg (newargs, tree);
10524 for (j = skip; j < oldnargs; j++, i++)
10525 buffer[i] = args[j];
10526 }
10527 else
10528 buffer = args + skip;
10529
10530 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
10531 }
10532
10533 /* Return true if FNDECL shouldn't be folded right now.
10534 If a built-in function has an always_inline inline wrapper,
10535 defer folding it until after always_inline functions have
10536 been inlined; otherwise e.g. -D_FORTIFY_SOURCE checking
10537 might not be performed. */
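
/* For example, with -D_FORTIFY_SOURCE glibc's headers define wrappers
   roughly like the following (a sketch, not the exact glibc text):

     extern __inline __attribute__ ((__always_inline__)) char *
     strcpy (char *__dest, const char *__src)
     {
       return __builtin___strcpy_chk (__dest, __src,
                                      __builtin_object_size (__dest, 1));
     }

   Folding the strcpy call before the wrapper has been inlined would
   bypass the object-size check entirely. */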
10538
10539 bool
10540 avoid_folding_inline_builtin (tree fndecl)
10541 {
10542 return (DECL_DECLARED_INLINE_P (fndecl)
10543 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10544 && cfun
10545 && !cfun->always_inline_functions_inlined
10546 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
10547 }
10548
10549 /* A wrapper function for builtin folding that prevents warnings for
10550 "statement without effect" and the like, caused by removing the
10551 call node earlier than the warning is generated. */
10552
10553 tree
10554 fold_call_expr (location_t loc, tree exp, bool ignore)
10555 {
10556 tree ret = NULL_TREE;
10557 tree fndecl = get_callee_fndecl (exp);
10558 if (fndecl
10559 && TREE_CODE (fndecl) == FUNCTION_DECL
10560 && DECL_BUILT_IN (fndecl)
10561 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10562 yet. Defer folding until we see all the arguments
10563 (after inlining). */
10564 && !CALL_EXPR_VA_ARG_PACK (exp))
10565 {
10566 int nargs = call_expr_nargs (exp);
10567
10568 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10569 instead the last argument is __builtin_va_arg_pack (). Defer folding
10570 even in that case, until arguments are finalized. */
10571 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10572 {
10573 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10574 if (fndecl2
10575 && TREE_CODE (fndecl2) == FUNCTION_DECL
10576 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10577 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10578 return NULL_TREE;
10579 }
10580
10581 if (avoid_folding_inline_builtin (fndecl))
10582 return NULL_TREE;
10583
10584 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10585 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
10586 CALL_EXPR_ARGP (exp), ignore);
10587 else
10588 {
10589 tree *args = CALL_EXPR_ARGP (exp);
10590 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
10591 if (ret)
10592 return ret;
10593 }
10594 }
10595 return NULL_TREE;
10596 }
10597
10598 /* Fold a CALL_EXPR with FN as the function expression; the unnamed
10599 tree argument (formerly TYPE) is unused. N arguments are passed in the
10600 array ARGARRAY. Return NULL_TREE if no simplification was possible. */
10601
10602 tree
10603 fold_builtin_call_array (location_t loc, tree,
10604 tree fn,
10605 int n,
10606 tree *argarray)
10607 {
10608 if (TREE_CODE (fn) != ADDR_EXPR)
10609 return NULL_TREE;
10610
10611 tree fndecl = TREE_OPERAND (fn, 0);
10612 if (TREE_CODE (fndecl) == FUNCTION_DECL
10613 && DECL_BUILT_IN (fndecl))
10614 {
10615 /* If the last argument is __builtin_va_arg_pack (), arguments to this
10616 function are not finalized yet. Defer folding until they are. */
10617 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
10618 {
10619 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
10620 if (fndecl2
10621 && TREE_CODE (fndecl2) == FUNCTION_DECL
10622 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10623 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10624 return NULL_TREE;
10625 }
10626 if (avoid_folding_inline_builtin (fndecl))
10627 return NULL_TREE;
10628 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10629 return targetm.fold_builtin (fndecl, n, argarray, false);
10630 else
10631 return fold_builtin_n (loc, fndecl, argarray, n, false);
10632 }
10633
10634 return NULL_TREE;
10635 }
10636
10637 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10638 along with N new arguments specified as the "..." parameters. SKIP
10639 is the number of arguments in EXP to be omitted. This function is used
10640 to do varargs-to-varargs transformations. */
10641
10642 static tree
10643 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
10644 {
10645 va_list ap;
10646 tree t;
10647
10648 va_start (ap, n);
10649 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
10650 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
10651 va_end (ap);
10652
10653 return t;
10654 }
10655
10656 /* Validate a single argument ARG against a tree code CODE representing
10657 a type. */
10658
10659 static bool
10660 validate_arg (const_tree arg, enum tree_code code)
10661 {
10662 if (!arg)
10663 return false;
10664 else if (code == POINTER_TYPE)
10665 return POINTER_TYPE_P (TREE_TYPE (arg));
10666 else if (code == INTEGER_TYPE)
10667 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10668 return code == TREE_CODE (TREE_TYPE (arg));
10669 }
10670
10671 /* This function validates the types of a function call argument list
10672 against a specified list of tree_codes. If the last specifier is a 0,
10673 that represents an ellipsis; otherwise the last specifier must be a
10674 VOID_TYPE.
10675
10676 This is the GIMPLE version of validate_arglist. Eventually we want to
10677 completely convert builtins.c to work from GIMPLEs and the tree based
10678 validate_arglist will then be removed. */
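
/* A typical use in a gimple folder, checking a pointer/integer call
   such as strchr, looks like:

     if (!validate_gimple_arglist (call, POINTER_TYPE, INTEGER_TYPE,
                                   VOID_TYPE))
       return false;

   a call with trailing varargs would end the specifier list with 0
   instead of VOID_TYPE. */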
10679
10680 bool
10681 validate_gimple_arglist (const gcall *call, ...)
10682 {
10683 enum tree_code code;
10684 bool res = false;
10685 va_list ap;
10686 const_tree arg;
10687 size_t i;
10688
10689 va_start (ap, call);
10690 i = 0;
10691
10692 do
10693 {
10694 code = (enum tree_code) va_arg (ap, int);
10695 switch (code)
10696 {
10697 case 0:
10698 /* This signifies an ellipsis; any further arguments are all ok. */
10699 res = true;
10700 goto end;
10701 case VOID_TYPE:
10702 /* This signifies an endlink: if no arguments remain, return
10703 true, otherwise return false. */
10704 res = (i == gimple_call_num_args (call));
10705 goto end;
10706 default:
10707 /* If no parameters remain or the parameter's code does not
10708 match the specified code, return false. Otherwise continue
10709 checking any remaining arguments. */
10710 arg = gimple_call_arg (call, i++);
10711 if (!validate_arg (arg, code))
10712 goto end;
10713 break;
10714 }
10715 }
10716 while (1);
10717
10718 /* We need gotos here so that va_end is reached on every
10719 path out of the function. */
10720 end: ;
10721 va_end (ap);
10722
10723 return res;
10724 }
10725
10726 /* Default target-specific builtin expander that does nothing. */
10727
10728 rtx
10729 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10730 rtx target ATTRIBUTE_UNUSED,
10731 rtx subtarget ATTRIBUTE_UNUSED,
10732 machine_mode mode ATTRIBUTE_UNUSED,
10733 int ignore ATTRIBUTE_UNUSED)
10734 {
10735 return NULL_RTX;
10736 }
10737
10738 /* Returns true if EXP represents data that would potentially reside
10739 in a readonly section. */
10740
10741 bool
10742 readonly_data_expr (tree exp)
10743 {
10744 STRIP_NOPS (exp);
10745
10746 if (TREE_CODE (exp) != ADDR_EXPR)
10747 return false;
10748
10749 exp = get_base_address (TREE_OPERAND (exp, 0));
10750 if (!exp)
10751 return false;
10752
10753 /* Make sure we call decl_readonly_section only for trees it
10754 can handle (since it returns true for everything it doesn't
10755 understand). */
10756 if (TREE_CODE (exp) == STRING_CST
10757 || TREE_CODE (exp) == CONSTRUCTOR
10758 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
10759 return decl_readonly_section (exp, 0);
10760 else
10761 return false;
10762 }
10763
10764 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
10765 to the call, and TYPE is its return type.
10766
10767 Return NULL_TREE if no simplification was possible, otherwise return the
10768 simplified form of the call as a tree.
10769
10770 The simplified form may be a constant or other expression which
10771 computes the same value, but in a more efficient manner (including
10772 calls to other builtin functions).
10773
10774 The call may contain arguments which need to be evaluated, but
10775 which are not useful to determine the result of the call. In
10776 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10777 COMPOUND_EXPR will be an argument which must be evaluated.
10778 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10779 COMPOUND_EXPR in the chain will contain the tree for the simplified
10780 form of the builtin function call. */
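
/* Concretely, the folds performed below are:

     strstr ("hello", "ll") -> "hello" + 2   (both strings constant)
     strstr (s, "")         -> (char *) s
     strstr (s, "l")        -> strchr (s, 'l')

   anything else is left to the library call. */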
10781
10782 static tree
10783 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
10784 {
10785 if (!validate_arg (s1, POINTER_TYPE)
10786 || !validate_arg (s2, POINTER_TYPE))
10787 return NULL_TREE;
10788 else
10789 {
10790 tree fn;
10791 const char *p1, *p2;
10792
10793 p2 = c_getstr (s2);
10794 if (p2 == NULL)
10795 return NULL_TREE;
10796
10797 p1 = c_getstr (s1);
10798 if (p1 != NULL)
10799 {
10800 const char *r = strstr (p1, p2);
10801 tree tem;
10802
10803 if (r == NULL)
10804 return build_int_cst (TREE_TYPE (s1), 0);
10805
10806 /* Return an offset into the constant string argument. */
10807 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10808 return fold_convert_loc (loc, type, tem);
10809 }
10810
10811 /* The argument is const char *, and the result is char *, so we need
10812 a type conversion here to avoid a warning. */
10813 if (p2[0] == '\0')
10814 return fold_convert_loc (loc, type, s1);
10815
10816 if (p2[1] != '\0')
10817 return NULL_TREE;
10818
10819 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
10820 if (!fn)
10821 return NULL_TREE;
10822
10823 /* New argument list transforming strstr(s1, s2) to
10824 strchr(s1, s2[0]). */
10825 return build_call_expr_loc (loc, fn, 2, s1,
10826 build_int_cst (integer_type_node, p2[0]));
10827 }
10828 }
10829
10830 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
10831 the call, and TYPE is its return type.
10832
10833 Return NULL_TREE if no simplification was possible, otherwise return the
10834 simplified form of the call as a tree.
10835
10836 The simplified form may be a constant or other expression which
10837 computes the same value, but in a more efficient manner (including
10838 calls to other builtin functions).
10839
10840 The call may contain arguments which need to be evaluated, but
10841 which are not useful to determine the result of the call. In
10842 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10843 COMPOUND_EXPR will be an argument which must be evaluated.
10844 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10845 COMPOUND_EXPR in the chain will contain the tree for the simplified
10846 form of the builtin function call. */
10847
10848 static tree
10849 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
10850 {
10851 if (!validate_arg (s1, POINTER_TYPE)
10852 || !validate_arg (s2, INTEGER_TYPE))
10853 return NULL_TREE;
10854 else
10855 {
10856 const char *p1;
10857
10858 if (TREE_CODE (s2) != INTEGER_CST)
10859 return NULL_TREE;
10860
10861 p1 = c_getstr (s1);
10862 if (p1 != NULL)
10863 {
10864 char c;
10865 const char *r;
10866 tree tem;
10867
10868 if (target_char_cast (s2, &c))
10869 return NULL_TREE;
10870
10871 r = strchr (p1, c);
10872
10873 if (r == NULL)
10874 return build_int_cst (TREE_TYPE (s1), 0);
10875
10876 /* Return an offset into the constant string argument. */
10877 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10878 return fold_convert_loc (loc, type, tem);
10879 }
10880 return NULL_TREE;
10881 }
10882 }
10883
10884 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
10885 the call, and TYPE is its return type.
10886
10887 Return NULL_TREE if no simplification was possible, otherwise return the
10888 simplified form of the call as a tree.
10889
10890 The simplified form may be a constant or other expression which
10891 computes the same value, but in a more efficient manner (including
10892 calls to other builtin functions).
10893
10894 The call may contain arguments which need to be evaluated, but
10895 which are not useful to determine the result of the call. In
10896 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10897 COMPOUND_EXPR will be an argument which must be evaluated.
10898 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10899 COMPOUND_EXPR in the chain will contain the tree for the simplified
10900 form of the builtin function call. */
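
/* Concretely, the folds performed below are:

     strrchr ("hello", 'l') -> "hello" + 3   (constant haystack)
     strrchr (s, '\0')      -> strchr (s, '\0')

   anything else is left to the library call. */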
10901
10902 static tree
10903 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
10904 {
10905 if (!validate_arg (s1, POINTER_TYPE)
10906 || !validate_arg (s2, INTEGER_TYPE))
10907 return NULL_TREE;
10908 else
10909 {
10910 tree fn;
10911 const char *p1;
10912
10913 if (TREE_CODE (s2) != INTEGER_CST)
10914 return NULL_TREE;
10915
10916 p1 = c_getstr (s1);
10917 if (p1 != NULL)
10918 {
10919 char c;
10920 const char *r;
10921 tree tem;
10922
10923 if (target_char_cast (s2, &c))
10924 return NULL_TREE;
10925
10926 r = strrchr (p1, c);
10927
10928 if (r == NULL)
10929 return build_int_cst (TREE_TYPE (s1), 0);
10930
10931 /* Return an offset into the constant string argument. */
10932 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10933 return fold_convert_loc (loc, type, tem);
10934 }
10935
10936 if (! integer_zerop (s2))
10937 return NULL_TREE;
10938
10939 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
10940 if (!fn)
10941 return NULL_TREE;
10942
10943 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
10944 return build_call_expr_loc (loc, fn, 2, s1, s2);
10945 }
10946 }
10947
10948 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
10949 to the call, and TYPE is its return type.
10950
10951 Return NULL_TREE if no simplification was possible, otherwise return the
10952 simplified form of the call as a tree.
10953
10954 The simplified form may be a constant or other expression which
10955 computes the same value, but in a more efficient manner (including
10956 calls to other builtin functions).
10957
10958 The call may contain arguments which need to be evaluated, but
10959 which are not useful to determine the result of the call. In
10960 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10961 COMPOUND_EXPR will be an argument which must be evaluated.
10962 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10963 COMPOUND_EXPR in the chain will contain the tree for the simplified
10964 form of the builtin function call. */
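
/* Concretely, the folds performed below are:

     strpbrk ("abc", "bc") -> "abc" + 1      (both strings constant)
     strpbrk (x, "")       -> (char *) 0, still evaluating x
     strpbrk (s, "c")      -> strchr (s, 'c')

   anything else is left to the library call. */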
10965
10966 static tree
10967 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
10968 {
10969 if (!validate_arg (s1, POINTER_TYPE)
10970 || !validate_arg (s2, POINTER_TYPE))
10971 return NULL_TREE;
10972 else
10973 {
10974 tree fn;
10975 const char *p1, *p2;
10976
10977 p2 = c_getstr (s2);
10978 if (p2 == NULL)
10979 return NULL_TREE;
10980
10981 p1 = c_getstr (s1);
10982 if (p1 != NULL)
10983 {
10984 const char *r = strpbrk (p1, p2);
10985 tree tem;
10986
10987 if (r == NULL)
10988 return build_int_cst (TREE_TYPE (s1), 0);
10989
10990 /* Return an offset into the constant string argument. */
10991 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10992 return fold_convert_loc (loc, type, tem);
10993 }
10994
10995 if (p2[0] == '\0')
10996 /* strpbrk(x, "") == NULL.
10997 Evaluate and ignore s1 in case it had side-effects. */
10998 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
10999
11000 if (p2[1] != '\0')
11001 return NULL_TREE; /* Really call strpbrk. */
11002
11003 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11004 if (!fn)
11005 return NULL_TREE;
11006
11007 /* New argument list transforming strpbrk(s1, s2) to
11008 strchr(s1, s2[0]). */
11009 return build_call_expr_loc (loc, fn, 2, s1,
11010 build_int_cst (integer_type_node, p2[0]));
11011 }
11012 }
11013
11014 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11015 to the call.
11016
11017 Return NULL_TREE if no simplification was possible, otherwise return the
11018 simplified form of the call as a tree.
11019
11020 The simplified form may be a constant or other expression which
11021 computes the same value, but in a more efficient manner (including
11022 calls to other builtin functions).
11023
11024 The call may contain arguments which need to be evaluated, but
11025 which are not useful to determine the result of the call. In
11026 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11027 COMPOUND_EXPR will be an argument which must be evaluated.
11028 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11029 COMPOUND_EXPR in the chain will contain the tree for the simplified
11030 form of the builtin function call. */
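
/* Concretely, the folds performed below are:

     strspn ("aab", "ab") -> 3               (both strings constant)
     strspn (s, "") -> 0 and strspn ("", s) -> 0, still evaluating
     the arguments for side-effects

   anything else is left to the library call. */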
11031
11032 static tree
11033 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11034 {
11035 if (!validate_arg (s1, POINTER_TYPE)
11036 || !validate_arg (s2, POINTER_TYPE))
11037 return NULL_TREE;
11038 else
11039 {
11040 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11041
11042 /* If both arguments are constants, evaluate at compile-time. */
11043 if (p1 && p2)
11044 {
11045 const size_t r = strspn (p1, p2);
11046 return build_int_cst (size_type_node, r);
11047 }
11048
11049 /* If either argument is "", the result is 0. */
11050 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11051 /* Evaluate and ignore both arguments in case either one has
11052 side-effects. */
11053 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11054 s1, s2);
11055 return NULL_TREE;
11056 }
11057 }
11058
11059 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11060 to the call.
11061
11062 Return NULL_TREE if no simplification was possible, otherwise return the
11063 simplified form of the call as a tree.
11064
11065 The simplified form may be a constant or other expression which
11066 computes the same value, but in a more efficient manner (including
11067 calls to other builtin functions).
11068
11069 The call may contain arguments which need to be evaluated, but
11070 which are not useful to determine the result of the call. In
11071 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11072 COMPOUND_EXPR will be an argument which must be evaluated.
11073 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11074 COMPOUND_EXPR in the chain will contain the tree for the simplified
11075 form of the builtin function call. */
11076
11077 static tree
11078 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
11079 {
11080 if (!validate_arg (s1, POINTER_TYPE)
11081 || !validate_arg (s2, POINTER_TYPE))
11082 return NULL_TREE;
11083 else
11084 {
11085 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11086
11087 /* If both arguments are constants, evaluate at compile-time. */
11088 if (p1 && p2)
11089 {
11090 const size_t r = strcspn (p1, p2);
11091 return build_int_cst (size_type_node, r);
11092 }
11093
11094 /* If the first argument is "", the result is 0. */
11095 if (p1 && *p1 == '\0')
11096 {
11097 /* Evaluate and ignore argument s2 in case it has
11098 side-effects. */
11099 return omit_one_operand_loc (loc, size_type_node,
11100 size_zero_node, s2);
11101 }
11102
11103 /* If the second argument is "", return __builtin_strlen(s1). */
11104 if (p2 && *p2 == '\0')
11105 {
11106 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
11107
11108 /* If the replacement _DECL isn't initialized, don't do the
11109 transformation. */
11110 if (!fn)
11111 return NULL_TREE;
11112
11113 return build_call_expr_loc (loc, fn, 1, s1);
11114 }
11115 return NULL_TREE;
11116 }
11117 }
11118
11119 /* Fold the next_arg or va_start call EXP. Return true if an error
11120 was produced, false otherwise. This is done so that we don't output
11121 the same error or warning two or three times. */
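
/* For example, both of these are diagnosed below:

     void f (int a, int b, ...)
     { va_list ap; va_start (ap, a); }   // not last named argument

     void g (int a)
     { va_list ap; va_start (ap, a); }   // function with fixed args
   */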
11122
11123 bool
11124 fold_builtin_next_arg (tree exp, bool va_start_p)
11125 {
11126 tree fntype = TREE_TYPE (current_function_decl);
11127 int nargs = call_expr_nargs (exp);
11128 tree arg;
11129 /* There is a good chance the current input_location points inside the
11130 definition of the va_start macro (perhaps on the token for the
11131 builtin) in a system header, so warnings will not be emitted.
11132 Use the location in real source code. */
11133 source_location current_location =
11134 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
11135 NULL);
11136
11137 if (!stdarg_p (fntype))
11138 {
11139 error ("%<va_start%> used in function with fixed args");
11140 return true;
11141 }
11142
11143 if (va_start_p)
11144 {
11145 if (nargs != 2)
11146 {
11147 error ("wrong number of arguments to function %<va_start%>");
11148 return true;
11149 }
11150 arg = CALL_EXPR_ARG (exp, 1);
11151 }
11152 /* We use __builtin_va_start (ap, 0) or __builtin_next_arg (0) once
11153 we have checked the arguments and, if needed, issued a warning. */
11154 else
11155 {
11156 if (nargs == 0)
11157 {
11158 /* Evidently an out of date version of <stdarg.h>; can't validate
11159 va_start's second argument, but can still work as intended. */
11160 warning_at (current_location,
11161 OPT_Wvarargs,
11162 "%<__builtin_next_arg%> called without an argument");
11163 return true;
11164 }
11165 else if (nargs > 1)
11166 {
11167 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11168 return true;
11169 }
11170 arg = CALL_EXPR_ARG (exp, 0);
11171 }
11172
11173 if (TREE_CODE (arg) == SSA_NAME)
11174 arg = SSA_NAME_VAR (arg);
11175
11176 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11177 or __builtin_next_arg (0) the first time we see it, after checking
11178 the arguments and if needed issuing a warning. */
11179 if (!integer_zerop (arg))
11180 {
11181 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11182
11183 /* Strip off all nops for the sake of the comparison. This
11184 is not quite the same as STRIP_NOPS. It does more.
11185 We must also strip off INDIRECT_EXPR for C++ reference
11186 parameters. */
11187 while (CONVERT_EXPR_P (arg)
11188 || TREE_CODE (arg) == INDIRECT_REF)
11189 arg = TREE_OPERAND (arg, 0);
11190 if (arg != last_parm)
11191 {
11192 /* FIXME: Sometimes the tree optimizers hand us something other
11193 than the last argument even though the user used the last
11194 argument. We just warn and set the arg to be the last
11195 argument, so we may still generate wrong code because of
11196 it. */
11197 warning_at (current_location,
11198 OPT_Wvarargs,
11199 "second parameter of %<va_start%> not last named argument");
11200 }
11201
11202 /* Undefined by C99 7.15.1.4p4 (va_start):
11203 "If the parameter parmN is declared with the register storage
11204 class, with a function or array type, or with a type that is
11205 not compatible with the type that results after application of
11206 the default argument promotions, the behavior is undefined."
11207 */
11208 else if (DECL_REGISTER (arg))
11209 {
11210 warning_at (current_location,
11211 OPT_Wvarargs,
11212 "undefined behaviour when second parameter of "
11213 "%<va_start%> is declared with %<register%> storage");
11214 }
11215
11216 /* We want to verify the second parameter just once before the tree
11217 optimizers are run and then avoid keeping it in the tree,
11218 as otherwise we could warn even for correct code like:
11219 void foo (int i, ...)
11220 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11221 if (va_start_p)
11222 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11223 else
11224 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11225 }
11226 return false;
11227 }
11228
11229
11230 /* Expand a call EXP to __builtin_object_size. */
11231
11232 static rtx
11233 expand_builtin_object_size (tree exp)
11234 {
11235 tree ost;
11236 int object_size_type;
11237 tree fndecl = get_callee_fndecl (exp);
11238
11239 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11240 {
11241 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11242 exp, fndecl);
11243 expand_builtin_trap ();
11244 return const0_rtx;
11245 }
11246
11247 ost = CALL_EXPR_ARG (exp, 1);
11248 STRIP_NOPS (ost);
11249
11250 if (TREE_CODE (ost) != INTEGER_CST
11251 || tree_int_cst_sgn (ost) < 0
11252 || compare_tree_int (ost, 3) > 0)
11253 {
11254 error ("%Klast argument of %D is not integer constant between 0 and 3",
11255 exp, fndecl);
11256 expand_builtin_trap ();
11257 return const0_rtx;
11258 }
11259
11260 object_size_type = tree_to_shwi (ost);
11261
11262 return object_size_type < 2 ? constm1_rtx : const0_rtx;
11263 }
11264
11265 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11266 FCODE is the BUILT_IN_* to use.
11267 Return NULL_RTX if we failed; the caller should emit a normal call,
11268 otherwise try to get the result in TARGET, if convenient (and in
11269 mode MODE if that's convenient). */
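
/* For instance, when nothing is known about the object size, SIZE is
   (size_t) -1 and a checked call such as

     __builtin___memcpy_chk (dst, src, len, (size_t) -1)

   is emitted below as a plain memcpy (dst, src, len), while a
   constant LEN known to exceed a known SIZE draws the "will always
   overflow" warning instead. */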
11270
11271 static rtx
11272 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
11273 enum built_in_function fcode)
11274 {
11275 tree dest, src, len, size;
11276
11277 if (!validate_arglist (exp,
11278 POINTER_TYPE,
11279 fcode == BUILT_IN_MEMSET_CHK
11280 ? INTEGER_TYPE : POINTER_TYPE,
11281 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11282 return NULL_RTX;
11283
11284 dest = CALL_EXPR_ARG (exp, 0);
11285 src = CALL_EXPR_ARG (exp, 1);
11286 len = CALL_EXPR_ARG (exp, 2);
11287 size = CALL_EXPR_ARG (exp, 3);
11288
11289 if (! tree_fits_uhwi_p (size))
11290 return NULL_RTX;
11291
11292 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
11293 {
11294 tree fn;
11295
11296 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
11297 {
11298 warning_at (tree_nonartificial_location (exp),
11299 0, "%Kcall to %D will always overflow destination buffer",
11300 exp, get_callee_fndecl (exp));
11301 return NULL_RTX;
11302 }
11303
11304 fn = NULL_TREE;
11305 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11306 mem{cpy,pcpy,move,set} is available. */
11307 switch (fcode)
11308 {
11309 case BUILT_IN_MEMCPY_CHK:
11310 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
11311 break;
11312 case BUILT_IN_MEMPCPY_CHK:
11313 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
11314 break;
11315 case BUILT_IN_MEMMOVE_CHK:
11316 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
11317 break;
11318 case BUILT_IN_MEMSET_CHK:
11319 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
11320 break;
11321 default:
11322 break;
11323 }
11324
11325 if (! fn)
11326 return NULL_RTX;
11327
11328 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
11329 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11330 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11331 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11332 }
11333 else if (fcode == BUILT_IN_MEMSET_CHK)
11334 return NULL_RTX;
11335 else
11336 {
11337 unsigned int dest_align = get_pointer_alignment (dest);
11338
11339 /* If DEST is not a pointer type, call the normal function. */
11340 if (dest_align == 0)
11341 return NULL_RTX;
11342
11343 /* If SRC and DEST are the same (and not volatile), do nothing. */
11344 if (operand_equal_p (src, dest, 0))
11345 {
11346 tree expr;
11347
11348 if (fcode != BUILT_IN_MEMPCPY_CHK)
11349 {
11350 /* Evaluate and ignore LEN in case it has side-effects. */
11351 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11352 return expand_expr (dest, target, mode, EXPAND_NORMAL);
11353 }
11354
11355 expr = fold_build_pointer_plus (dest, len);
11356 return expand_expr (expr, target, mode, EXPAND_NORMAL);
11357 }
11358
11359 /* __memmove_chk special case. */
11360 if (fcode == BUILT_IN_MEMMOVE_CHK)
11361 {
11362 unsigned int src_align = get_pointer_alignment (src);
11363
11364 if (src_align == 0)
11365 return NULL_RTX;
11366
11367 /* If src is categorized for a readonly section we can use
11368 normal __memcpy_chk. */
11369 if (readonly_data_expr (src))
11370 {
11371 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
11372 if (!fn)
11373 return NULL_RTX;
11374 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
11375 dest, src, len, size);
11376 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11377 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11378 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11379 }
11380 }
11381 return NULL_RTX;
11382 }
11383 }
11384
11385 /* Emit warning if a buffer overflow is detected at compile time. */
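
/* For example, given

     char buf[4];
     __builtin___strcpy_chk (buf, "hello", sizeof buf);

   the constant source needs strlen ("hello") + 1 == 6 > 4 bytes, so
   the "will always overflow destination buffer" warning fires. */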
11386
11387 static void
11388 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11389 {
11390 int is_strlen = 0;
11391 tree len, size;
11392 location_t loc = tree_nonartificial_location (exp);
11393
11394 switch (fcode)
11395 {
11396 case BUILT_IN_STRCPY_CHK:
11397 case BUILT_IN_STPCPY_CHK:
11398 /* For __strcat_chk the warning will be emitted only if overflowing
11399 by at least strlen (dest) + 1 bytes. */
11400 case BUILT_IN_STRCAT_CHK:
11401 len = CALL_EXPR_ARG (exp, 1);
11402 size = CALL_EXPR_ARG (exp, 2);
11403 is_strlen = 1;
11404 break;
11405 case BUILT_IN_STRNCAT_CHK:
11406 case BUILT_IN_STRNCPY_CHK:
11407 case BUILT_IN_STPNCPY_CHK:
11408 len = CALL_EXPR_ARG (exp, 2);
11409 size = CALL_EXPR_ARG (exp, 3);
11410 break;
11411 case BUILT_IN_SNPRINTF_CHK:
11412 case BUILT_IN_VSNPRINTF_CHK:
11413 len = CALL_EXPR_ARG (exp, 1);
11414 size = CALL_EXPR_ARG (exp, 3);
11415 break;
11416 default:
11417 gcc_unreachable ();
11418 }
11419
11420 if (!len || !size)
11421 return;
11422
11423 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
11424 return;
11425
11426 if (is_strlen)
11427 {
11428 len = c_strlen (len, 1);
11429 if (! len || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
11430 return;
11431 }
11432 else if (fcode == BUILT_IN_STRNCAT_CHK)
11433 {
11434 tree src = CALL_EXPR_ARG (exp, 1);
11435 if (! src || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
11436 return;
11437 src = c_strlen (src, 1);
11438 if (! src || ! tree_fits_uhwi_p (src))
11439 {
11440 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
11441 exp, get_callee_fndecl (exp));
11442 return;
11443 }
11444 else if (tree_int_cst_lt (src, size))
11445 return;
11446 }
11447 else if (! tree_fits_uhwi_p (len) || ! tree_int_cst_lt (size, len))
11448 return;
11449
11450 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
11451 exp, get_callee_fndecl (exp));
11452 }
11453
11454 /* Emit warning if a buffer overflow is detected at compile time
11455 in __sprintf_chk/__vsprintf_chk calls. */
11456
11457 static void
11458 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11459 {
11460 tree size, len, fmt;
11461 const char *fmt_str;
11462 int nargs = call_expr_nargs (exp);
11463
11464 /* Verify the required arguments in the original call. */
11465
11466 if (nargs < 4)
11467 return;
11468 size = CALL_EXPR_ARG (exp, 2);
11469 fmt = CALL_EXPR_ARG (exp, 3);
11470
11471 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
11472 return;
11473
11474 /* Check whether the format is a literal string constant. */
11475 fmt_str = c_getstr (fmt);
11476 if (fmt_str == NULL)
11477 return;
11478
11479 if (!init_target_chars ())
11480 return;
11481
11482 /* If the format doesn't contain % args or %%, we know its size. */
11483 if (strchr (fmt_str, target_percent) == 0)
11484 len = build_int_cstu (size_type_node, strlen (fmt_str));
11485 /* If the format is "%s" and first ... argument is a string literal,
11486 we know it too. */
11487 else if (fcode == BUILT_IN_SPRINTF_CHK
11488 && strcmp (fmt_str, target_percent_s) == 0)
11489 {
11490 tree arg;
11491
11492 if (nargs < 5)
11493 return;
11494 arg = CALL_EXPR_ARG (exp, 4);
11495 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11496 return;
11497
11498 len = c_strlen (arg, 1);
11499 if (!len || ! tree_fits_uhwi_p (len))
11500 return;
11501 }
11502 else
11503 return;
11504
11505 if (! tree_int_cst_lt (len, size))
11506 warning_at (tree_nonartificial_location (exp),
11507 0, "%Kcall to %D will always overflow destination buffer",
11508 exp, get_callee_fndecl (exp));
11509 }
11510
11511 /* Emit a warning if free is called with the address of a variable. */
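
/* For example:

     int x;
     free (&x);   // "attempt to free a non-heap object 'x'"
   */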
11512
11513 static void
11514 maybe_emit_free_warning (tree exp)
11515 {
11516 tree arg = CALL_EXPR_ARG (exp, 0);
11517
11518 STRIP_NOPS (arg);
11519 if (TREE_CODE (arg) != ADDR_EXPR)
11520 return;
11521
11522 arg = get_base_address (TREE_OPERAND (arg, 0));
11523 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
11524 return;
11525
11526 if (SSA_VAR_P (arg))
11527 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
11528 "%Kattempt to free a non-heap object %qD", exp, arg);
11529 else
11530 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
11531 "%Kattempt to free a non-heap object", exp);
11532 }
11533
11534 /* Fold a call to __builtin_object_size with arguments PTR and OST,
11535 if possible. */
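
/* For example, given

     char buf[8];

   __builtin_object_size (&buf[2], 0) folds to 6 below, while a
   pointer whose target is unknown folds to (size_t) -1 for types
   0 and 1 and to (size_t) 0 for types 2 and 3. */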
11536
11537 static tree
11538 fold_builtin_object_size (tree ptr, tree ost)
11539 {
11540 unsigned HOST_WIDE_INT bytes;
11541 int object_size_type;
11542
11543 if (!validate_arg (ptr, POINTER_TYPE)
11544 || !validate_arg (ost, INTEGER_TYPE))
11545 return NULL_TREE;
11546
11547 STRIP_NOPS (ost);
11548
11549 if (TREE_CODE (ost) != INTEGER_CST
11550 || tree_int_cst_sgn (ost) < 0
11551 || compare_tree_int (ost, 3) > 0)
11552 return NULL_TREE;
11553
11554 object_size_type = tree_to_shwi (ost);
11555
11556 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11557 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11558 and (size_t) 0 for types 2 and 3. */
11559 if (TREE_SIDE_EFFECTS (ptr))
11560 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
11561
11562 if (TREE_CODE (ptr) == ADDR_EXPR)
11563 {
11564 bytes = compute_builtin_object_size (ptr, object_size_type);
11565 if (wi::fits_to_tree_p (bytes, size_type_node))
11566 return build_int_cstu (size_type_node, bytes);
11567 }
11568 else if (TREE_CODE (ptr) == SSA_NAME)
11569 {
11570 /* If object size is not known yet, delay folding until
11571 later. Maybe subsequent passes will help determine
11572 it. */
11573 bytes = compute_builtin_object_size (ptr, object_size_type);
11574 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
11575 && wi::fits_to_tree_p (bytes, size_type_node))
11576 return build_int_cstu (size_type_node, bytes);
11577 }
11578
11579 return NULL_TREE;
11580 }
11581
11582 /* Builtins with folding operations that operate on "..." arguments
11583 need special handling; we need to store the arguments in a convenient
11584 data structure before attempting any folding. Fortunately there are
11585 only a few builtins that fall into this category. FNDECL is the
11586 function, ARGS is the array of its NARGS arguments. */
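
/* The only such builtin folded here is __builtin_fpclassify, whose
   call carries the five classification values ahead of the operand,
   e.g.

     __builtin_fpclassify (FP_NAN, FP_INFINITE, FP_NORMAL,
                           FP_SUBNORMAL, FP_ZERO, x)

   which is how <math.h> typically implements fpclassify (x). */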
11587
11588 static tree
11589 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
11590 {
11591 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11592 tree ret = NULL_TREE;
11593
11594 switch (fcode)
11595 {
11596 case BUILT_IN_FPCLASSIFY:
11597 ret = fold_builtin_fpclassify (loc, args, nargs);
11598 break;
11599
11600 default:
11601 break;
11602 }
11603 if (ret)
11604 {
11605 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11606 SET_EXPR_LOCATION (ret, loc);
11607 TREE_NO_WARNING (ret) = 1;
11608 return ret;
11609 }
11610 return NULL_TREE;
11611 }
11612
11613 /* Initialize format string characters in the target charset. */
11614
11615 bool
11616 init_target_chars (void)
11617 {
11618 static bool init;
11619 if (!init)
11620 {
11621 target_newline = lang_hooks.to_target_charset ('\n');
11622 target_percent = lang_hooks.to_target_charset ('%');
11623 target_c = lang_hooks.to_target_charset ('c');
11624 target_s = lang_hooks.to_target_charset ('s');
11625 if (target_newline == 0 || target_percent == 0 || target_c == 0
11626 || target_s == 0)
11627 return false;
11628
11629 target_percent_c[0] = target_percent;
11630 target_percent_c[1] = target_c;
11631 target_percent_c[2] = '\0';
11632
11633 target_percent_s[0] = target_percent;
11634 target_percent_s[1] = target_s;
11635 target_percent_s[2] = '\0';
11636
11637 target_percent_s_newline[0] = target_percent;
11638 target_percent_s_newline[1] = target_s;
11639 target_percent_s_newline[2] = target_newline;
11640 target_percent_s_newline[3] = '\0';
11641
11642 init = true;
11643 }
11644 return true;
11645 }
11646
11647 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
11648 and no overflow/underflow occurred. INEXACT is true if M was not
11649 exactly calculated. TYPE is the tree type for the result. This
11650 function assumes that you cleared the MPFR flags before
11651 calculating M, so that any flag set afterwards can be detected
11652 on entry to this function. Return NULL_TREE if any checks fail. */
11653
11654 static tree
11655 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
11656 {
11657 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11658 overflow/underflow occurred. If -frounding-math, proceed iff the
11659 result of calling FUNC was exact. */
11660 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
11661 && (!flag_rounding_math || !inexact))
11662 {
11663 REAL_VALUE_TYPE rr;
11664
11665 real_from_mpfr (&rr, m, type, GMP_RNDN);
11666 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
11667 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
11668 but the mpfr_t is not, then we underflowed in the
11669 conversion. */
11670 if (real_isfinite (&rr)
11671 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
11672 {
11673 REAL_VALUE_TYPE rmode;
11674
11675 real_convert (&rmode, TYPE_MODE (type), &rr);
11676 /* Proceed iff the specified mode can hold the value. */
11677 if (real_identical (&rmode, &rr))
11678 return build_real (type, rmode);
11679 }
11680 }
11681 return NULL_TREE;
11682 }
11683
11684 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
11685 number and no overflow/underflow occurred. INEXACT is true if M
11686 was not exactly calculated. TYPE is the tree type for the result.
11687 This function assumes that you cleared the MPFR flags before
11688 calculating M, so that any flag set afterwards can be detected
11689 on entry to this function. Return NULL_TREE if any checks fail;
11690 if FORCE_CONVERT is true, the checks are bypassed. */
11691
11692 static tree
11693 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
11694 {
11695 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11696 overflow/underflow occurred. If -frounding-math, proceed iff the
11697 result of calling FUNC was exact. */
11698 if (force_convert
11699 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
11700 && !mpfr_overflow_p () && !mpfr_underflow_p ()
11701 && (!flag_rounding_math || !inexact)))
11702 {
11703 REAL_VALUE_TYPE re, im;
11704
11705 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
11706 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
11707 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
11708 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
11709 but the mpfr_t is not, then we underflowed in the
11710 conversion. */
11711 if (force_convert
11712 || (real_isfinite (&re) && real_isfinite (&im)
11713 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
11714 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
11715 {
11716 REAL_VALUE_TYPE re_mode, im_mode;
11717
11718 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
11719 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
11720 /* Proceed iff the specified mode can hold the value. */
11721 if (force_convert
11722 || (real_identical (&re_mode, &re)
11723 && real_identical (&im_mode, &im)))
11724 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
11725 build_real (TREE_TYPE (type), im_mode));
11726 }
11727 }
11728 return NULL_TREE;
11729 }
11730
11731 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
11732 FUNC on it and return the resulting value as a tree with type TYPE.
11733 If MIN and/or MAX are not NULL, then the supplied ARG must be
11734 within those bounds. If INCLUSIVE is true, then MIN/MAX are
11735 acceptable values, otherwise they are not. The mpfr precision is
11736 set to the precision of TYPE. We assume that function FUNC returns
11737 zero if the result could be calculated exactly within the requested
11738 precision. */
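
/* E.g. the callers earlier in this file fold sin (1.0) through
   do_mpfr_arg1 (arg, type, mpfr_sin, NULL, NULL, false); the MIN/MAX
   bounds serve functions such as acos, whose domain is [-1, 1]. */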
11739
11740 static tree
11741 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
11742 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
11743 bool inclusive)
11744 {
11745 tree result = NULL_TREE;
11746
11747 STRIP_NOPS (arg);
11748
11749 /* To proceed, MPFR must exactly represent the target floating point
11750 format, which only happens when the target base equals two. */
11751 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11752 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
11753 {
11754 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
11755
11756 if (real_isfinite (ra)
11757 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
11758 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
11759 {
11760 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11761 const int prec = fmt->p;
11762 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
11763 int inexact;
11764 mpfr_t m;
11765
11766 mpfr_init2 (m, prec);
11767 mpfr_from_real (m, ra, GMP_RNDN);
11768 mpfr_clear_flags ();
11769 inexact = func (m, m, rnd);
11770 result = do_mpfr_ckconv (m, type, inexact);
11771 mpfr_clear (m);
11772 }
11773 }
11774
11775 return result;
11776 }
11777
11778 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
11779 FUNC on it and return the resulting value as a tree with type TYPE.
11780 The mpfr precision is set to the precision of TYPE. We assume that
11781 function FUNC returns zero if the result could be calculated
11782 exactly within the requested precision. */
11783
11784 static tree
11785 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
11786 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
11787 {
11788 tree result = NULL_TREE;
11789
11790 STRIP_NOPS (arg1);
11791 STRIP_NOPS (arg2);
11792
11793 /* To proceed, MPFR must exactly represent the target floating point
11794 format, which only happens when the target base equals two. */
11795 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11796 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
11797 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
11798 {
11799 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
11800 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
11801
11802 if (real_isfinite (ra1) && real_isfinite (ra2))
11803 {
11804 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11805 const int prec = fmt->p;
11806 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
11807 int inexact;
11808 mpfr_t m1, m2;
11809
11810 mpfr_inits2 (prec, m1, m2, NULL);
11811 mpfr_from_real (m1, ra1, GMP_RNDN);
11812 mpfr_from_real (m2, ra2, GMP_RNDN);
11813 mpfr_clear_flags ();
11814 inexact = func (m1, m1, m2, rnd);
11815 result = do_mpfr_ckconv (m1, type, inexact);
11816 mpfr_clears (m1, m2, NULL);
11817 }
11818 }
11819
11820 return result;
11821 }
11822
11823 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
11824 FUNC on it and return the resulting value as a tree with type TYPE.
11825 The mpfr precision is set to the precision of TYPE. We assume that
11826 function FUNC returns zero if the result could be calculated
11827 exactly within the requested precision. */
11828
11829 static tree
11830 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
11831 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
11832 {
11833 tree result = NULL_TREE;
11834
11835 STRIP_NOPS (arg1);
11836 STRIP_NOPS (arg2);
11837 STRIP_NOPS (arg3);
11838
11839 /* To proceed, MPFR must exactly represent the target floating point
11840 format, which only happens when the target base equals two. */
11841 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11842 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
11843 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
11844 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
11845 {
11846 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
11847 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
11848 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
11849
11850 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
11851 {
11852 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11853 const int prec = fmt->p;
11854 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
11855 int inexact;
11856 mpfr_t m1, m2, m3;
11857
11858 mpfr_inits2 (prec, m1, m2, m3, NULL);
11859 mpfr_from_real (m1, ra1, GMP_RNDN);
11860 mpfr_from_real (m2, ra2, GMP_RNDN);
11861 mpfr_from_real (m3, ra3, GMP_RNDN);
11862 mpfr_clear_flags ();
11863 inexact = func (m1, m1, m2, m3, rnd);
11864 result = do_mpfr_ckconv (m1, type, inexact);
11865 mpfr_clears (m1, m2, m3, NULL);
11866 }
11867 }
11868
11869 return result;
11870 }
11871
11872 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
11873 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
11874 If ARG_SINP and ARG_COSP are NULL then the result is returned
11875 as a complex value.
11876 The type is taken from the type of ARG and is used for setting the
11877 precision of the calculation and results. */
11878
11879 static tree
11880 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
11881 {
11882 tree const type = TREE_TYPE (arg);
11883 tree result = NULL_TREE;
11884
11885 STRIP_NOPS (arg);
11886
11887 /* To proceed, MPFR must exactly represent the target floating point
11888 format, which only happens when the target base equals two. */
11889 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11890 && TREE_CODE (arg) == REAL_CST
11891 && !TREE_OVERFLOW (arg))
11892 {
11893 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
11894
11895 if (real_isfinite (ra))
11896 {
11897 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11898 const int prec = fmt->p;
11899 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
11900 tree result_s, result_c;
11901 int inexact;
11902 mpfr_t m, ms, mc;
11903
11904 mpfr_inits2 (prec, m, ms, mc, NULL);
11905 mpfr_from_real (m, ra, GMP_RNDN);
11906 mpfr_clear_flags ();
11907 inexact = mpfr_sin_cos (ms, mc, m, rnd);
11908 result_s = do_mpfr_ckconv (ms, type, inexact);
11909 result_c = do_mpfr_ckconv (mc, type, inexact);
11910 mpfr_clears (m, ms, mc, NULL);
11911 if (result_s && result_c)
11912 {
11913 /* If we are to return the result as a complex value, do so. */
11914 if (!arg_sinp && !arg_cosp)
11915 return build_complex (build_complex_type (type),
11916 result_c, result_s);
11917
11918 /* Dereference the sin/cos pointer arguments. */
11919 arg_sinp = build_fold_indirect_ref (arg_sinp);
11920 arg_cosp = build_fold_indirect_ref (arg_cosp);
11921 /* Proceed iff valid pointer types were passed in. */
11922 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
11923 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
11924 {
11925 /* Set the values. */
11926 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
11927 result_s);
11928 TREE_SIDE_EFFECTS (result_s) = 1;
11929 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
11930 result_c);
11931 TREE_SIDE_EFFECTS (result_c) = 1;
11932 /* Combine the assignments into a compound expr. */
11933 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
11934 result_s, result_c));
11935 }
11936 }
11937 }
11938 }
11939 return result;
11940 }
11941
11942 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
11943 two-argument mpfr order N Bessel function FUNC on them and return
11944 the resulting value as a tree with type TYPE. The mpfr precision
11945 is set to the precision of TYPE. We assume that function FUNC
11946 returns zero if the result could be calculated exactly within the
11947 requested precision. */
11948 static tree
11949 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
11950 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
11951 const REAL_VALUE_TYPE *min, bool inclusive)
11952 {
11953 tree result = NULL_TREE;
11954
11955 STRIP_NOPS (arg1);
11956 STRIP_NOPS (arg2);
11957
11958 /* To proceed, MPFR must exactly represent the target floating point
11959 format, which only happens when the target base equals two. */
11960 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11961 && tree_fits_shwi_p (arg1)
11962 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
11963 {
11964 const HOST_WIDE_INT n = tree_to_shwi (arg1);
11965 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
11966
11967 if (n == (long)n
11968 && real_isfinite (ra)
11969 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
11970 {
11971 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11972 const int prec = fmt->p;
11973 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
11974 int inexact;
11975 mpfr_t m;
11976
11977 mpfr_init2 (m, prec);
11978 mpfr_from_real (m, ra, GMP_RNDN);
11979 mpfr_clear_flags ();
11980 inexact = func (m, n, m, rnd);
11981 result = do_mpfr_ckconv (m, type, inexact);
11982 mpfr_clear (m);
11983 }
11984 }
11985
11986 return result;
11987 }
11988
11989 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
11990 the pointer *(ARG_QUO) and return the result. The type is taken
11991 from the type of ARG0 and is used for setting the precision of the
11992 calculation and results. */
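
/* For example, folding remquo (5.0, 3.0, &q) stores 2 in q (the
   integer nearest to 5.0 / 3.0) and produces the remainder -1.0,
   since 5.0 - 2 * 3.0 == -1.0. */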
11993
11994 static tree
11995 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
11996 {
11997 tree const type = TREE_TYPE (arg0);
11998 tree result = NULL_TREE;
11999
12000 STRIP_NOPS (arg0);
12001 STRIP_NOPS (arg1);
12002
12003 /* To proceed, MPFR must exactly represent the target floating point
12004 format, which only happens when the target base equals two. */
12005 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12006 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
12007 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
12008 {
12009 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
12010 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
12011
12012 if (real_isfinite (ra0) && real_isfinite (ra1))
12013 {
12014 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12015 const int prec = fmt->p;
12016 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
12017 tree result_rem;
12018 long integer_quo;
12019 mpfr_t m0, m1;
12020
12021 mpfr_inits2 (prec, m0, m1, NULL);
12022 mpfr_from_real (m0, ra0, GMP_RNDN);
12023 mpfr_from_real (m1, ra1, GMP_RNDN);
12024 mpfr_clear_flags ();
12025 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
12026 /* Remquo is independent of the rounding mode, so pass
12027 inexact=0 to do_mpfr_ckconv(). */
12028 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
12029 mpfr_clears (m0, m1, NULL);
12030 if (result_rem)
12031 {
12032 /* MPFR calculates quo in the host's long so it may
12033 return more bits in quo than the target int can hold
12034 if sizeof(host long) > sizeof(target int). This can
12035 happen even for native compilers in LP64 mode. In
12036 these cases, reduce the quo value modulo the largest
12037 number that the target int can hold while leaving one
12038 bit for the sign. */
12039 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
12040 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
12041
12042 /* Dereference the quo pointer argument. */
12043 arg_quo = build_fold_indirect_ref (arg_quo);
12044 /* Proceed iff a valid pointer type was passed in. */
12045 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
12046 {
12047 /* Set the value. */
12048 tree result_quo
12049 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
12050 build_int_cst (TREE_TYPE (arg_quo),
12051 integer_quo));
12052 TREE_SIDE_EFFECTS (result_quo) = 1;
12053 /* Combine the quo assignment with the rem. */
12054 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12055 result_quo, result_rem));
12056 }
12057 }
12058 }
12059 }
12060 return result;
12061 }
12062
12063 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
12064 resulting value as a tree with type TYPE. The mpfr precision is
12065 set to the precision of TYPE. We assume that this mpfr function
12066 returns zero if the result could be calculated exactly within the
12067 requested precision. In addition, the int object pointed to by
12068 ARG_SG is set to the appropriate signgam value, which is -1 if
12069 gamma of the argument is negative and 1 otherwise. */
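/* For illustration (hypothetical user code, not part of GCC):

     int sg;
     double l = __builtin_lgamma_r (-0.5, &sg);

   Since gamma(-0.5) == -2*sqrt(pi) is negative, folding the call sets
   sg to -1 and l to log(2*sqrt(pi)), approximately 1.2655.  */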
12070
12071 static tree
12072 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
12073 {
12074 tree result = NULL_TREE;
12075
12076 STRIP_NOPS (arg);
12077
12078 /* To proceed, MPFR must exactly represent the target floating point
12079 format, which only happens when the target base equals two. Also
12080 verify ARG is a constant and that ARG_SG is an int pointer. */
12081 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12082 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
12083 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
12084 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
12085 {
12086 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
12087
12088 /* In addition to NaN and Inf, the argument cannot be zero or a
12089 negative integer, where the gamma function has poles. */
12090 if (real_isfinite (ra)
12091 && ra->cl != rvc_zero
12092 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
12093 {
12094 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12095 const int prec = fmt->p;
12096 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
12097 int inexact, sg;
12098 mpfr_t m;
12099 tree result_lg;
12100
12101 mpfr_init2 (m, prec);
12102 mpfr_from_real (m, ra, GMP_RNDN);
12103 mpfr_clear_flags ();
12104 inexact = mpfr_lgamma (m, &sg, m, rnd);
12105 result_lg = do_mpfr_ckconv (m, type, inexact);
12106 mpfr_clear (m);
12107 if (result_lg)
12108 {
12109 tree result_sg;
12110
12111 /* Dereference the arg_sg pointer argument. */
12112 arg_sg = build_fold_indirect_ref (arg_sg);
12113 /* Assign the signgam value into *arg_sg. */
12114 result_sg = fold_build2 (MODIFY_EXPR,
12115 TREE_TYPE (arg_sg), arg_sg,
12116 build_int_cst (TREE_TYPE (arg_sg), sg));
12117 TREE_SIDE_EFFECTS (result_sg) = 1;
12118 /* Combine the signgam assignment with the lgamma result. */
12119 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12120 result_sg, result_lg));
12121 }
12122 }
12123 }
12124
12125 return result;
12126 }
12127
12128 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
12129 function FUNC on it and return the resulting value as a tree with
12130 type TYPE. The mpfr precision is set to the precision of the real
12131 component of TYPE. We assume that FUNC returns zero if the result
12132 could be calculated exactly within the requested precision. */
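/* For illustration (hypothetical user code, not part of GCC; this
   assumes the csqrt builtin is folded through this routine):

     double _Complex z = __builtin_csqrt (-4.0 + 0.0i);

   folds to 0.0 + 2.0i. That value is exactly representable, so the
   mpc function reports an exact result and the fold is valid even
   under -frounding-math.  */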
12133
12134 static tree
12135 do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
12136 {
12137 tree result = NULL_TREE;
12138
12139 STRIP_NOPS (arg);
12140
12141 /* To proceed, MPFR must exactly represent the target floating point
12142 format, which only happens when the target base equals two. */
12143 if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
12144 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
12145 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
12146 {
12147 const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
12148 const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
12149
12150 if (real_isfinite (re) && real_isfinite (im))
12151 {
12152 const struct real_format *const fmt =
12153 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
12154 const int prec = fmt->p;
12155 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
12156 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
12157 int inexact;
12158 mpc_t m;
12159
12160 mpc_init2 (m, prec);
12161 mpfr_from_real (mpc_realref (m), re, rnd);
12162 mpfr_from_real (mpc_imagref (m), im, rnd);
12163 mpfr_clear_flags ();
12164 inexact = func (m, m, crnd);
12165 result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
12166 mpc_clear (m);
12167 }
12168 }
12169
12170 return result;
12171 }
12172
12173 /* If arguments ARG0 and ARG1 are COMPLEX_CSTs, call the two-argument
12174 mpc function FUNC on them and return the resulting value as a tree
12175 with type TYPE. The mpfr precision is set to the precision of the
12176 real component of TYPE. We assume that FUNC returns zero if the
12177 result could be calculated exactly within the requested precision.
12178 If DO_NONFINITE is true, then fold expressions containing Inf or
12179 NaN in the arguments and/or results. */
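/* For illustration (hypothetical user code, not part of GCC; this
   assumes the cpow builtin is folded through this routine):

     double _Complex z = __builtin_cpow (1.0 + 1.0i, 2.0 + 0.0i);

   may fold to 0.0 + 2.0i, since (1+i)^2 == 2i. DO_NONFINITE only
   controls whether Inf/NaN operands and results are folded; merely
   inexact results are vetted separately by do_mpc_ckconv.  */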
12180
12181 tree
12182 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
12183 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
12184 {
12185 tree result = NULL_TREE;
12186
12187 STRIP_NOPS (arg0);
12188 STRIP_NOPS (arg1);
12189
12190 /* To proceed, MPFR must exactly represent the target floating point
12191 format, which only happens when the target base equals two. */
12192 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
12193 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
12194 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
12195 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
12196 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
12197 {
12198 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
12199 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
12200 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
12201 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
12202
12203 if (do_nonfinite
12204 || (real_isfinite (re0) && real_isfinite (im0)
12205 && real_isfinite (re1) && real_isfinite (im1)))
12206 {
12207 const struct real_format *const fmt =
12208 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
12209 const int prec = fmt->p;
12210 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
12211 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
12212 int inexact;
12213 mpc_t m0, m1;
12214
12215 mpc_init2 (m0, prec);
12216 mpc_init2 (m1, prec);
12217 mpfr_from_real (mpc_realref (m0), re0, rnd);
12218 mpfr_from_real (mpc_imagref (m0), im0, rnd);
12219 mpfr_from_real (mpc_realref (m1), re1, rnd);
12220 mpfr_from_real (mpc_imagref (m1), im1, rnd);
12221 mpfr_clear_flags ();
12222 inexact = func (m0, m0, m1, crnd);
12223 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
12224 mpc_clear (m0);
12225 mpc_clear (m1);
12226 }
12227 }
12228
12229 return result;
12230 }
12231
12232 /* A wrapper function for builtin folding that prevents warnings for
12233 "statement without effect" and the like, caused by removing the
12234 call node before the warning is generated. */
12235
12236 tree
12237 fold_call_stmt (gcall *stmt, bool ignore)
12238 {
12239 tree ret = NULL_TREE;
12240 tree fndecl = gimple_call_fndecl (stmt);
12241 location_t loc = gimple_location (stmt);
12242 if (fndecl
12243 && TREE_CODE (fndecl) == FUNCTION_DECL
12244 && DECL_BUILT_IN (fndecl)
12245 && !gimple_call_va_arg_pack_p (stmt))
12246 {
12247 int nargs = gimple_call_num_args (stmt);
12248 tree *args = (nargs > 0
12249 ? gimple_call_arg_ptr (stmt, 0)
12250 : &error_mark_node);
12251
12252 if (avoid_folding_inline_builtin (fndecl))
12253 return NULL_TREE;
12254 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
12255 {
12256 return targetm.fold_builtin (fndecl, nargs, args, ignore);
12257 }
12258 else
12259 {
12260 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
12261 if (ret)
12262 {
12263 /* Propagate location information from original call to
12264 expansion of builtin. Otherwise things like
12265 maybe_emit_chk_warning, that operate on the expansion
12266 of a builtin, will use the wrong location information. */
12267 if (gimple_has_location (stmt))
12268 {
12269 tree realret = ret;
12270 if (TREE_CODE (ret) == NOP_EXPR)
12271 realret = TREE_OPERAND (ret, 0);
12272 if (CAN_HAVE_LOCATION_P (realret)
12273 && !EXPR_HAS_LOCATION (realret))
12274 SET_EXPR_LOCATION (realret, loc);
12275 return realret;
12276 }
12277 return ret;
12278 }
12279 }
12280 }
12281 return NULL_TREE;
12282 }
12283
12284 /* Look up the function in builtin_decl that corresponds to DECL
12285 and set ASMSPEC as its user assembler name. DECL must be a
12286 function decl that declares a builtin. */
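/* For illustration (hypothetical user code, not part of GCC):

     void *memcpy (void *, const void *, __SIZE_TYPE__)
       __asm__ ("xmemcpy");

   arrives here with ASMSPEC == "xmemcpy". Besides renaming the
   builtin decl, the switch below redirects the block-move expander
   and the memcpy libfunc, so open-coded copies call "xmemcpy" too.  */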
12287
12288 void
12289 set_builtin_user_assembler_name (tree decl, const char *asmspec)
12290 {
12291 tree builtin;
12292 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
12293 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
12294 && asmspec != 0);
12295
12296 builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
12297 set_user_assembler_name (builtin, asmspec);
12298 switch (DECL_FUNCTION_CODE (decl))
12299 {
12300 case BUILT_IN_MEMCPY:
12301 init_block_move_fn (asmspec);
12302 memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
12303 break;
12304 case BUILT_IN_MEMSET:
12305 init_block_clear_fn (asmspec);
12306 memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
12307 break;
12308 case BUILT_IN_MEMMOVE:
12309 memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
12310 break;
12311 case BUILT_IN_MEMCMP:
12312 memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
12313 break;
12314 case BUILT_IN_ABORT:
12315 abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
12316 break;
12317 case BUILT_IN_FFS:
12318 if (INT_TYPE_SIZE < BITS_PER_WORD)
12319 {
12320 set_user_assembler_libfunc ("ffs", asmspec);
12321 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
12322 MODE_INT, 0), "ffs");
12323 }
12324 break;
12325 default:
12326 break;
12327 }
12328 }
12329
12330 /* Return true if DECL is a builtin that expands to a constant or similarly
12331 simple code. */
12332 bool
12333 is_simple_builtin (tree decl)
12334 {
12335 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
12336 switch (DECL_FUNCTION_CODE (decl))
12337 {
12338 /* Builtins that expand to constants. */
12339 case BUILT_IN_CONSTANT_P:
12340 case BUILT_IN_EXPECT:
12341 case BUILT_IN_OBJECT_SIZE:
12342 case BUILT_IN_UNREACHABLE:
12343 /* Simple register moves or loads from stack. */
12344 case BUILT_IN_ASSUME_ALIGNED:
12345 case BUILT_IN_RETURN_ADDRESS:
12346 case BUILT_IN_EXTRACT_RETURN_ADDR:
12347 case BUILT_IN_FROB_RETURN_ADDR:
12348 case BUILT_IN_RETURN:
12349 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
12350 case BUILT_IN_FRAME_ADDRESS:
12351 case BUILT_IN_VA_END:
12352 case BUILT_IN_STACK_SAVE:
12353 case BUILT_IN_STACK_RESTORE:
12354 /* Exception state returns or moves registers around. */
12355 case BUILT_IN_EH_FILTER:
12356 case BUILT_IN_EH_POINTER:
12357 case BUILT_IN_EH_COPY_VALUES:
12358 return true;
12359
12360 default:
12361 return false;
12362 }
12363
12364 return false;
12365 }
12366
12367 /* Return true if DECL is a builtin that is not expensive, i.e., one
12368 that is most probably expanded inline into reasonably simple code.
12369 This is a superset of is_simple_builtin. */
12370 bool
12371 is_inexpensive_builtin (tree decl)
12372 {
12373 if (!decl)
12374 return false;
12375 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
12376 return true;
12377 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
12378 switch (DECL_FUNCTION_CODE (decl))
12379 {
12380 case BUILT_IN_ABS:
12381 case BUILT_IN_ALLOCA:
12382 case BUILT_IN_ALLOCA_WITH_ALIGN:
12383 case BUILT_IN_BSWAP16:
12384 case BUILT_IN_BSWAP32:
12385 case BUILT_IN_BSWAP64:
12386 case BUILT_IN_CLZ:
12387 case BUILT_IN_CLZIMAX:
12388 case BUILT_IN_CLZL:
12389 case BUILT_IN_CLZLL:
12390 case BUILT_IN_CTZ:
12391 case BUILT_IN_CTZIMAX:
12392 case BUILT_IN_CTZL:
12393 case BUILT_IN_CTZLL:
12394 case BUILT_IN_FFS:
12395 case BUILT_IN_FFSIMAX:
12396 case BUILT_IN_FFSL:
12397 case BUILT_IN_FFSLL:
12398 case BUILT_IN_IMAXABS:
12399 case BUILT_IN_FINITE:
12400 case BUILT_IN_FINITEF:
12401 case BUILT_IN_FINITEL:
12402 case BUILT_IN_FINITED32:
12403 case BUILT_IN_FINITED64:
12404 case BUILT_IN_FINITED128:
12405 case BUILT_IN_FPCLASSIFY:
12406 case BUILT_IN_ISFINITE:
12407 case BUILT_IN_ISINF_SIGN:
12408 case BUILT_IN_ISINF:
12409 case BUILT_IN_ISINFF:
12410 case BUILT_IN_ISINFL:
12411 case BUILT_IN_ISINFD32:
12412 case BUILT_IN_ISINFD64:
12413 case BUILT_IN_ISINFD128:
12414 case BUILT_IN_ISNAN:
12415 case BUILT_IN_ISNANF:
12416 case BUILT_IN_ISNANL:
12417 case BUILT_IN_ISNAND32:
12418 case BUILT_IN_ISNAND64:
12419 case BUILT_IN_ISNAND128:
12420 case BUILT_IN_ISNORMAL:
12421 case BUILT_IN_ISGREATER:
12422 case BUILT_IN_ISGREATEREQUAL:
12423 case BUILT_IN_ISLESS:
12424 case BUILT_IN_ISLESSEQUAL:
12425 case BUILT_IN_ISLESSGREATER:
12426 case BUILT_IN_ISUNORDERED:
12427 case BUILT_IN_VA_ARG_PACK:
12428 case BUILT_IN_VA_ARG_PACK_LEN:
12429 case BUILT_IN_VA_COPY:
12430 case BUILT_IN_TRAP:
12431 case BUILT_IN_SAVEREGS:
12432 case BUILT_IN_POPCOUNTL:
12433 case BUILT_IN_POPCOUNTLL:
12434 case BUILT_IN_POPCOUNTIMAX:
12435 case BUILT_IN_POPCOUNT:
12436 case BUILT_IN_PARITYL:
12437 case BUILT_IN_PARITYLL:
12438 case BUILT_IN_PARITYIMAX:
12439 case BUILT_IN_PARITY:
12440 case BUILT_IN_LABS:
12441 case BUILT_IN_LLABS:
12442 case BUILT_IN_PREFETCH:
12443 case BUILT_IN_ACC_ON_DEVICE:
12444 return true;
12445
12446 default:
12447 return is_simple_builtin (decl);
12448 }
12449
12450 return false;
12451 }
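/* For illustration (a hypothetical caller, not code in this file):

     if (is_inexpensive_builtin (gimple_call_fndecl (stmt)))
       ...;   // e.g. charge little or no size/time cost for the call

   is_simple_builtin and is_inexpensive_builtin let passes classify a
   call by its expected expansion cost without actually expanding it.  */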