re PR middle-end/53688 (191.fma3d in SPEC CPU 2000 miscompiled)
[gcc.git] / gcc / builtins.c
1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011,
4 2012 Free Software Foundation, Inc.
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "rtl.h"
28 #include "tree.h"
29 #include "realmpfr.h"
30 #include "gimple.h"
31 #include "flags.h"
32 #include "regs.h"
33 #include "hard-reg-set.h"
34 #include "except.h"
35 #include "function.h"
36 #include "insn-config.h"
37 #include "expr.h"
38 #include "optabs.h"
39 #include "libfuncs.h"
40 #include "recog.h"
41 #include "output.h"
42 #include "typeclass.h"
43 #include "predict.h"
44 #include "tm_p.h"
45 #include "target.h"
46 #include "langhooks.h"
47 #include "basic-block.h"
48 #include "tree-mudflap.h"
49 #include "tree-flow.h"
50 #include "value-prof.h"
51 #include "diagnostic-core.h"
52 #include "builtins.h"
53
54
55 #ifndef PAD_VARARGS_DOWN
56 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
57 #endif
58 static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));
59
60 struct target_builtins default_target_builtins;
61 #if SWITCHABLE_TARGET
62 struct target_builtins *this_target_builtins = &default_target_builtins;
63 #endif
64
65 /* Define the names of the builtin function types and codes. */
66 const char *const built_in_class_names[4]
67 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
68
69 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
70 const char * built_in_names[(int) END_BUILTINS] =
71 {
72 #include "builtins.def"
73 };
74 #undef DEF_BUILTIN
75
76 /* Setup an array of _DECL trees, make sure each element is
77 initialized to NULL_TREE. */
78 builtin_info_type builtin_info;
79
80 static const char *c_getstr (tree);
81 static rtx c_readstr (const char *, enum machine_mode);
82 static int target_char_cast (tree, char *);
83 static rtx get_memory_rtx (tree, tree);
84 static int apply_args_size (void);
85 static int apply_result_size (void);
86 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
87 static rtx result_vector (int, rtx);
88 #endif
89 static void expand_builtin_update_setjmp_buf (rtx);
90 static void expand_builtin_prefetch (tree);
91 static rtx expand_builtin_apply_args (void);
92 static rtx expand_builtin_apply_args_1 (void);
93 static rtx expand_builtin_apply (rtx, rtx, rtx);
94 static void expand_builtin_return (rtx);
95 static enum type_class type_to_class (tree);
96 static rtx expand_builtin_classify_type (tree);
97 static void expand_errno_check (tree, rtx);
98 static rtx expand_builtin_mathfn (tree, rtx, rtx);
99 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
100 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
101 static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
102 static rtx expand_builtin_interclass_mathfn (tree, rtx);
103 static rtx expand_builtin_sincos (tree);
104 static rtx expand_builtin_cexpi (tree, rtx);
105 static rtx expand_builtin_int_roundingfn (tree, rtx);
106 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
107 static rtx expand_builtin_next_arg (void);
108 static rtx expand_builtin_va_start (tree);
109 static rtx expand_builtin_va_end (tree);
110 static rtx expand_builtin_va_copy (tree);
111 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
112 static rtx expand_builtin_strcmp (tree, rtx);
113 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
114 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
115 static rtx expand_builtin_memcpy (tree, rtx);
116 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
117 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
118 enum machine_mode, int);
119 static rtx expand_builtin_strcpy (tree, rtx);
120 static rtx expand_builtin_strcpy_args (tree, tree, rtx);
121 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
122 static rtx expand_builtin_strncpy (tree, rtx);
123 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
124 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
125 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
126 static rtx expand_builtin_bzero (tree);
127 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
128 static rtx expand_builtin_alloca (tree, bool);
129 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
130 static rtx expand_builtin_frame_address (tree, tree);
131 static tree stabilize_va_list_loc (location_t, tree, int);
132 static rtx expand_builtin_expect (tree, rtx);
133 static tree fold_builtin_constant_p (tree);
134 static tree fold_builtin_expect (location_t, tree, tree);
135 static tree fold_builtin_classify_type (tree);
136 static tree fold_builtin_strlen (location_t, tree, tree);
137 static tree fold_builtin_inf (location_t, tree, int);
138 static tree fold_builtin_nan (tree, tree, int);
139 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
140 static bool validate_arg (const_tree, enum tree_code code);
141 static bool integer_valued_real_p (tree);
142 static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
143 static bool readonly_data_expr (tree);
144 static rtx expand_builtin_fabs (tree, rtx, rtx);
145 static rtx expand_builtin_signbit (tree, rtx);
146 static tree fold_builtin_sqrt (location_t, tree, tree);
147 static tree fold_builtin_cbrt (location_t, tree, tree);
148 static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
149 static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
150 static tree fold_builtin_cos (location_t, tree, tree, tree);
151 static tree fold_builtin_cosh (location_t, tree, tree, tree);
152 static tree fold_builtin_tan (tree, tree);
153 static tree fold_builtin_trunc (location_t, tree, tree);
154 static tree fold_builtin_floor (location_t, tree, tree);
155 static tree fold_builtin_ceil (location_t, tree, tree);
156 static tree fold_builtin_round (location_t, tree, tree);
157 static tree fold_builtin_int_roundingfn (location_t, tree, tree);
158 static tree fold_builtin_bitop (tree, tree);
159 static tree fold_builtin_memory_op (location_t, tree, tree, tree, tree, bool, int);
160 static tree fold_builtin_strchr (location_t, tree, tree, tree);
161 static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
162 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
163 static tree fold_builtin_strcmp (location_t, tree, tree);
164 static tree fold_builtin_strncmp (location_t, tree, tree, tree);
165 static tree fold_builtin_signbit (location_t, tree, tree);
166 static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
167 static tree fold_builtin_isascii (location_t, tree);
168 static tree fold_builtin_toascii (location_t, tree);
169 static tree fold_builtin_isdigit (location_t, tree);
170 static tree fold_builtin_fabs (location_t, tree, tree);
171 static tree fold_builtin_abs (location_t, tree, tree);
172 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
173 enum tree_code);
174 static tree fold_builtin_n (location_t, tree, tree *, int, bool);
175 static tree fold_builtin_0 (location_t, tree, bool);
176 static tree fold_builtin_1 (location_t, tree, tree, bool);
177 static tree fold_builtin_2 (location_t, tree, tree, tree, bool);
178 static tree fold_builtin_3 (location_t, tree, tree, tree, tree, bool);
179 static tree fold_builtin_4 (location_t, tree, tree, tree, tree, tree, bool);
180 static tree fold_builtin_varargs (location_t, tree, tree, bool);
181
182 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
183 static tree fold_builtin_strstr (location_t, tree, tree, tree);
184 static tree fold_builtin_strrchr (location_t, tree, tree, tree);
185 static tree fold_builtin_strcat (location_t, tree, tree);
186 static tree fold_builtin_strncat (location_t, tree, tree, tree);
187 static tree fold_builtin_strspn (location_t, tree, tree);
188 static tree fold_builtin_strcspn (location_t, tree, tree);
189 static tree fold_builtin_sprintf (location_t, tree, tree, tree, int);
190 static tree fold_builtin_snprintf (location_t, tree, tree, tree, tree, int);
191
192 static rtx expand_builtin_object_size (tree);
193 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
194 enum built_in_function);
195 static void maybe_emit_chk_warning (tree, enum built_in_function);
196 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
197 static void maybe_emit_free_warning (tree);
198 static tree fold_builtin_object_size (tree, tree);
199 static tree fold_builtin_strcat_chk (location_t, tree, tree, tree, tree);
200 static tree fold_builtin_strncat_chk (location_t, tree, tree, tree, tree, tree);
201 static tree fold_builtin_sprintf_chk (location_t, tree, enum built_in_function);
202 static tree fold_builtin_printf (location_t, tree, tree, tree, bool, enum built_in_function);
203 static tree fold_builtin_fprintf (location_t, tree, tree, tree, tree, bool,
204 enum built_in_function);
205 static bool init_target_chars (void);
206
207 static unsigned HOST_WIDE_INT target_newline;
208 static unsigned HOST_WIDE_INT target_percent;
209 static unsigned HOST_WIDE_INT target_c;
210 static unsigned HOST_WIDE_INT target_s;
211 static char target_percent_c[3];
212 static char target_percent_s[3];
213 static char target_percent_s_newline[4];
214 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
215 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
216 static tree do_mpfr_arg2 (tree, tree, tree,
217 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
218 static tree do_mpfr_arg3 (tree, tree, tree, tree,
219 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
220 static tree do_mpfr_sincos (tree, tree, tree);
221 static tree do_mpfr_bessel_n (tree, tree, tree,
222 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
223 const REAL_VALUE_TYPE *, bool);
224 static tree do_mpfr_remquo (tree, tree, tree);
225 static tree do_mpfr_lgamma_r (tree, tree, tree);
226 static void expand_builtin_sync_synchronize (void);
227
/* Return true if NAME begins with one of the reserved built-in
   prefixes: "__builtin_", "__sync_" or "__atomic_".  */

static bool
is_builtin_name (const char *name)
{
  static const char *const prefixes[]
    = { "__builtin_", "__sync_", "__atomic_" };
  size_t i;

  for (i = 0; i < sizeof prefixes / sizeof prefixes[0]; i++)
    if (strncmp (name, prefixes[i], strlen (prefixes[i])) == 0)
      return true;

  return false;
}
241
242
243 /* Return true if DECL is a function symbol representing a built-in. */
244
245 bool
246 is_builtin_fn (tree decl)
247 {
248 return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
249 }
250
251
252 /* Return true if NODE should be considered for inline expansion regardless
253 of the optimization level. This means whenever a function is invoked with
254 its "internal" name, which normally contains the prefix "__builtin". */
255
256 static bool
257 called_as_built_in (tree node)
258 {
259 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
260 we want the name used to call the function, not the name it
261 will have. */
262 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
263 return is_builtin_name (name);
264 }
265
/* Compute values M and N such that M divides (address of EXP - N) and
   such that N < M.  If these numbers can be determined, store M in
   *ALIGNP (in bits) and N in *BITPOSP and return true.  Otherwise
   return false and store BITS_PER_UNIT to *ALIGNP and any bit-offset
   to *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  enum machine_mode mode;
  int unsignedp, volatilep;
  unsigned int inner, align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
			     &mode, &unsignedp, &volatilep, true);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == CONST_DECL)
    exp = DECL_INITIAL (exp);
  if (DECL_P (exp)
      && TREE_CODE (exp) != LABEL_DECL)
    {
      if (TREE_CODE (exp) == FUNCTION_DECL)
	{
	  /* Function addresses can encode extra information besides their
	     alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	     allows the low bit to be used as a virtual bit, we know
	     that the address itself must be 2-byte aligned.  */
	  if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	    {
	      known_alignment = true;
	      align = 2 * BITS_PER_UNIT;
	    }
	}
      else
	{
	  known_alignment = true;
	  align = DECL_ALIGN (exp);
	}
    }
  else if (CONSTANT_CLASS_P (exp))
    {
      known_alignment = true;
      align = TYPE_ALIGN (TREE_TYPE (exp));
#ifdef CONSTANT_ALIGNMENT
      /* Targets may raise the alignment of constants placed in memory.  */
      align = (unsigned)CONSTANT_ALIGNMENT (exp, align);
#endif
    }
  else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
    {
      known_alignment = true;
      align = TYPE_ALIGN (TREE_TYPE (exp));
    }
  else if (TREE_CODE (exp) == INDIRECT_REF)
    {
      known_alignment = true;
      align = TYPE_ALIGN (TREE_TYPE (exp));
    }
  else if (TREE_CODE (exp) == MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;

      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  /* (ptr & CST) is guaranteed aligned to the lowest set bit of
	     CST; CST & -CST isolates that bit.  */
	  known_alignment = true;
	  align = (TREE_INT_CST_LOW (TREE_OPERAND (addr, 1))
		   & -TREE_INT_CST_LOW (TREE_OPERAND (addr, 1)));
	  align *= BITS_PER_UNIT;
	  addr = TREE_OPERAND (addr, 0);
	}

      if (get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos))
	{
	  known_alignment = true;
	  bitpos += ptr_bitpos & ~(align - 1);
	  align = MAX (ptr_align, align);
	}

      /* The constant offset embedded in the MEM_REF contributes to the
	 bit position, not to the alignment itself.  */
      bitpos += mem_ref_offset (exp).low * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == TARGET_MEM_REF)
    {
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;
      tree addr = TMR_BASE (exp);

      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  /* Same (ptr & CST) trick as in the MEM_REF case above.  */
	  known_alignment = true;
	  align = (TREE_INT_CST_LOW (TREE_OPERAND (addr, 1))
		   & -TREE_INT_CST_LOW (TREE_OPERAND (addr, 1)));
	  align *= BITS_PER_UNIT;
	  addr = TREE_OPERAND (addr, 0);
	}

      if (get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos))
	{
	  known_alignment = true;
	  bitpos += ptr_bitpos & ~(align - 1);
	  align = MAX (ptr_align, align);
	}

      if (TMR_OFFSET (exp))
	bitpos += TREE_INT_CST_LOW (TMR_OFFSET (exp)) * BITS_PER_UNIT;
      if (TMR_INDEX (exp) && TMR_STEP (exp))
	{
	  /* INDEX * STEP only guarantees alignment to the lowest set
	     bit of STEP.  */
	  unsigned HOST_WIDE_INT step = TREE_INT_CST_LOW (TMR_STEP (exp));
	  align = MIN (align, (step & -step) * BITS_PER_UNIT);
	  known_alignment = true;
	}
      else if (TMR_INDEX (exp))
	known_alignment = false;

      /* A second, unscaled index makes the offset unpredictable.  */
      if (TMR_INDEX2 (exp))
	known_alignment = false;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  Walk down a chain of PLUS_EXPRs,
     intersecting (via MIN of lowest set bits) the alignment each
     term can guarantee.  */
  inner = ~0U;
  while (offset)
    {
      tree next_offset;

      if (TREE_CODE (offset) == PLUS_EXPR)
	{
	  next_offset = TREE_OPERAND (offset, 0);
	  offset = TREE_OPERAND (offset, 1);
	}
      else
	next_offset = NULL;
      if (host_integerp (offset, 1))
	{
	  /* Any overflow in calculating offset_bits won't change
	     the alignment.  */
	  unsigned offset_bits
	    = ((unsigned) tree_low_cst (offset, 1) * BITS_PER_UNIT);

	  if (offset_bits)
	    inner = MIN (inner, (offset_bits & -offset_bits));
	}
      else if (TREE_CODE (offset) == MULT_EXPR
	       && host_integerp (TREE_OPERAND (offset, 1), 1))
	{
	  /* Any overflow in calculating offset_factor won't change
	     the alignment.  */
	  unsigned offset_factor
	    = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
	       * BITS_PER_UNIT);

	  if (offset_factor)
	    inner = MIN (inner, (offset_factor & -offset_factor));
	}
      else
	{
	  /* A completely unanalyzable offset term invalidates all
	     alignment knowledge.  */
	  known_alignment = false;
	  break;
	}
      offset = next_offset;
    }

  if (known_alignment)
    {
      /* Alignment is innermost object alignment adjusted by the constant
	 and non-constant offset parts.  */
      align = MIN (align, inner);
      bitpos = bitpos & (align - 1);
      *alignp = align;
    }
  else
    {
      bitpos = bitpos & (BITS_PER_UNIT - 1);
      *alignp = BITS_PER_UNIT;
    }
  *bitposp = bitpos;
  return known_alignment;
}
460
461 /* Return the alignment in bits of EXP, an object. */
462
463 unsigned int
464 get_object_alignment (tree exp)
465 {
466 unsigned HOST_WIDE_INT bitpos = 0;
467 unsigned int align;
468
469 get_object_alignment_1 (exp, &align, &bitpos);
470
471 /* align and bitpos now specify known low bits of the pointer.
472 ptr & (align - 1) == bitpos. */
473
474 if (bitpos != 0)
475 align = (bitpos & -bitpos);
476 return align;
477 }
478
479 /* Return the alignment of object EXP, also considering its type when we do
480 not know of explicit misalignment. Only handle MEM_REF and TARGET_MEM_REF.
481
482 ??? Note that, in the general case, the type of an expression is not kept
483 consistent with misalignment information by the front-end, for example when
484 taking the address of a member of a packed structure. However, in most of
485 the cases, expressions have the alignment of their type so we optimistically
486 fall back to this alignment when we cannot compute a misalignment. */
487
488 unsigned int
489 get_object_or_type_alignment (tree exp)
490 {
491 unsigned HOST_WIDE_INT misalign;
492 unsigned int align;
493 bool known_alignment;
494
495 gcc_assert (TREE_CODE (exp) == MEM_REF || TREE_CODE (exp) == TARGET_MEM_REF);
496 known_alignment = get_object_alignment_1 (exp, &align, &misalign);
497 if (misalign != 0)
498 align = (misalign & -misalign);
499 else if (!known_alignment)
500 align = TYPE_ALIGN (TREE_TYPE (exp));
501
502 return align;
503 }
504
505 /* For a pointer valued expression EXP compute values M and N such that M
506 divides (EXP - N) and such that N < M. If these numbers can be determined,
507 store M in alignp and N in *BITPOSP and return true. Otherwise return false
508 and store BITS_PER_UNIT to *alignp and any bit-offset to *bitposp.
509
510 If EXP is not a pointer, false is returned too. */
511
512 bool
513 get_pointer_alignment_1 (tree exp, unsigned int *alignp,
514 unsigned HOST_WIDE_INT *bitposp)
515 {
516 STRIP_NOPS (exp);
517
518 if (TREE_CODE (exp) == ADDR_EXPR)
519 return get_object_alignment_1 (TREE_OPERAND (exp, 0), alignp, bitposp);
520 else if (TREE_CODE (exp) == SSA_NAME
521 && POINTER_TYPE_P (TREE_TYPE (exp)))
522 {
523 unsigned int ptr_align, ptr_misalign;
524 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
525
526 if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
527 {
528 *bitposp = ptr_misalign * BITS_PER_UNIT;
529 *alignp = ptr_align * BITS_PER_UNIT;
530 return true;
531 }
532 else
533 {
534 *bitposp = 0;
535 *alignp = BITS_PER_UNIT;
536 return false;
537 }
538 }
539
540 *bitposp = 0;
541 *alignp = BITS_PER_UNIT;
542 return false;
543 }
544
545 /* Return the alignment in bits of EXP, a pointer valued expression.
546 The alignment returned is, by default, the alignment of the thing that
547 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
548
549 Otherwise, look at the expression to see if we can do better, i.e., if the
550 expression is actually pointing at an object whose alignment is tighter. */
551
552 unsigned int
553 get_pointer_alignment (tree exp)
554 {
555 unsigned HOST_WIDE_INT bitpos = 0;
556 unsigned int align;
557
558 get_pointer_alignment_1 (exp, &align, &bitpos);
559
560 /* align and bitpos now specify known low bits of the pointer.
561 ptr & (align - 1) == bitpos. */
562
563 if (bitpos != 0)
564 align = (bitpos & -bitpos);
565
566 return align;
567 }
568
/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  tree offset_node;
  HOST_WIDE_INT offset;
  int max;
  const char *ptr;
  location_t loc;

  STRIP_NOPS (src);
  /* For cond ? s1 : s2, try both arms; if they agree on a constant
     length, that is the answer.  Side-effects in the condition block
     this unless ONLY_VALUE says the result will never be emitted.  */
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  /* For (e1, e2) the length is that of e2, with the same side-effect
     restriction on e1.  */
  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  loc = EXPR_LOC_OR_HERE (src);

  src = string_constant (src, &offset_node);
  if (src == 0)
    return NULL_TREE;

  /* MAX is the array size minus the trailing NUL slot.  */
  max = TREE_STRING_LENGTH (src) - 1;
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      int i;

      for (i = 0; i < max; i++)
	if (ptr[i] == 0)
	  return NULL_TREE;

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  This would perhaps not be valid if we were dealing
	 with named arrays in addition to literal string constants.  */

      return size_diffop_loc (loc, size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (offset_node == 0)
    offset = 0;
  else if (! host_integerp (offset_node, 0))
    offset = -1;
  else
    offset = tree_low_cst (offset_node, 0);

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (! TREE_NO_WARNING (src))
	{
	  warning_at (loc, 0, "offset outside bounds of constant string");
	  TREE_NO_WARNING (src) = 1;
	}
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return ssize_int (strlen (ptr + offset));
}
670
671 /* Return a char pointer for a C string if it is a string constant
672 or sum of string constant and integer constant. */
673
674 static const char *
675 c_getstr (tree src)
676 {
677 tree offset_node;
678
679 src = string_constant (src, &offset_node);
680 if (src == 0)
681 return 0;
682
683 if (offset_node == 0)
684 return TREE_STRING_POINTER (src);
685 else if (!host_integerp (offset_node, 1)
686 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
687 return 0;
688
689 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
690 }
691
/* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, enum machine_mode mode)
{
  /* C holds the low and high halves of the double-word result.  */
  HOST_WIDE_INT c[2];
  HOST_WIDE_INT ch;
  unsigned int i, j;

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);

  c[0] = 0;
  c[1] = 0;
  /* CH doubles as a "no NUL seen yet" flag: once a zero byte is read,
     CH stays 0 and every following byte is stored as zero.  */
  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      /* Compute the bit position J at which byte I of the string lands
	 in the target constant, honouring both byte and word
	 endianness of the target.  */
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
	/* Mirror the byte's position within its word.  */
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;
      gcc_assert (j < HOST_BITS_PER_DOUBLE_INT);

      if (ch)
	ch = (unsigned char) str[i];
      c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }
  return immed_double_const (c[0], c[1], mode);
}
724
725 /* Cast a target constant CST to target CHAR and if that value fits into
726 host char type, return zero and put that value into variable pointed to by
727 P. */
728
729 static int
730 target_char_cast (tree cst, char *p)
731 {
732 unsigned HOST_WIDE_INT val, hostval;
733
734 if (TREE_CODE (cst) != INTEGER_CST
735 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
736 return 1;
737
738 val = TREE_INT_CST_LOW (cst);
739 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
740 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
741
742 hostval = val;
743 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
744 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
745
746 if (val != hostval)
747 return 1;
748
749 *p = hostval;
750 return 0;
751 }
752
753 /* Similar to save_expr, but assumes that arbitrary code is not executed
754 in between the multiple evaluations. In particular, we assume that a
755 non-addressable local variable will not be modified. */
756
757 static tree
758 builtin_save_expr (tree exp)
759 {
760 if (TREE_CODE (exp) == SSA_NAME
761 || (TREE_ADDRESSABLE (exp) == 0
762 && (TREE_CODE (exp) == PARM_DECL
763 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
764 return exp;
765
766 return save_expr (exp);
767 }
768
/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE, which is either
   BUILT_IN_FRAME_ADDRESS or BUILT_IN_RETURN_ADDRESS).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;

#ifdef INITIAL_FRAME_ADDRESS_RTX
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
#else
  rtx tem;

  /* For a zero count with __builtin_return_address, we don't care what
     frame address we return, because target-specific definitions will
     override us.  Therefore frame pointer elimination is OK, and using
     the soft frame pointer is OK.

     For a nonzero count, or a zero count with __builtin_frame_address,
     we require a stable offset from the current frame pointer to the
     previous one, so we must use the hard frame pointer, and
     we must disable frame pointer elimination.  */
  if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    tem = frame_pointer_rtx;
  else
    {
      tem = hard_frame_pointer_rtx;

      /* Tell reload not to eliminate the frame pointer.  */
      crtl->accesses_prior_frames = 1;
    }
#endif

  /* Some machines need special handling before we can access
     arbitrary frames.  For example, on the SPARC, we must first flush
     all register windows to the stack.  */
#ifdef SETUP_FRAME_ADDRESSES
  if (count > 0)
    SETUP_FRAME_ADDRESSES ();
#endif

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
  if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;
#endif

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
#ifdef DYNAMIC_CHAIN_ADDRESS
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  (The #ifdef
     selects which return statement forms the body of this `if'.)  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
#ifdef FRAME_ADDR_RTX
    return FRAME_ADDR_RTX (tem);
#else
    return tem;
#endif

  /* For __builtin_return_address, get the return address from that frame.
     By default it is found one word past the frame address.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
852
853 /* Alias set used for setjmp buffer. */
854 static alias_set_type setjmp_alias_set = -1;
855
856 /* Construct the leading half of a __builtin_setjmp call. Control will
857 return to RECEIVER_LABEL. This is also called directly by the SJLJ
858 exception handling code. */
859
860 void
861 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
862 {
863 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
864 rtx stack_save;
865 rtx mem;
866
867 if (setjmp_alias_set == -1)
868 setjmp_alias_set = new_alias_set ();
869
870 buf_addr = convert_memory_address (Pmode, buf_addr);
871
872 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
873
874 /* We store the frame pointer and the address of receiver_label in
875 the buffer and use the rest of it for the stack save area, which
876 is machine-dependent. */
877
878 mem = gen_rtx_MEM (Pmode, buf_addr);
879 set_mem_alias_set (mem, setjmp_alias_set);
880 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
881
882 mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
883 GET_MODE_SIZE (Pmode))),
884 set_mem_alias_set (mem, setjmp_alias_set);
885
886 emit_move_insn (validize_mem (mem),
887 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
888
889 stack_save = gen_rtx_MEM (sa_mode,
890 plus_constant (Pmode, buf_addr,
891 2 * GET_MODE_SIZE (Pmode)));
892 set_mem_alias_set (stack_save, setjmp_alias_set);
893 emit_stack_save (SAVE_NONLOCAL, &stack_save);
894
895 /* If there is further processing to do, do it. */
896 #ifdef HAVE_builtin_setjmp_setup
897 if (HAVE_builtin_setjmp_setup)
898 emit_insn (gen_builtin_setjmp_setup (buf_addr));
899 #endif
900
901 /* We have a nonlocal label. */
902 cfun->has_nonlocal_label = 1;
903 }
904
/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.

   RECEIVER_LABEL is only consumed by the target's
   builtin_setjmp_receiver pattern, if it has one; hence
   ATTRIBUTE_UNUSED.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
{
  rtx chain;

  /* Clobber the FP when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = targetm.calls.static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  If the target has a nonlocal_goto pattern,
     it is responsible for this itself, so the block below is skipped
     (note the #ifdef turns the braces into an unconditional block when
     the pattern is absent).  */
#ifdef HAVE_nonlocal_goto
  if (! HAVE_nonlocal_goto)
#endif
    {
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
      /* This might change the hard frame pointer in ways that aren't
	 apparent to early optimization passes, so force a clobber.  */
      emit_clobber (hard_frame_pointer_rtx);
    }

#if !HARD_FRAME_POINTER_IS_ARG_POINTER
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      /* If the arg pointer is eliminable in favor of the hard frame
	 pointer, no explicit restore is needed; scan the elimination
	 table for that pair.  */
      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }
#endif

  /* Let the target emit any receiver-side fixup it needs; fall back to
     the nonlocal-goto receiver pattern, then to nothing.  */
#ifdef HAVE_builtin_setjmp_receiver
  if (HAVE_builtin_setjmp_receiver)
    emit_insn (gen_builtin_setjmp_receiver (receiver_label));
  else
#endif
#ifdef HAVE_nonlocal_goto_receiver
    if (HAVE_nonlocal_goto_receiver)
      emit_insn (gen_nonlocal_goto_receiver ());
    else
#endif
      { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}
975
/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.

   BUF_ADDR is the jump buffer laid out by expand_builtin_setjmp_setup:
   slot 0 = saved frame pointer, slot 1 = receiver label, slots 2.. =
   saved stack pointer.  VALUE must be const1_rtx (asserted below),
   since that is what __builtin_setjmp returns on the longjmp path.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack, insn, last;
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to current
     function  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  /* Remember the last insn before expansion, so the search loop at the
     bottom can assert it never walks past what we emitted here.  */
  last = get_last_insn ();
#ifdef HAVE_builtin_longjmp
  if (HAVE_builtin_longjmp)
    emit_insn (gen_builtin_longjmp (buf_addr));
  else
#endif
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
#ifdef HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
      else
#endif
	{
	  /* Load the label before clobbering the frame, since the MEM
	     address may be frame-relative.  */
	  lab = copy_to_reg (lab);

	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}
1063
/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.

   EXP is the CALL_EXPR; arg 0 is the label address, arg 1 points at a
   save area holding the frame pointer (slot 0) and stack pointer
   (slot 1).  Returns const0_rtx on success, NULL_RTX if the argument
   list does not validate.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp, insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.   */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (Pmode, r_save_area,
				     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

#ifdef HAVE_nonlocal_goto
  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (HAVE_nonlocal_goto)
    emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
#endif
    {
      /* Load the label before the frame is clobbered; its MEM may be
	 frame-relative.  */
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}
1145
1146 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1147 (not all will be used on all machines) that was passed to __builtin_setjmp.
1148 It updates the stack pointer in that block to correspond to the current
1149 stack pointer. */
1150
1151 static void
1152 expand_builtin_update_setjmp_buf (rtx buf_addr)
1153 {
1154 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1155 rtx stack_save
1156 = gen_rtx_MEM (sa_mode,
1157 memory_address
1158 (sa_mode,
1159 plus_constant (Pmode, buf_addr,
1160 2 * GET_MODE_SIZE (Pmode))));
1161
1162 emit_stack_save (SAVE_NONLOCAL, &stack_save);
1163 }
1164
1165 /* Expand a call to __builtin_prefetch. For a target that does not support
1166 data prefetch, evaluate the memory address argument in case it has side
1167 effects. */
1168
1169 static void
1170 expand_builtin_prefetch (tree exp)
1171 {
1172 tree arg0, arg1, arg2;
1173 int nargs;
1174 rtx op0, op1, op2;
1175
1176 if (!validate_arglist (exp, POINTER_TYPE, 0))
1177 return;
1178
1179 arg0 = CALL_EXPR_ARG (exp, 0);
1180
1181 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1182 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1183 locality). */
1184 nargs = call_expr_nargs (exp);
1185 if (nargs > 1)
1186 arg1 = CALL_EXPR_ARG (exp, 1);
1187 else
1188 arg1 = integer_zero_node;
1189 if (nargs > 2)
1190 arg2 = CALL_EXPR_ARG (exp, 2);
1191 else
1192 arg2 = integer_three_node;
1193
1194 /* Argument 0 is an address. */
1195 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1196
1197 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1198 if (TREE_CODE (arg1) != INTEGER_CST)
1199 {
1200 error ("second argument to %<__builtin_prefetch%> must be a constant");
1201 arg1 = integer_zero_node;
1202 }
1203 op1 = expand_normal (arg1);
1204 /* Argument 1 must be either zero or one. */
1205 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1206 {
1207 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1208 " using zero");
1209 op1 = const0_rtx;
1210 }
1211
1212 /* Argument 2 (locality) must be a compile-time constant int. */
1213 if (TREE_CODE (arg2) != INTEGER_CST)
1214 {
1215 error ("third argument to %<__builtin_prefetch%> must be a constant");
1216 arg2 = integer_zero_node;
1217 }
1218 op2 = expand_normal (arg2);
1219 /* Argument 2 must be 0, 1, 2, or 3. */
1220 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1221 {
1222 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1223 op2 = const0_rtx;
1224 }
1225
1226 #ifdef HAVE_prefetch
1227 if (HAVE_prefetch)
1228 {
1229 struct expand_operand ops[3];
1230
1231 create_address_operand (&ops[0], op0);
1232 create_integer_operand (&ops[1], INTVAL (op1));
1233 create_integer_operand (&ops[2], INTVAL (op2));
1234 if (maybe_expand_insn (CODE_FOR_prefetch, 3, ops))
1235 return;
1236 }
1237 #endif
1238
1239 /* Don't do anything with direct references to volatile memory, but
1240 generate code to handle other side effects. */
1241 if (!MEM_P (op0) && side_effects_p (op0))
1242 emit_insn (op0);
1243 }
1244
/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.

   The returned MEM is BLKmode, carries attributes derived from EXP
   (when a usable MEM_REF can be built), and is placed in alias set 0
   since builtin string operations may alias anything.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
     from its expression, for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  /* Expand the original expression (not the unwrapped one) so any
     SAVE_EXPR bookkeeping still happens.  */
  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob,
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
		     build_array_type (char_type_node,
				       build_range_type (sizetype,
							 size_one_node, len)),
		     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	   && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
						     0))))
    {
      /* Re-wrap the base object in an unknown-extent MEM_REF so the
	 attributes are conservative.  */
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
			 build_array_type (char_type_node,
					   build_range_type (sizetype,
							     size_zero_node,
							     NULL)),
			 exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}
1299 \f
1300 /* Built-in functions to perform an untyped call and return. */
1301
1302 #define apply_args_mode \
1303 (this_target_builtins->x_apply_args_mode)
1304 #define apply_result_mode \
1305 (this_target_builtins->x_apply_result_mode)
1306
1307 /* Return the size required for the block returned by __builtin_apply_args,
1308 and initialize apply_args_mode. */
1309
1310 static int
1311 apply_args_size (void)
1312 {
1313 static int size = -1;
1314 int align;
1315 unsigned int regno;
1316 enum machine_mode mode;
1317
1318 /* The values computed by this function never change. */
1319 if (size < 0)
1320 {
1321 /* The first value is the incoming arg-pointer. */
1322 size = GET_MODE_SIZE (Pmode);
1323
1324 /* The second value is the structure value address unless this is
1325 passed as an "invisible" first argument. */
1326 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1327 size += GET_MODE_SIZE (Pmode);
1328
1329 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1330 if (FUNCTION_ARG_REGNO_P (regno))
1331 {
1332 mode = targetm.calls.get_raw_arg_mode (regno);
1333
1334 gcc_assert (mode != VOIDmode);
1335
1336 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1337 if (size % align != 0)
1338 size = CEIL (size, align) * align;
1339 size += GET_MODE_SIZE (mode);
1340 apply_args_mode[regno] = mode;
1341 }
1342 else
1343 {
1344 apply_args_mode[regno] = VOIDmode;
1345 }
1346 }
1347 return size;
1348 }
1349
1350 /* Return the size required for the block returned by __builtin_apply,
1351 and initialize apply_result_mode. */
1352
1353 static int
1354 apply_result_size (void)
1355 {
1356 static int size = -1;
1357 int align, regno;
1358 enum machine_mode mode;
1359
1360 /* The values computed by this function never change. */
1361 if (size < 0)
1362 {
1363 size = 0;
1364
1365 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1366 if (targetm.calls.function_value_regno_p (regno))
1367 {
1368 mode = targetm.calls.get_raw_result_mode (regno);
1369
1370 gcc_assert (mode != VOIDmode);
1371
1372 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1373 if (size % align != 0)
1374 size = CEIL (size, align) * align;
1375 size += GET_MODE_SIZE (mode);
1376 apply_result_mode[regno] = mode;
1377 }
1378 else
1379 apply_result_mode[regno] = VOIDmode;
1380
1381 /* Allow targets that use untyped_call and untyped_return to override
1382 the size so that machine-specific information can be stored here. */
1383 #ifdef APPLY_RESULT_SIZE
1384 size = APPLY_RESULT_SIZE;
1385 #endif
1386 }
1387 return size;
1388 }
1389
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.

   Returns a PARALLEL of SETs, one per result register recorded in
   apply_result_mode, each paired with the matching slot of RESULT.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, offset, align, count;
  enum machine_mode mode;
  rtx reg, mem;
  rtx *sets = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  offset = 0;
  count = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    {
      mode = apply_result_mode[regno];
      if (mode == VOIDmode)
	continue;

      /* Keep the slot layout in sync with apply_result_size.  */
      align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
      if (offset % align != 0)
	offset = CEIL (offset, align) * align;

      reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
      mem = adjust_address (result, mode, offset);
      sets[count++] = (savep
		       ? gen_rtx_SET (VOIDmode, mem, reg)
		       : gen_rtx_SET (VOIDmode, reg, mem));
      offset += GET_MODE_SIZE (mode);
    }

  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (count, sets));
}
#endif /* HAVE_untyped_call or HAVE_untyped_return */
1420
/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.

   Allocates a stack block (sized by apply_args_size) holding the
   incoming arg pointer, the optional structure value address, and every
   incoming argument register; returns the block's address in a
   register.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  enum machine_mode mode;
  rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.
     The offsets must match those computed in apply_args_size.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
#ifdef STACK_GROWS_DOWNWARD
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  tem
    = force_operand (plus_constant (Pmode, tem, crtl->args.pretend_args_size),
		     NULL_RTX);
#endif
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
		      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}
1481
/* __builtin_apply_args returns block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;
    rtx seq;

    /* Capture the save code in its own sequence so it can be moved.  */
    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
	&& REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}
1526
/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.

   FUNCTION is the address of the callee; ARGUMENTS points at a block
   previously produced by __builtin_apply_args (arg pointer, optional
   struct value address, then argument registers); ARGSIZE is the number
   of bytes of stack arguments to copy.  Returns, in ptr_mode, the
   address of a stack block holding the callee's result registers.  */

static rtx
expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
{
  int size, align, regno;
  enum machine_mode mode;
  rtx incoming_args, result, reg, dest, src, call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;
  rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);

  arguments = convert_memory_address (Pmode, arguments);

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
#ifndef STACK_GROWS_DOWNWARD
  incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
				       incoming_args, 0, OPTAB_LIB_WIDEN);
#endif

  /* Push a new argument block and copy the arguments.  Do not allow
     the (potential) memcpy call below to interfere with our stack
     manipulations.  */
  do_pending_stack_adjust ();
  NO_DEFER_POP;

  /* Save the stack with nonlocal if available.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
  else
#endif
    emit_stack_save (SAVE_BLOCK, &old_stack_level);

  /* Allocate a block of memory onto the stack and copy the memory
     arguments to the outgoing arguments address.  We can pass TRUE
     as the 4th argument because we just saved the stack pointer
     and will restore it right after the call.  */
  allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);

  /* Set DRAP flag to true, even though allocate_dynamic_stack_space
     may have already set current_function_calls_alloca to true.
     current_function_calls_alloca won't be set if argsize is zero,
     so we have to guarantee need_drap is true here.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  dest = virtual_outgoing_args_rtx;
#ifndef STACK_GROWS_DOWNWARD
  if (CONST_INT_P (argsize))
    dest = plus_constant (Pmode, dest, -INTVAL (argsize));
  else
    dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
#endif
  dest = gen_rtx_MEM (BLKmode, dest);
  set_mem_align (dest, PARM_BOUNDARY);
  src = gen_rtx_MEM (BLKmode, incoming_args);
  set_mem_align (src, PARM_BOUNDARY);
  emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);

  /* Refer to the argument block.  (The call to apply_args_size is for
     its side effect of initializing apply_args_mode.)  */
  apply_args_size ();
  arguments = gen_rtx_MEM (BLKmode, arguments);
  set_mem_align (arguments, PARM_BOUNDARY);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, regno);
	emit_move_insn (reg, adjust_address (arguments, mode, size));
	use_reg (&call_fusage, reg);
	size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value, adjust_address (arguments, Pmode, size));
      emit_move_insn (struct_value, value);
      if (REG_P (struct_value))
	use_reg (&call_fusage, struct_value);
      size += GET_MODE_SIZE (Pmode);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
#ifdef HAVE_untyped_call
  if (HAVE_untyped_call)
    emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
				      result, result_vector (1, result)));
  else
#endif
#ifdef HAVE_call_value
  if (HAVE_call_value)
    {
      rtx valreg = 0;

      /* Locate the unique return register.  It is not possible to
	 express a call that sets more than one return register using
	 call_value; use untyped_call for that.  In fact, untyped_call
	 only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if ((mode = apply_result_mode[regno]) != VOIDmode)
	  {
	    gcc_assert (!valreg); /* HAVE_untyped_call required.  */

	    valreg = gen_rtx_REG (mode, regno);
	  }

      emit_call_insn (GEN_CALL_VALUE (valreg,
				      gen_rtx_MEM (FUNCTION_MODE, function),
				      const0_rtx, NULL_RTX, const0_rtx));

      emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
    }
  else
#endif
    gcc_unreachable ();

  /* Find the CALL insn we just emitted, and attach the register usage
     information.  */
  call_insn = last_call_insn ();
  add_function_usage_to (call_insn, call_fusage);

  /* Restore the stack.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
  else
#endif
    emit_stack_restore (SAVE_BLOCK, old_stack_level);
  fixup_args_size_notes (call_insn, get_last_insn(), 0);

  OK_DEFER_POP;

  /* Return the address of the result block.  */
  result = copy_addr_to_reg (XEXP (result, 0));
  return convert_memory_address (ptr_mode, result);
}
1693
/* Perform an untyped return.

   RESULT is the address (in ptr_mode) of a block produced by
   expand_builtin_apply holding the callee's result registers; reload
   those registers and jump to the end of the current function.  */

static void
expand_builtin_return (rtx result)
{
  int size, align, regno;
  enum machine_mode mode;
  rtx reg;
  rtx call_fusage = 0;

  result = convert_memory_address (Pmode, result);

  /* The call to apply_result_size is for its side effect of
     initializing apply_result_mode.  */
  apply_result_size ();
  result = gen_rtx_MEM (BLKmode, result);

#ifdef HAVE_untyped_return
  if (HAVE_untyped_return)
    {
      emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
      emit_barrier ();
      return;
    }
#endif

  /* Restore the return value and note that each value is used.
     Offsets must match those computed in apply_result_size.  */
  size = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
	emit_move_insn (reg, adjust_address (result, mode, size));

	/* Collect the USE insns in a separate sequence so they can all
	   be emitted just before the return below.  */
	push_to_sequence (call_fusage);
	emit_use (reg);
	call_fusage = get_insns ();
	end_sequence ();
	size += GET_MODE_SIZE (mode);
      }

  /* Put the USE insns before the return.  */
  emit_insn (call_fusage);

  /* Return whatever values was restored by jumping directly to the end
     of the function.  */
  expand_naked_return ();
}
1743
1744 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1745
1746 static enum type_class
1747 type_to_class (tree type)
1748 {
1749 switch (TREE_CODE (type))
1750 {
1751 case VOID_TYPE: return void_type_class;
1752 case INTEGER_TYPE: return integer_type_class;
1753 case ENUMERAL_TYPE: return enumeral_type_class;
1754 case BOOLEAN_TYPE: return boolean_type_class;
1755 case POINTER_TYPE: return pointer_type_class;
1756 case REFERENCE_TYPE: return reference_type_class;
1757 case OFFSET_TYPE: return offset_type_class;
1758 case REAL_TYPE: return real_type_class;
1759 case COMPLEX_TYPE: return complex_type_class;
1760 case FUNCTION_TYPE: return function_type_class;
1761 case METHOD_TYPE: return method_type_class;
1762 case RECORD_TYPE: return record_type_class;
1763 case UNION_TYPE:
1764 case QUAL_UNION_TYPE: return union_type_class;
1765 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1766 ? string_type_class : array_type_class);
1767 case LANG_TYPE: return lang_type_class;
1768 default: return no_type_class;
1769 }
1770 }
1771
1772 /* Expand a call EXP to __builtin_classify_type. */
1773
1774 static rtx
1775 expand_builtin_classify_type (tree exp)
1776 {
1777 if (call_expr_nargs (exp))
1778 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1779 return GEN_INT (no_type_class);
1780 }
1781
1782 /* This helper macro, meant to be used in mathfn_built_in below,
1783 determines which among a set of three builtin math functions is
1784 appropriate for a given type mode. The `F' and `L' cases are
1785 automatically generated from the `double' case. */
1786 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1787 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1788 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1789 fcodel = BUILT_IN_MATHFN##L ; break;
1790 /* Similar to above, but appends _R after any F/L suffix. */
1791 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1792 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1793 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1794 fcodel = BUILT_IN_MATHFN##L_R ; break;
1795
/* Return mathematic function equivalent to FN but operating directly on TYPE,
   if available.  If IMPLICIT_P is true use the implicit builtin declaration,
   otherwise use the explicit declaration.  If we can't do the conversion,
   return zero.  */

static tree
mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit_p)
{
  enum built_in_function fcode, fcodef, fcodel, fcode2;

  /* Map FN to its double/float/long double function-code triple.  Each
     CASE_MATHFN expands to the three case labels and sets FCODE, FCODEF
     and FCODEL; any code not listed here has no typed variants.  */
  switch (fn)
    {
    CASE_MATHFN (BUILT_IN_ACOS)
    CASE_MATHFN (BUILT_IN_ACOSH)
    CASE_MATHFN (BUILT_IN_ASIN)
    CASE_MATHFN (BUILT_IN_ASINH)
    CASE_MATHFN (BUILT_IN_ATAN)
    CASE_MATHFN (BUILT_IN_ATAN2)
    CASE_MATHFN (BUILT_IN_ATANH)
    CASE_MATHFN (BUILT_IN_CBRT)
    CASE_MATHFN (BUILT_IN_CEIL)
    CASE_MATHFN (BUILT_IN_CEXPI)
    CASE_MATHFN (BUILT_IN_COPYSIGN)
    CASE_MATHFN (BUILT_IN_COS)
    CASE_MATHFN (BUILT_IN_COSH)
    CASE_MATHFN (BUILT_IN_DREM)
    CASE_MATHFN (BUILT_IN_ERF)
    CASE_MATHFN (BUILT_IN_ERFC)
    CASE_MATHFN (BUILT_IN_EXP)
    CASE_MATHFN (BUILT_IN_EXP10)
    CASE_MATHFN (BUILT_IN_EXP2)
    CASE_MATHFN (BUILT_IN_EXPM1)
    CASE_MATHFN (BUILT_IN_FABS)
    CASE_MATHFN (BUILT_IN_FDIM)
    CASE_MATHFN (BUILT_IN_FLOOR)
    CASE_MATHFN (BUILT_IN_FMA)
    CASE_MATHFN (BUILT_IN_FMAX)
    CASE_MATHFN (BUILT_IN_FMIN)
    CASE_MATHFN (BUILT_IN_FMOD)
    CASE_MATHFN (BUILT_IN_FREXP)
    CASE_MATHFN (BUILT_IN_GAMMA)
    CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
    CASE_MATHFN (BUILT_IN_HUGE_VAL)
    CASE_MATHFN (BUILT_IN_HYPOT)
    CASE_MATHFN (BUILT_IN_ILOGB)
    CASE_MATHFN (BUILT_IN_ICEIL)
    CASE_MATHFN (BUILT_IN_IFLOOR)
    CASE_MATHFN (BUILT_IN_INF)
    CASE_MATHFN (BUILT_IN_IRINT)
    CASE_MATHFN (BUILT_IN_IROUND)
    CASE_MATHFN (BUILT_IN_ISINF)
    CASE_MATHFN (BUILT_IN_J0)
    CASE_MATHFN (BUILT_IN_J1)
    CASE_MATHFN (BUILT_IN_JN)
    CASE_MATHFN (BUILT_IN_LCEIL)
    CASE_MATHFN (BUILT_IN_LDEXP)
    CASE_MATHFN (BUILT_IN_LFLOOR)
    CASE_MATHFN (BUILT_IN_LGAMMA)
    CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
    CASE_MATHFN (BUILT_IN_LLCEIL)
    CASE_MATHFN (BUILT_IN_LLFLOOR)
    CASE_MATHFN (BUILT_IN_LLRINT)
    CASE_MATHFN (BUILT_IN_LLROUND)
    CASE_MATHFN (BUILT_IN_LOG)
    CASE_MATHFN (BUILT_IN_LOG10)
    CASE_MATHFN (BUILT_IN_LOG1P)
    CASE_MATHFN (BUILT_IN_LOG2)
    CASE_MATHFN (BUILT_IN_LOGB)
    CASE_MATHFN (BUILT_IN_LRINT)
    CASE_MATHFN (BUILT_IN_LROUND)
    CASE_MATHFN (BUILT_IN_MODF)
    CASE_MATHFN (BUILT_IN_NAN)
    CASE_MATHFN (BUILT_IN_NANS)
    CASE_MATHFN (BUILT_IN_NEARBYINT)
    CASE_MATHFN (BUILT_IN_NEXTAFTER)
    CASE_MATHFN (BUILT_IN_NEXTTOWARD)
    CASE_MATHFN (BUILT_IN_POW)
    CASE_MATHFN (BUILT_IN_POWI)
    CASE_MATHFN (BUILT_IN_POW10)
    CASE_MATHFN (BUILT_IN_REMAINDER)
    CASE_MATHFN (BUILT_IN_REMQUO)
    CASE_MATHFN (BUILT_IN_RINT)
    CASE_MATHFN (BUILT_IN_ROUND)
    CASE_MATHFN (BUILT_IN_SCALB)
    CASE_MATHFN (BUILT_IN_SCALBLN)
    CASE_MATHFN (BUILT_IN_SCALBN)
    CASE_MATHFN (BUILT_IN_SIGNBIT)
    CASE_MATHFN (BUILT_IN_SIGNIFICAND)
    CASE_MATHFN (BUILT_IN_SIN)
    CASE_MATHFN (BUILT_IN_SINCOS)
    CASE_MATHFN (BUILT_IN_SINH)
    CASE_MATHFN (BUILT_IN_SQRT)
    CASE_MATHFN (BUILT_IN_TAN)
    CASE_MATHFN (BUILT_IN_TANH)
    CASE_MATHFN (BUILT_IN_TGAMMA)
    CASE_MATHFN (BUILT_IN_TRUNC)
    CASE_MATHFN (BUILT_IN_Y0)
    CASE_MATHFN (BUILT_IN_Y1)
    CASE_MATHFN (BUILT_IN_YN)

    default:
      return NULL_TREE;
    }

  /* Pick the variant matching TYPE's main variant; only the three
     standard floating-point types are supported here.  */
  if (TYPE_MAIN_VARIANT (type) == double_type_node)
    fcode2 = fcode;
  else if (TYPE_MAIN_VARIANT (type) == float_type_node)
    fcode2 = fcodef;
  else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
    fcode2 = fcodel;
  else
    return NULL_TREE;

  /* If only implicitly-usable builtins were requested, refuse codes
     the compiler may not introduce on its own.  */
  if (implicit_p && !builtin_decl_implicit_p (fcode2))
    return NULL_TREE;

  return builtin_decl_explicit (fcode2);
}
1914
1915 /* Like mathfn_built_in_1(), but always use the implicit array. */
1916
1917 tree
1918 mathfn_built_in (tree type, enum built_in_function fn)
1919 {
1920 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1921 }
1922
/* If errno must be maintained, expand the RTL to check if the result,
   TARGET, of a built-in function call, EXP, is NaN, and if so set
   errno to EDOM.  */

static void
expand_errno_check (tree exp, rtx target)
{
  rtx lab = gen_label_rtx ();

  /* Test the result; if it is NaN, set errno=EDOM because
     the argument was not in the domain.  A value compares EQ to
     itself unless it is NaN, so the jump to LAB skips the errno
     handling in the common (non-NaN) case.  */
  do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
			   NULL_RTX, NULL_RTX, lab,
			   /* The jump is very likely.  */
			   REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));

#ifdef TARGET_EDOM
  /* If this built-in doesn't throw an exception, set errno directly.  */
  if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
    {
#ifdef GEN_ERRNO_RTX
      rtx errno_rtx = GEN_ERRNO_RTX;
#else
      rtx errno_rtx
	  = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
#endif
      emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
      emit_label (lab);
      return;
    }
#endif

  /* Make sure the library call isn't expanded as a tail call.  */
  CALL_EXPR_TAILCALL (exp) = 0;

  /* We can't set errno=EDOM directly; let the library call do it.
     Pop the arguments right away in case the call gets deleted.  */
  NO_DEFER_POP;
  expand_call (exp, target, 0);
  OK_DEFER_POP;
  emit_label (lab);
}
1965
/* Expand a call to one of the builtin math functions (sqrt, exp, or log).
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, insns;
  tree fndecl = get_callee_fndecl (exp);
  enum machine_mode mode;
  bool errno_set = false;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  /* Select the optab implementing this builtin, and record whether the
     library function can set errno (which forces extra checking code
     below).  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SQRT):
      /* sqrt of a provably nonnegative argument cannot set errno.  */
      errno_set = ! tree_expr_nonnegative_p (arg);
      builtin_optab = sqrt_optab;
      break;
    CASE_FLT_FN (BUILT_IN_EXP):
      errno_set = true; builtin_optab = exp_optab; break;
    CASE_FLT_FN (BUILT_IN_EXP10):
    CASE_FLT_FN (BUILT_IN_POW10):
      errno_set = true; builtin_optab = exp10_optab; break;
    CASE_FLT_FN (BUILT_IN_EXP2):
      errno_set = true; builtin_optab = exp2_optab; break;
    CASE_FLT_FN (BUILT_IN_EXPM1):
      errno_set = true; builtin_optab = expm1_optab; break;
    CASE_FLT_FN (BUILT_IN_LOGB):
      errno_set = true; builtin_optab = logb_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG):
      errno_set = true; builtin_optab = log_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG10):
      errno_set = true; builtin_optab = log10_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG2):
      errno_set = true; builtin_optab = log2_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG1P):
      errno_set = true; builtin_optab = log1p_optab; break;
    CASE_FLT_FN (BUILT_IN_ASIN):
      builtin_optab = asin_optab; break;
    CASE_FLT_FN (BUILT_IN_ACOS):
      builtin_optab = acos_optab; break;
    CASE_FLT_FN (BUILT_IN_TAN):
      builtin_optab = tan_optab; break;
    CASE_FLT_FN (BUILT_IN_ATAN):
      builtin_optab = atan_optab; break;
    CASE_FLT_FN (BUILT_IN_FLOOR):
      builtin_optab = floor_optab; break;
    CASE_FLT_FN (BUILT_IN_CEIL):
      builtin_optab = ceil_optab; break;
    CASE_FLT_FN (BUILT_IN_TRUNC):
      builtin_optab = btrunc_optab; break;
    CASE_FLT_FN (BUILT_IN_ROUND):
      builtin_optab = round_optab; break;
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
      builtin_optab = nearbyint_optab;
      if (flag_trapping_math)
	break;
      /* Else fallthrough and expand as rint.  */
    CASE_FLT_FN (BUILT_IN_RINT):
      builtin_optab = rint_optab; break;
    CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
      builtin_optab = significand_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* errno handling only matters with -fmath-errno and when the mode can
     actually represent a NaN result.  */
  if (! flag_errno_math || ! HONOR_NANS (mode))
    errno_set = false;

  /* Before working hard, check whether the instruction is available.
     The errno check adds code, so skip inline expansion when optimizing
     for size in that case.  */
  if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing
      && (!errno_set || !optimize_insn_for_size_p ()))
    {
      target = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      /* Compute into TARGET.
	 Set TARGET to wherever the result comes back.  */
      target = expand_unop (mode, builtin_optab, op0, target, 0);

      if (target != 0)
	{
	  if (errno_set)
	    expand_errno_check (exp, target);

	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return target;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call to the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  return expand_call (exp, target, target == const0_rtx);
}
2086
/* Expand a call to the builtin binary math functions (pow and atan2).
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, op1, insns;
  int op1_type = REAL_TYPE;
  tree fndecl = get_callee_fndecl (exp);
  tree arg0, arg1;
  enum machine_mode mode;
  bool errno_set = true;

  /* The scalbn/scalbln/ldexp family takes an integer second argument;
     everything else takes two reals.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
    CASE_FLT_FN (BUILT_IN_LDEXP):
      op1_type = INTEGER_TYPE;
      /* FALLTHRU */
    default:
      break;
    }

  if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);

  /* Select the optab implementing this builtin.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_POW):
      builtin_optab = pow_optab; break;
    CASE_FLT_FN (BUILT_IN_ATAN2):
      builtin_optab = atan2_optab; break;
    CASE_FLT_FN (BUILT_IN_SCALB):
      /* scalb semantics require a radix-2 floating-point format.  */
      if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
	return 0;
      builtin_optab = scalb_optab; break;
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
      if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
	return 0;
      /* Fall through... */
    CASE_FLT_FN (BUILT_IN_LDEXP):
      builtin_optab = ldexp_optab; break;
    CASE_FLT_FN (BUILT_IN_FMOD):
      builtin_optab = fmod_optab; break;
    CASE_FLT_FN (BUILT_IN_REMAINDER):
    CASE_FLT_FN (BUILT_IN_DREM):
      builtin_optab = remainder_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  target = gen_reg_rtx (mode);

  /* errno handling only matters with -fmath-errno and when the mode can
     actually represent a NaN result.  */
  if (! flag_errno_math || ! HONOR_NANS (mode))
    errno_set = false;

  /* The errno check adds code; punt when optimizing for size.  */
  if (errno_set && optimize_insn_for_size_p ())
    return 0;

  /* Always stabilize the argument list so side effects are not
     performed more than once if expansion is retried.  */
  CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
  CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);

  op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
  op1 = expand_normal (arg1);

  start_sequence ();

  /* Compute into TARGET.
     Set TARGET to wherever the result comes back.  */
  target = expand_binop (mode, builtin_optab, op0, op1,
			 target, 0, OPTAB_DIRECT);

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call to the library function
     with the stabilized argument list.  */
  if (target == 0)
    {
      end_sequence ();
      return expand_call (exp, target, target == const0_rtx);
    }

  if (errno_set)
    expand_errno_check (exp, target);

  /* Output the entire sequence.  */
  insns = get_insns ();
  end_sequence ();
  emit_insn (insns);

  return target;
}
2195
/* Expand a call to the builtin trinary math functions (fma).
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, op1, op2, insns;
  tree fndecl = get_callee_fndecl (exp);
  tree arg0, arg1, arg2;
  enum machine_mode mode;

  if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);
  arg2 = CALL_EXPR_ARG (exp, 2);

  /* Only fma is handled here at present.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_FMA):
      builtin_optab = fma_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  target = gen_reg_rtx (mode);

  /* Always stabilize the argument list so side effects are not
     performed more than once if expansion is retried.  */
  CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
  CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
  CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);

  op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
  op1 = expand_normal (arg1);
  op2 = expand_normal (arg2);

  start_sequence ();

  /* Compute into TARGET.
     Set TARGET to wherever the result comes back.  */
  target = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
			      target, 0);

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call to the library function
     with the stabilized argument list.  */
  if (target == 0)
    {
      end_sequence ();
      return expand_call (exp, target, target == const0_rtx);
    }

  /* Output the entire sequence.  */
  insns = get_insns ();
  end_sequence ();
  emit_insn (insns);

  return target;
}
2268
/* Expand a call to the builtin sin and cos math functions.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, insns;
  tree fndecl = get_callee_fndecl (exp);
  enum machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  /* Prefer a combined sincos instruction for either function.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_COS):
      builtin_optab = sincos_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Check if sincos insn is available, otherwise fallback
     to sin or cos insn.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_FLT_FN (BUILT_IN_SIN):
	builtin_optab = sin_optab; break;
      CASE_FLT_FN (BUILT_IN_COS):
	builtin_optab = cos_optab; break;
      default:
	gcc_unreachable ();
      }

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
    {
      target = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      /* Compute into TARGET.
	 Set TARGET to wherever the result comes back.  */
      if (builtin_optab == sincos_optab)
	{
	  int result;

	  /* A sincos insn produces two values; direct the one we want
	     into TARGET and discard the other.  */
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    CASE_FLT_FN (BUILT_IN_SIN):
	      result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_COS):
	      result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
	      break;
	    default:
	      gcc_unreachable ();
	    }
	  gcc_assert (result);
	}
      else
	{
	  target = expand_unop (mode, builtin_optab, op0, target, 0);
	}

      if (target != 0)
	{
	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return target;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call to the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  target = expand_call (exp, target, target == const0_rtx);

  return target;
}
2372
2373 /* Given an interclass math builtin decl FNDECL and it's argument ARG
2374 return an RTL instruction code that implements the functionality.
2375 If that isn't possible or available return CODE_FOR_nothing. */
2376
2377 static enum insn_code
2378 interclass_mathfn_icode (tree arg, tree fndecl)
2379 {
2380 bool errno_set = false;
2381 optab builtin_optab = 0;
2382 enum machine_mode mode;
2383
2384 switch (DECL_FUNCTION_CODE (fndecl))
2385 {
2386 CASE_FLT_FN (BUILT_IN_ILOGB):
2387 errno_set = true; builtin_optab = ilogb_optab; break;
2388 CASE_FLT_FN (BUILT_IN_ISINF):
2389 builtin_optab = isinf_optab; break;
2390 case BUILT_IN_ISNORMAL:
2391 case BUILT_IN_ISFINITE:
2392 CASE_FLT_FN (BUILT_IN_FINITE):
2393 case BUILT_IN_FINITED32:
2394 case BUILT_IN_FINITED64:
2395 case BUILT_IN_FINITED128:
2396 case BUILT_IN_ISINFD32:
2397 case BUILT_IN_ISINFD64:
2398 case BUILT_IN_ISINFD128:
2399 /* These builtins have no optabs (yet). */
2400 break;
2401 default:
2402 gcc_unreachable ();
2403 }
2404
2405 /* There's no easy way to detect the case we need to set EDOM. */
2406 if (flag_errno_math && errno_set)
2407 return CODE_FOR_nothing;
2408
2409 /* Optab mode depends on the mode of the input argument. */
2410 mode = TYPE_MODE (TREE_TYPE (arg));
2411
2412 if (builtin_optab)
2413 return optab_handler (builtin_optab, mode);
2414 return CODE_FOR_nothing;
2415 }
2416
/* Expand a call to one of the builtin math functions that operate on
   floating point argument and output an integer result (ilogb, isinf,
   isnan, etc).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_interclass_mathfn (tree exp, rtx target)
{
  enum insn_code icode = CODE_FOR_nothing;
  rtx op0;
  tree fndecl = get_callee_fndecl (exp);
  enum machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  icode = interclass_mathfn_icode (arg, fndecl);
  mode = TYPE_MODE (TREE_TYPE (arg));

  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[1];
      rtx last = get_last_insn ();
      /* Remember the unmodified argument so we can restore it if the
	 insn cannot be emitted after all.  */
      tree orig_arg = arg;

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);

      if (mode != GET_MODE (op0))
	op0 = convert_to_mode (mode, op0, 0);

      create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
      if (maybe_legitimize_operands (icode, 0, 1, ops)
	  && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
	return ops[0].value;

      /* Emission failed: undo the partial expansion and the SAVE_EXPR
	 wrapping so the normal call path sees the original tree.  */
      delete_insns_since (last);
      CALL_EXPR_ARG (exp, 0) = orig_arg;
    }

  return NULL_RTX;
}
2467
/* Expand a call to the builtin sincos math function.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function.  */

static rtx
expand_builtin_sincos (tree exp)
{
  rtx op0, op1, op2, target1, target2;
  enum machine_mode mode;
  tree arg, sinp, cosp;
  int result;
  location_t loc = EXPR_LOCATION (exp);
  tree alias_type, alias_off;

  /* sincos (x, double *sinp, double *cosp) -- one real, two pointers.  */
  if (!validate_arglist (exp, REAL_TYPE,
			 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  sinp = CALL_EXPR_ARG (exp, 1);
  cosp = CALL_EXPR_ARG (exp, 2);

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Check if sincos insn is available, otherwise emit the call.  */
  if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  target1 = gen_reg_rtx (mode);
  target2 = gen_reg_rtx (mode);

  op0 = expand_normal (arg);
  /* Build MEM_REFs for the two output pointers so the stores get the
     right alias information.  */
  alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
  alias_off = build_int_cst (alias_type, 0);
  op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
					sinp, alias_off));
  op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
					cosp, alias_off));

  /* Compute into target1 and target2.
     Set TARGET to wherever the result comes back.  */
  result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
  gcc_assert (result);

  /* Move target1 and target2 to the memory locations indicated
     by op1 and op2.  */
  emit_move_insn (op1, target1);
  emit_move_insn (op2, target2);

  return const0_rtx;
}
2521
/* Expand a call to the internal cexpi builtin to the sincos math function.
   EXP is the expression that is a call to the builtin function; if convenient,
   the result should be placed in TARGET.  */

static rtx
expand_builtin_cexpi (tree exp, rtx target)
{
  tree fndecl = get_callee_fndecl (exp);
  tree arg, type;
  enum machine_mode mode;
  rtx op0, op1, op2;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Try expanding via a sincos optab, fall back to emitting a libcall
     to sincos or cexp.  We are sure we have sincos or cexp because cexpi
     is only generated from sincos, cexp or if we have either of them.  */
  if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
    {
      op1 = gen_reg_rtx (mode);
      op2 = gen_reg_rtx (mode);

      op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);

      /* Compute into op1 and op2.  */
      expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
    }
  else if (TARGET_HAS_SINCOS)
    {
      /* Emit a library call to sincos, storing the two results into
	 stack temporaries whose addresses are passed to the call.  */
      tree call, fn = NULL_TREE;
      tree top1, top2;
      rtx op1a, op2a;

      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	fn = builtin_decl_explicit (BUILT_IN_SINCOS);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
      else
	gcc_unreachable ();

      op1 = assign_temp (TREE_TYPE (arg), 1, 1);
      op2 = assign_temp (TREE_TYPE (arg), 1, 1);
      op1a = copy_addr_to_reg (XEXP (op1, 0));
      op2a = copy_addr_to_reg (XEXP (op2, 0));
      top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
      top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);

      /* Make sure not to fold the sincos call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
				      call, 3, arg, top1, top2));
    }
  else
    {
      /* Last resort: emit a call to cexp with a purely imaginary
	 argument, which computes the same value.  */
      tree call, fn = NULL_TREE, narg;
      tree ctype = build_complex_type (type);

      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	fn = builtin_decl_explicit (BUILT_IN_CEXPF);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	fn = builtin_decl_explicit (BUILT_IN_CEXP);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	fn = builtin_decl_explicit (BUILT_IN_CEXPL);
      else
	gcc_unreachable ();

      /* If we don't have a decl for cexp create one.  This is the
	 friendliest fallback if the user calls __builtin_cexpi
	 without full target C99 function support.  */
      if (fn == NULL_TREE)
	{
	  tree fntype;
	  const char *name = NULL;

	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	    name = "cexpf";
	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	    name = "cexp";
	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	    name = "cexpl";

	  fntype = build_function_type_list (ctype, ctype, NULL_TREE);
	  fn = build_fn_decl (name, fntype);
	}

      /* Build the complex argument 0 + arg*i.  */
      narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
			      build_real (type, dconst0), arg);

      /* Make sure not to fold the cexp call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      return expand_expr (build_call_nary (ctype, call, 1, narg),
			  target, VOIDmode, EXPAND_NORMAL);
    }

  /* Now build the proper return type: cos(arg) + sin(arg)*i.  */
  return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
			      make_tree (TREE_TYPE (arg), op2),
			      make_tree (TREE_TYPE (arg), op1)),
		      target, VOIDmode, EXPAND_NORMAL);
}
2630
2631 /* Conveniently construct a function call expression. FNDECL names the
2632 function to be called, N is the number of arguments, and the "..."
2633 parameters are the argument expressions. Unlike build_call_exr
2634 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2635
2636 static tree
2637 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2638 {
2639 va_list ap;
2640 tree fntype = TREE_TYPE (fndecl);
2641 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2642
2643 va_start (ap, n);
2644 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2645 va_end (ap);
2646 SET_EXPR_LOCATION (fn, loc);
2647 return fn;
2648 }
2649
/* Expand a call to one of the builtin rounding functions gcc defines
   as an extension (lfloor and lceil).  As these are gcc extensions we
   do not need to worry about setting errno to EDOM.
   If expanding via optab fails, lower expression to (int)(floor(x)).
   EXP is the expression that is a call to the builtin function;
   if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0, insns, tmp;
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fallback_fn;
  tree fallback_fndecl;
  enum machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    gcc_unreachable ();

  arg = CALL_EXPR_ARG (exp, 0);

  /* Select the conversion optab and the pure floating-point rounding
     function to fall back on if the optab is unavailable.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_ICEIL):
    CASE_FLT_FN (BUILT_IN_LCEIL):
    CASE_FLT_FN (BUILT_IN_LLCEIL):
      builtin_optab = lceil_optab;
      fallback_fn = BUILT_IN_CEIL;
      break;

    CASE_FLT_FN (BUILT_IN_IFLOOR):
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      builtin_optab = lfloor_optab;
      fallback_fn = BUILT_IN_FLOOR;
      break;

    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  target = gen_reg_rtx (mode);

  /* Wrap the computation of the argument in a SAVE_EXPR, as we may
     need to expand the argument again.  This way, we will not perform
     side-effects more than once.  */
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

  op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

  start_sequence ();

  /* Compute into TARGET.  */
  if (expand_sfix_optab (target, op0, builtin_optab))
    {
      /* Output the entire sequence.  */
      insns = get_insns ();
      end_sequence ();
      emit_insn (insns);
      return target;
    }

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns).  */
  end_sequence ();

  /* Fall back to floating point rounding optab.  */
  fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);

  /* For non-C99 targets we may end up without a fallback fndecl here
     if the user called __builtin_lfloor directly.  In this case emit
     a call to the floor/ceil variants nevertheless.  This should result
     in the best user experience for not full C99 targets.  */
  if (fallback_fndecl == NULL_TREE)
    {
      tree fntype;
      const char *name = NULL;

      /* Pick the library function name matching the argument type.  */
      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_ICEIL:
	case BUILT_IN_LCEIL:
	case BUILT_IN_LLCEIL:
	  name = "ceil";
	  break;
	case BUILT_IN_ICEILF:
	case BUILT_IN_LCEILF:
	case BUILT_IN_LLCEILF:
	  name = "ceilf";
	  break;
	case BUILT_IN_ICEILL:
	case BUILT_IN_LCEILL:
	case BUILT_IN_LLCEILL:
	  name = "ceill";
	  break;
	case BUILT_IN_IFLOOR:
	case BUILT_IN_LFLOOR:
	case BUILT_IN_LLFLOOR:
	  name = "floor";
	  break;
	case BUILT_IN_IFLOORF:
	case BUILT_IN_LFLOORF:
	case BUILT_IN_LLFLOORF:
	  name = "floorf";
	  break;
	case BUILT_IN_IFLOORL:
	case BUILT_IN_LFLOORL:
	case BUILT_IN_LLFLOORL:
	  name = "floorl";
	  break;
	default:
	  gcc_unreachable ();
	}

      fntype = build_function_type_list (TREE_TYPE (arg),
					 TREE_TYPE (arg), NULL_TREE);
      fallback_fndecl = build_fn_decl (name, fntype);
    }

  exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);

  tmp = expand_normal (exp);

  /* Truncate the result of floating point optab to integer
     via expand_fix ().  */
  target = gen_reg_rtx (mode);
  expand_fix (target, tmp, 0);

  return target;
}
2785
/* Expand a call to one of the builtin math functions doing integer
   conversion (lrint).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn_2 (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0, insns;
  tree fndecl = get_callee_fndecl (exp);
  tree arg;
  enum machine_mode mode;
  /* If set, the int-returning variant falls back to this long-returning
     builtin when no optab expansion is possible.  */
  enum built_in_function fallback_fn = BUILT_IN_NONE;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
     gcc_unreachable ();

  arg = CALL_EXPR_ARG (exp, 0);

  /* Map the builtin to its conversion optab; the int variants also record
     the corresponding long builtin as a fallback.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_IRINT):
      fallback_fn = BUILT_IN_LRINT;
      /* FALLTHRU */
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_LLRINT):
      builtin_optab = lrint_optab;
      break;

    CASE_FLT_FN (BUILT_IN_IROUND):
      fallback_fn = BUILT_IN_LROUND;
      /* FALLTHRU */
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_LLROUND):
      builtin_optab = lround_optab;
      break;

    default:
      gcc_unreachable ();
    }

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
    return NULL_RTX;

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (!flag_errno_math)
    {
      target = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more the once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

      /* Collect the optab expansion in a sequence so it can be discarded
	 wholesale if the optab turns out not to apply.  */
      start_sequence ();

      if (expand_sfix_optab (target, op0, builtin_optab))
	{
	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return target;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call to the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  if (fallback_fn != BUILT_IN_NONE)
    {
      /* Fall back to rounding to long int.  Use implicit_p 0 - for non-C99
	 targets, (int) round (x) should never be transformed into
	 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
	 a call to lround in the hope that the target provides at least some
	 C99 functions.  This should result in the best user experience for
	 not full C99 targets.  */
      tree fallback_fndecl = mathfn_built_in_1 (TREE_TYPE (arg),
						fallback_fn, 0);

      exp = build_call_nofold_loc (EXPR_LOCATION (exp),
				   fallback_fndecl, 1, arg);

      target = expand_call (exp, NULL_RTX, target == const0_rtx);
      /* The fallback returns long; narrow/widen to the requested mode.  */
      return convert_to_mode (mode, target, 0);
    }

  target = expand_call (exp, target, target == const0_rtx);

  return target;
}
2887
2888 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2889 a normal call should be emitted rather than expanding the function
2890 in-line. EXP is the expression that is a call to the builtin
2891 function; if convenient, the result should be placed in TARGET. */
2892
2893 static rtx
2894 expand_builtin_powi (tree exp, rtx target)
2895 {
2896 tree arg0, arg1;
2897 rtx op0, op1;
2898 enum machine_mode mode;
2899 enum machine_mode mode2;
2900
2901 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2902 return NULL_RTX;
2903
2904 arg0 = CALL_EXPR_ARG (exp, 0);
2905 arg1 = CALL_EXPR_ARG (exp, 1);
2906 mode = TYPE_MODE (TREE_TYPE (exp));
2907
2908 /* Emit a libcall to libgcc. */
2909
2910 /* Mode of the 2nd argument must match that of an int. */
2911 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2912
2913 if (target == NULL_RTX)
2914 target = gen_reg_rtx (mode);
2915
2916 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2917 if (GET_MODE (op0) != mode)
2918 op0 = convert_to_mode (mode, op0, 0);
2919 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2920 if (GET_MODE (op1) != mode2)
2921 op1 = convert_to_mode (mode2, op1, 0);
2922
2923 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2924 target, LCT_CONST, mode, 2,
2925 op0, mode, op1, mode2);
2926
2927 return target;
2928 }
2929
/* Expand expression EXP which is a call to the strlen builtin.  Return
   NULL_RTX if we failed the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strlen (tree exp, rtx target,
		       enum machine_mode target_mode)
{
  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      struct expand_operand ops[4];
      rtx pat;
      tree len;
      tree src = CALL_EXPR_ARG (exp, 0);
      rtx src_reg, before_strlen;
      enum machine_mode insn_mode = target_mode;
      enum insn_code icode = CODE_FOR_nothing;
      unsigned int align;

      /* If the length can be computed at compile-time, return it.  */
      len = c_strlen (src, 0);
      if (len)
	return expand_expr (len, target, target_mode, EXPAND_NORMAL);

      /* If the length can be computed at compile-time and is constant
	 integer, but there are side-effects in src, evaluate
	 src for side-effects, then return len.
	 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
	 can be optimized into: i++; x = 3;  */
      len = c_strlen (src, 1);
      if (len && TREE_CODE (len) == INTEGER_CST)
	{
	  expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
	  return expand_expr (len, target, target_mode, EXPAND_NORMAL);
	}

      align = get_pointer_alignment (src) / BITS_PER_UNIT;

      /* If SRC is not a pointer type, don't do this operation inline.  */
      if (align == 0)
	return NULL_RTX;

      /* Bail out if we can't compute strlen in the right mode.  Try
	 successively wider integer modes until one has a strlen insn.  */
      while (insn_mode != VOIDmode)
	{
	  icode = optab_handler (strlen_optab, insn_mode);
	  if (icode != CODE_FOR_nothing)
	    break;

	  insn_mode = GET_MODE_WIDER_MODE (insn_mode);
	}
      if (insn_mode == VOIDmode)
	return NULL_RTX;

      /* Make a place to hold the source address.  We will not expand
	 the actual source until we are sure that the expansion will
	 not fail -- there are trees that cannot be expanded twice.  */
      src_reg = gen_reg_rtx (Pmode);

      /* Mark the beginning of the strlen sequence so we can emit the
	 source operand later.  */
      before_strlen = get_last_insn ();

      create_output_operand (&ops[0], target, insn_mode);
      create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
      create_integer_operand (&ops[2], 0);
      create_integer_operand (&ops[3], align);
      if (!maybe_expand_insn (icode, 4, ops))
	return NULL_RTX;

      /* Now that we are assured of success, expand the source.  */
      start_sequence ();
      pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
      if (pat != src_reg)
	{
#ifdef POINTERS_EXTEND_UNSIGNED
	  if (GET_MODE (pat) != Pmode)
	    pat = convert_to_mode (Pmode, pat,
				   POINTERS_EXTEND_UNSIGNED);
#endif
	  emit_move_insn (src_reg, pat);
	}
      pat = get_insns ();
      end_sequence ();

      /* Splice the source-address computation in ahead of the strlen
	 insns emitted above.  */
      if (before_strlen)
	emit_insn_after (pat, before_strlen);
      else
	emit_insn_before (pat, get_insns ());

      /* Return the value in the proper mode for this function.  */
      if (GET_MODE (ops[0].value) == target_mode)
	target = ops[0].value;
      else if (target != 0)
	convert_move (target, ops[0].value, 0);
      else
	target = convert_to_mode (target_mode, ops[0].value, 0);

      return target;
    }
}
3033
3034 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3035 bytes from constant string DATA + OFFSET and return it as target
3036 constant. */
3037
3038 static rtx
3039 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3040 enum machine_mode mode)
3041 {
3042 const char *str = (const char *) data;
3043
3044 gcc_assert (offset >= 0
3045 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3046 <= strlen (str) + 1));
3047
3048 return c_readstr (str + offset, mode);
3049 }
3050
/* Expand a call EXP to the memcpy builtin.
   Return NULL_RTX if we failed, the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_memcpy (tree exp, rtx target)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      tree len = CALL_EXPR_ARG (exp, 2);
      const char *src_str;
      unsigned int src_align = get_pointer_alignment (src);
      unsigned int dest_align = get_pointer_alignment (dest);
      rtx dest_mem, src_mem, dest_addr, len_rtx;
      HOST_WIDE_INT expected_size = -1;
      unsigned int expected_align = 0;

      /* If DEST is not a pointer type, call the normal function.  */
      if (dest_align == 0)
	return NULL_RTX;

      /* If either SRC is not a pointer type, don't do this
	 operation in-line.  */
      if (src_align == 0)
	return NULL_RTX;

      /* Ask profile feedback for a likely alignment/size of this string
	 operation, to guide the block-move expansion.  */
      if (currently_expanding_gimple_stmt)
	stringop_block_profile (currently_expanding_gimple_stmt,
				&expected_align, &expected_size);

      if (expected_align < dest_align)
	expected_align = dest_align;
      dest_mem = get_memory_rtx (dest, len);
      set_mem_align (dest_mem, dest_align);
      len_rtx = expand_normal (len);
      src_str = c_getstr (src);

      /* If SRC is a string constant and block move would be done
	 by pieces, we can avoid loading the string from memory
	 and only stored the computed constants.  */
      if (src_str
	  && CONST_INT_P (len_rtx)
	  && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
	  && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
				  CONST_CAST (char *, src_str),
				  dest_align, false))
	{
	  dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
				      builtin_memcpy_read_str,
				      CONST_CAST (char *, src_str),
				      dest_align, false, 0);
	  dest_mem = force_operand (XEXP (dest_mem, 0), target);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}

      src_mem = get_memory_rtx (src, len);
      set_mem_align (src_mem, src_align);

      /* Copy word part most expediently.  */
      dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
					 CALL_EXPR_TAILCALL (exp)
					 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
					 expected_align, expected_size);

      /* If the block move did not hand back the destination address,
	 compute it from DEST_MEM ourselves.  */
      if (dest_addr == 0)
	{
	  dest_addr = force_operand (XEXP (dest_mem, 0), target);
	  dest_addr = convert_memory_address (ptr_mode, dest_addr);
	}
      return dest_addr;
    }
}
3130
3131 /* Expand a call EXP to the mempcpy builtin.
3132 Return NULL_RTX if we failed; the caller should emit a normal call,
3133 otherwise try to get the result in TARGET, if convenient (and in
3134 mode MODE if that's convenient). If ENDP is 0 return the
3135 destination pointer, if ENDP is 1 return the end pointer ala
3136 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3137 stpcpy. */
3138
3139 static rtx
3140 expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3141 {
3142 if (!validate_arglist (exp,
3143 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3144 return NULL_RTX;
3145 else
3146 {
3147 tree dest = CALL_EXPR_ARG (exp, 0);
3148 tree src = CALL_EXPR_ARG (exp, 1);
3149 tree len = CALL_EXPR_ARG (exp, 2);
3150 return expand_builtin_mempcpy_args (dest, src, len,
3151 target, mode, /*endp=*/ 1);
3152 }
3153 }
3154
/* Helper function to do the actual work for expand_builtin_mempcpy.  The
   arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_mempcpy.  */

static rtx
expand_builtin_mempcpy_args (tree dest, tree src, tree len,
			     rtx target, enum machine_mode mode, int endp)
{
  /* If return value is ignored, transform mempcpy into memcpy.  */
  if (target == const0_rtx && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
    {
      tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
      tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
					   dest, src, len);
      return expand_expr (result, target, mode, EXPAND_NORMAL);
    }
  else
    {
      const char *src_str;
      unsigned int src_align = get_pointer_alignment (src);
      unsigned int dest_align = get_pointer_alignment (dest);
      rtx dest_mem, src_mem, len_rtx;

      /* If either SRC or DEST is not a pointer type, don't do this
	 operation in-line.  */
      if (dest_align == 0 || src_align == 0)
	return NULL_RTX;

      /* If LEN is not constant, call the normal function.  */
      if (! host_integerp (len, 1))
	return NULL_RTX;

      len_rtx = expand_normal (len);
      src_str = c_getstr (src);

      /* If SRC is a string constant and block move would be done
	 by pieces, we can avoid loading the string from memory
	 and only stored the computed constants.  */
      if (src_str
	  && CONST_INT_P (len_rtx)
	  && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
	  && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
				  CONST_CAST (char *, src_str),
				  dest_align, false))
	{
	  dest_mem = get_memory_rtx (dest, len);
	  set_mem_align (dest_mem, dest_align);
	  /* ENDP selects which address store_by_pieces returns (start,
	     end, or end minus one).  */
	  dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
				      builtin_memcpy_read_str,
				      CONST_CAST (char *, src_str),
				      dest_align, false, endp);
	  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}

      /* Otherwise, try a piecewise move when LEN is a small constant.  */
      if (CONST_INT_P (len_rtx)
	  && can_move_by_pieces (INTVAL (len_rtx),
				 MIN (dest_align, src_align)))
	{
	  dest_mem = get_memory_rtx (dest, len);
	  set_mem_align (dest_mem, dest_align);
	  src_mem = get_memory_rtx (src, len);
	  set_mem_align (src_mem, src_align);
	  dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
				     MIN (dest_align, src_align), endp);
	  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}

      return NULL_RTX;
    }
}
3231
3232 #ifndef HAVE_movstr
3233 # define HAVE_movstr 0
3234 # define CODE_FOR_movstr CODE_FOR_nothing
3235 #endif
3236
/* Expand into a movstr instruction, if one is available.  Return NULL_RTX if
   we failed, the caller should emit a normal call, otherwise try to
   get the result in TARGET, if convenient.  If ENDP is 0 return the
   destination pointer, if ENDP is 1 return the end pointer ala
   mempcpy, and if ENDP is 2 return the end pointer minus one ala
   stpcpy.  */

static rtx
expand_movstr (tree dest, tree src, rtx target, int endp)
{
  struct expand_operand ops[3];
  rtx dest_mem;
  rtx src_mem;

  /* Nothing to do when the target provides no movstr pattern.  */
  if (!HAVE_movstr)
    return NULL_RTX;

  dest_mem = get_memory_rtx (dest, NULL);
  src_mem = get_memory_rtx (src, NULL);
  if (!endp)
    {
      /* The return value is the destination pointer itself; pin it in a
	 register up front so we can hand it back unchanged.  */
      target = force_reg (Pmode, XEXP (dest_mem, 0));
      dest_mem = replace_equiv_address (dest_mem, target);
    }

  create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
  create_fixed_operand (&ops[1], dest_mem);
  create_fixed_operand (&ops[2], src_mem);
  expand_insn (CODE_FOR_movstr, 3, ops);

  if (endp && target != const0_rtx)
    {
      target = ops[0].value;
      /* movstr is supposed to set end to the address of the NUL
	 terminator.  If the caller requested a mempcpy-like return value,
	 adjust it.  */
      if (endp == 1)
	{
	  rtx tem = plus_constant (GET_MODE (target),
				   gen_lowpart (GET_MODE (target), target), 1);
	  emit_move_insn (target, force_operand (tem, NULL_RTX));
	}
    }
  return target;
}
3282
3283 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3284 NULL_RTX if we failed the caller should emit a normal call, otherwise
3285 try to get the result in TARGET, if convenient (and in mode MODE if that's
3286 convenient). */
3287
3288 static rtx
3289 expand_builtin_strcpy (tree exp, rtx target)
3290 {
3291 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3292 {
3293 tree dest = CALL_EXPR_ARG (exp, 0);
3294 tree src = CALL_EXPR_ARG (exp, 1);
3295 return expand_builtin_strcpy_args (dest, src, target);
3296 }
3297 return NULL_RTX;
3298 }
3299
3300 /* Helper function to do the actual work for expand_builtin_strcpy. The
3301 arguments to the builtin_strcpy call DEST and SRC are broken out
3302 so that this can also be called without constructing an actual CALL_EXPR.
3303 The other arguments and return value are the same as for
3304 expand_builtin_strcpy. */
3305
3306 static rtx
3307 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3308 {
3309 return expand_movstr (dest, src, target, /*endp=*/0);
3310 }
3311
/* Expand a call EXP to the stpcpy builtin.
   Return NULL_RTX if we failed the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
{
  tree dst, src;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  /* If return value is ignored, transform stpcpy into strcpy.  */
  if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
      return expand_expr (result, target, mode, EXPAND_NORMAL);
    }
  else
    {
      tree len, lenp1;
      rtx ret;

      /* Ensure we get an actual string whose length can be evaluated at
	 compile-time, not an expression containing a string.  This is
	 because the latter will potentially produce pessimized code
	 when used to produce the return value.  */
      if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
	return expand_movstr (dst, src, target, /*endp=*/2);

      /* Known-length source: stpcpy (d, s) == mempcpy (d, s, strlen (s) + 1)
	 minus one, hence endp == 2.  */
      lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
      ret = expand_builtin_mempcpy_args (dst, src, lenp1,
					 target, mode, /*endp=*/2);

      if (ret)
	return ret;

      /* mempcpy expansion failed; if the length is a compile-time
	 constant, expand as strcpy and add LEN to the returned
	 destination pointer to form the stpcpy result.  */
      if (TREE_CODE (len) == INTEGER_CST)
	{
	  rtx len_rtx = expand_normal (len);

	  if (CONST_INT_P (len_rtx))
	    {
	      ret = expand_builtin_strcpy_args (dst, src, target);

	      if (ret)
		{
		  if (! target)
		    {
		      if (mode != VOIDmode)
			target = gen_reg_rtx (mode);
		      else
			target = gen_reg_rtx (GET_MODE (ret));
		    }
		  if (GET_MODE (target) != GET_MODE (ret))
		    ret = gen_lowpart (GET_MODE (target), ret);

		  ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
		  ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
		  gcc_assert (ret);

		  return target;
		}
	    }
	}

      /* Last resort: let the movstr pattern (if any) do the work.  */
      return expand_movstr (dst, src, target, /*endp=*/2);
    }
}
3387
3388 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3389 bytes from constant string DATA + OFFSET and return it as target
3390 constant. */
3391
3392 rtx
3393 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3394 enum machine_mode mode)
3395 {
3396 const char *str = (const char *) data;
3397
3398 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3399 return const0_rtx;
3400
3401 return c_readstr (str + offset, mode);
3402 }
3403
/* Expand expression EXP, which is a call to the strncpy builtin.  Return
   NULL_RTX if we failed the caller should emit a normal call.  */

static rtx
expand_builtin_strncpy (tree exp, rtx target)
{
  location_t loc = EXPR_LOCATION (exp);

  if (validate_arglist (exp,
			POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      tree len = CALL_EXPR_ARG (exp, 2);
      tree slen = c_strlen (src, 1);

      /* We must be passed a constant len and src parameter.  */
      if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
	return NULL_RTX;

      /* SLEN becomes strlen (src) + 1 to account for the NUL.  */
      slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

      /* We're required to pad with trailing zeros if the requested
	 len is greater than strlen(s2)+1.  In that case try to
	 use store_by_pieces, if it fails, punt.  */
      if (tree_int_cst_lt (slen, len))
	{
	  unsigned int dest_align = get_pointer_alignment (dest);
	  const char *p = c_getstr (src);
	  rtx dest_mem;

	  if (!p || dest_align == 0 || !host_integerp (len, 1)
	      || !can_store_by_pieces (tree_low_cst (len, 1),
				       builtin_strncpy_read_str,
				       CONST_CAST (char *, p),
				       dest_align, false))
	    return NULL_RTX;

	  /* builtin_strncpy_read_str supplies zeros past the NUL, giving
	     the required padding.  */
	  dest_mem = get_memory_rtx (dest, len);
	  store_by_pieces (dest_mem, tree_low_cst (len, 1),
			   builtin_strncpy_read_str,
			   CONST_CAST (char *, p), dest_align, false, 0);
	  dest_mem = force_operand (XEXP (dest_mem, 0), target);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}
    }
  return NULL_RTX;
}
3453
3454 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3455 bytes from constant string DATA + OFFSET and return it as target
3456 constant. */
3457
3458 rtx
3459 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3460 enum machine_mode mode)
3461 {
3462 const char *c = (const char *) data;
3463 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3464
3465 memset (p, *c, GET_MODE_SIZE (mode));
3466
3467 return c_readstr (p, mode);
3468 }
3469
3470 /* Callback routine for store_by_pieces. Return the RTL of a register
3471 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3472 char value given in the RTL register data. For example, if mode is
3473 4 bytes wide, return the RTL for 0x01010101*data. */
3474
3475 static rtx
3476 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3477 enum machine_mode mode)
3478 {
3479 rtx target, coeff;
3480 size_t size;
3481 char *p;
3482
3483 size = GET_MODE_SIZE (mode);
3484 if (size == 1)
3485 return (rtx) data;
3486
3487 p = XALLOCAVEC (char, size);
3488 memset (p, 1, size);
3489 coeff = c_readstr (p, mode);
3490
3491 target = convert_to_mode (mode, (rtx) data, 1);
3492 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3493 return force_reg (mode, target);
3494 }
3495
3496 /* Expand expression EXP, which is a call to the memset builtin. Return
3497 NULL_RTX if we failed the caller should emit a normal call, otherwise
3498 try to get the result in TARGET, if convenient (and in mode MODE if that's
3499 convenient). */
3500
3501 static rtx
3502 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3503 {
3504 if (!validate_arglist (exp,
3505 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3506 return NULL_RTX;
3507 else
3508 {
3509 tree dest = CALL_EXPR_ARG (exp, 0);
3510 tree val = CALL_EXPR_ARG (exp, 1);
3511 tree len = CALL_EXPR_ARG (exp, 2);
3512 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3513 }
3514 }
3515
/* Helper function to do the actual work for expand_builtin_memset.  The
   arguments to the builtin_memset call DEST, VAL, and LEN are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_memset.  */

static rtx
expand_builtin_memset_args (tree dest, tree val, tree len,
			    rtx target, enum machine_mode mode, tree orig_exp)
{
  tree fndecl, fn;
  enum built_in_function fcode;
  enum machine_mode val_mode;
  char c;
  unsigned int dest_align;
  rtx dest_mem, dest_addr, len_rtx;
  HOST_WIDE_INT expected_size = -1;
  unsigned int expected_align = 0;

  dest_align = get_pointer_alignment (dest);

  /* If DEST is not a pointer type, don't do this operation in-line.  */
  if (dest_align == 0)
    return NULL_RTX;

  /* Ask profile feedback for a likely alignment/size of this string
     operation.  */
  if (currently_expanding_gimple_stmt)
    stringop_block_profile (currently_expanding_gimple_stmt,
			    &expected_align, &expected_size);

  if (expected_align < dest_align)
    expected_align = dest_align;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      /* Evaluate and ignore VAL in case it has side-effects.  */
      expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
      return expand_expr (dest, target, mode, EXPAND_NORMAL);
    }

  /* Stabilize the arguments in case we fail.  */
  dest = builtin_save_expr (dest);
  val = builtin_save_expr (val);
  len = builtin_save_expr (len);

  len_rtx = expand_normal (len);
  dest_mem = get_memory_rtx (dest, len);
  val_mode = TYPE_MODE (unsigned_char_type_node);

  /* Non-constant fill value: replicate it at runtime.  */
  if (TREE_CODE (val) != INTEGER_CST)
    {
      rtx val_rtx;

      val_rtx = expand_normal (val);
      val_rtx = convert_to_mode (val_mode, val_rtx, 0);

      /* Assume that we can memset by pieces if we can store
       * the coefficients by pieces (in the required modes).
       * We can't pass builtin_memset_gen_str as that emits RTL.  */
      c = 1;
      if (host_integerp (len, 1)
	  && can_store_by_pieces (tree_low_cst (len, 1),
				  builtin_memset_read_str, &c, dest_align,
				  true))
	{
	  val_rtx = force_reg (val_mode, val_rtx);
	  store_by_pieces (dest_mem, tree_low_cst (len, 1),
			   builtin_memset_gen_str, val_rtx, dest_align,
			   true, 0);
	}
      else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
					dest_align, expected_align,
					expected_size))
	goto do_libcall;

      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  if (target_char_cast (val, &c))
    goto do_libcall;

  /* Constant non-zero fill byte: store by pieces or via a setmem insn.  */
  if (c)
    {
      if (host_integerp (len, 1)
	  && can_store_by_pieces (tree_low_cst (len, 1),
				  builtin_memset_read_str, &c, dest_align,
				  true))
	store_by_pieces (dest_mem, tree_low_cst (len, 1),
			 builtin_memset_read_str, &c, dest_align, true, 0);
      else if (!set_storage_via_setmem (dest_mem, len_rtx,
					gen_int_mode (c, val_mode),
					dest_align, expected_align,
					expected_size))
	goto do_libcall;

      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  /* Fill byte is zero: this is a block clear.  */
  set_mem_align (dest_mem, dest_align);
  dest_addr = clear_storage_hints (dest_mem, len_rtx,
				   CALL_EXPR_TAILCALL (orig_exp)
				   ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
				   expected_align, expected_size);

  if (dest_addr == 0)
    {
      dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_addr = convert_memory_address (ptr_mode, dest_addr);
    }

  return dest_addr;

 do_libcall:
  /* Inline expansion failed; emit a library call to whichever builtin
     (memset or bzero) the user originally wrote.  */
  fndecl = get_callee_fndecl (orig_exp);
  fcode = DECL_FUNCTION_CODE (fndecl);
  if (fcode == BUILT_IN_MEMSET)
    fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
				dest, val, len);
  else if (fcode == BUILT_IN_BZERO)
    fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
				dest, len);
  else
    gcc_unreachable ();
  gcc_assert (TREE_CODE (fn) == CALL_EXPR);
  CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
  return expand_call (fn, target, target == const0_rtx);
}
3647
3648 /* Expand expression EXP, which is a call to the bzero builtin. Return
3649 NULL_RTX if we failed the caller should emit a normal call. */
3650
3651 static rtx
3652 expand_builtin_bzero (tree exp)
3653 {
3654 tree dest, size;
3655 location_t loc = EXPR_LOCATION (exp);
3656
3657 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3658 return NULL_RTX;
3659
3660 dest = CALL_EXPR_ARG (exp, 0);
3661 size = CALL_EXPR_ARG (exp, 1);
3662
3663 /* New argument list transforming bzero(ptr x, int y) to
3664 memset(ptr x, int 0, size_t y). This is done this way
3665 so that if it isn't expanded inline, we fallback to
3666 calling bzero instead of memset. */
3667
3668 return expand_builtin_memset_args (dest, integer_zero_node,
3669 fold_convert_loc (loc,
3670 size_type_node, size),
3671 const0_rtx, VOIDmode, exp);
3672 }
3673
/* Expand expression EXP, which is a call to the memcmp built-in function.
   Return NULL_RTX if we failed and the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in mode
   MODE, if that's convenient).  */

static rtx
expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
		       ATTRIBUTE_UNUSED enum machine_mode mode)
{
  location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);

  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
     implementing memcmp because it will stop if it encounters two
     zero bytes.  */
#if defined HAVE_cmpmemsi
  {
    rtx arg1_rtx, arg2_rtx, arg3_rtx;
    rtx result;
    rtx insn;
    tree arg1 = CALL_EXPR_ARG (exp, 0);
    tree arg2 = CALL_EXPR_ARG (exp, 1);
    tree len = CALL_EXPR_ARG (exp, 2);

    unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
    unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
    enum machine_mode insn_mode;

    if (HAVE_cmpmemsi)
      insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
    else
      return NULL_RTX;

    /* If we don't have POINTER_TYPE, call the function.  */
    if (arg1_align == 0 || arg2_align == 0)
      return NULL_RTX;

    /* Make a place to write the result of the instruction.  */
    result = target;
    if (! (result != 0
	   && REG_P (result) && GET_MODE (result) == insn_mode
	   && REGNO (result) >= FIRST_PSEUDO_REGISTER))
      result = gen_reg_rtx (insn_mode);

    arg1_rtx = get_memory_rtx (arg1, len);
    arg2_rtx = get_memory_rtx (arg2, len);
    arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));

    /* Set MEM_SIZE as appropriate.  */
    if (CONST_INT_P (arg3_rtx))
      {
	set_mem_size (arg1_rtx, INTVAL (arg3_rtx));
	set_mem_size (arg2_rtx, INTVAL (arg3_rtx));
      }

    if (HAVE_cmpmemsi)
      insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
			   GEN_INT (MIN (arg1_align, arg2_align)));
    else
      gcc_unreachable ();

    /* If the pattern produced an insn emit it; otherwise fall back to a
       library call to memcmp.  */
    if (insn)
      emit_insn (insn);
    else
      emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
			       TYPE_MODE (integer_type_node), 3,
			       XEXP (arg1_rtx, 0), Pmode,
			       XEXP (arg2_rtx, 0), Pmode,
			       convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
						TYPE_UNSIGNED (sizetype)),
			       TYPE_MODE (sizetype));

    /* Return the value in the proper mode for this function.  */
    mode = TYPE_MODE (TREE_TYPE (exp));
    if (GET_MODE (result) == mode)
      return result;
    else if (target != 0)
      {
	convert_move (target, result, 0);
	return target;
      }
    else
      return convert_to_mode (mode, result, 0);
  }
#endif /* HAVE_cmpmemsi.  */

  return NULL_RTX;
}
3765
/* Expand expression EXP, which is a call to the strcmp builtin.  Return
   NULL_RTX if we failed; the caller should then emit a normal call.
   Otherwise try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
{
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

#if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
  /* Only try inline expansion when the target provides a string- or
     counted-string-compare pattern in SImode.  */
  if (direct_optab_handler (cmpstr_optab, SImode) != CODE_FOR_nothing
      || direct_optab_handler (cmpstrn_optab, SImode) != CODE_FOR_nothing)
    {
      rtx arg1_rtx, arg2_rtx;
      rtx result, insn = NULL_RTX;
      tree fndecl, fn;
      tree arg1 = CALL_EXPR_ARG (exp, 0);
      tree arg2 = CALL_EXPR_ARG (exp, 1);

      /* Known byte alignment of each pointer; 0 means "not a pointer
	 we can reason about".  */
      unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
      unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;

      /* If we don't have POINTER_TYPE, call the function.  */
      if (arg1_align == 0 || arg2_align == 0)
	return NULL_RTX;

      /* Stabilize the arguments in case gen_cmpstr(n)si fail, so the
	 library-call fallback below does not re-evaluate them.  */
      arg1 = builtin_save_expr (arg1);
      arg2 = builtin_save_expr (arg2);

      arg1_rtx = get_memory_rtx (arg1, NULL);
      arg2_rtx = get_memory_rtx (arg2, NULL);

#ifdef HAVE_cmpstrsi
      /* Try to call cmpstrsi.  */
      if (HAVE_cmpstrsi)
	{
	  enum machine_mode insn_mode
	    = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;

	  /* Make a place to write the result of the instruction.
	     Reuse TARGET only if it is a pseudo register of the
	     mode the pattern produces.  */
	  result = target;
	  if (! (result != 0
		 && REG_P (result) && GET_MODE (result) == insn_mode
		 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	    result = gen_reg_rtx (insn_mode);

	  insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
			       GEN_INT (MIN (arg1_align, arg2_align)));
	}
#endif
#ifdef HAVE_cmpstrnsi
      /* Try to determine at least one length and call cmpstrnsi.  */
      if (!insn && HAVE_cmpstrnsi)
	{
	  tree len;
	  rtx arg3_rtx;

	  enum machine_mode insn_mode
	    = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
	  /* Compile-time string lengths, if either argument is a
	     literal; +1 below accounts for the terminating NUL.  */
	  tree len1 = c_strlen (arg1, 1);
	  tree len2 = c_strlen (arg2, 1);

	  if (len1)
	    len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
	  if (len2)
	    len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);

	  /* If we don't have a constant length for the first, use the length
	     of the second, if we know it.  We don't require a constant for
	     this case; some cost analysis could be done if both are available
	     but neither is constant.  For now, assume they're equally cheap,
	     unless one has side effects.  If both strings have constant lengths,
	     use the smaller.  */

	  if (!len1)
	    len = len2;
	  else if (!len2)
	    len = len1;
	  else if (TREE_SIDE_EFFECTS (len1))
	    len = len2;
	  else if (TREE_SIDE_EFFECTS (len2))
	    len = len1;
	  else if (TREE_CODE (len1) != INTEGER_CST)
	    len = len2;
	  else if (TREE_CODE (len2) != INTEGER_CST)
	    len = len1;
	  else if (tree_int_cst_lt (len1, len2))
	    len = len1;
	  else
	    len = len2;

	  /* If both arguments have side effects, we cannot optimize.  */
	  if (!len || TREE_SIDE_EFFECTS (len))
	    goto do_libcall;

	  arg3_rtx = expand_normal (len);

	  /* Make a place to write the result of the instruction.  */
	  result = target;
	  if (! (result != 0
		 && REG_P (result) && GET_MODE (result) == insn_mode
		 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	    result = gen_reg_rtx (insn_mode);

	  insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
				GEN_INT (MIN (arg1_align, arg2_align)));
	}
#endif

      if (insn)
	{
	  enum machine_mode mode;
	  emit_insn (insn);

	  /* Return the value in the proper mode for this function.  */
	  mode = TYPE_MODE (TREE_TYPE (exp));
	  if (GET_MODE (result) == mode)
	    return result;
	  if (target == 0)
	    return convert_to_mode (mode, result, 0);
	  convert_move (target, result, 0);
	  return target;
	}

      /* Expand the library call ourselves using a stabilized argument
	 list to avoid re-evaluating the function's arguments twice.  */
#ifdef HAVE_cmpstrnsi
    do_libcall:
#endif
      fndecl = get_callee_fndecl (exp);
      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_call (fn, target, target == const0_rtx);
    }
#endif
  return NULL_RTX;
}
3906
/* Expand expression EXP, which is a call to the strncmp builtin.  Return
   NULL_RTX if we failed; the caller should then emit a normal call.
   Otherwise try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
			ATTRIBUTE_UNUSED enum machine_mode mode)
{
  location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);

  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* If c_strlen can determine an expression for one of the string
     lengths, and it doesn't have side effects, then emit cmpstrnsi
     using length MIN(strlen(string)+1, arg3).  */
#ifdef HAVE_cmpstrnsi
  if (HAVE_cmpstrnsi)
    {
      tree len, len1, len2;
      rtx arg1_rtx, arg2_rtx, arg3_rtx;
      rtx result, insn;
      tree fndecl, fn;
      tree arg1 = CALL_EXPR_ARG (exp, 0);
      tree arg2 = CALL_EXPR_ARG (exp, 1);
      tree arg3 = CALL_EXPR_ARG (exp, 2);

      /* Known byte alignment of each pointer; 0 means unknown and
	 forces the library call.  */
      unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
      unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
      enum machine_mode insn_mode
	= insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;

      /* Compile-time lengths of literal arguments, if any; the +1
	 below covers the terminating NUL.  */
      len1 = c_strlen (arg1, 1);
      len2 = c_strlen (arg2, 1);

      if (len1)
	len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
      if (len2)
	len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);

      /* If we don't have a constant length for the first, use the length
	 of the second, if we know it.  We don't require a constant for
	 this case; some cost analysis could be done if both are available
	 but neither is constant.  For now, assume they're equally cheap,
	 unless one has side effects.  If both strings have constant lengths,
	 use the smaller.  */

      if (!len1)
	len = len2;
      else if (!len2)
	len = len1;
      else if (TREE_SIDE_EFFECTS (len1))
	len = len2;
      else if (TREE_SIDE_EFFECTS (len2))
	len = len1;
      else if (TREE_CODE (len1) != INTEGER_CST)
	len = len2;
      else if (TREE_CODE (len2) != INTEGER_CST)
	len = len1;
      else if (tree_int_cst_lt (len1, len2))
	len = len1;
      else
	len = len2;

      /* If both arguments have side effects, we cannot optimize.  */
      if (!len || TREE_SIDE_EFFECTS (len))
	return NULL_RTX;

      /* The actual new length parameter is MIN(len,arg3).  */
      len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
			     fold_convert_loc (loc, TREE_TYPE (len), arg3));

      /* If we don't have POINTER_TYPE, call the function.  */
      if (arg1_align == 0 || arg2_align == 0)
	return NULL_RTX;

      /* Make a place to write the result of the instruction.  Reuse
	 TARGET only if it is a pseudo of the pattern's result mode.  */
      result = target;
      if (! (result != 0
	     && REG_P (result) && GET_MODE (result) == insn_mode
	     && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	result = gen_reg_rtx (insn_mode);

      /* Stabilize the arguments in case gen_cmpstrnsi fails, so the
	 library-call fallback does not re-evaluate them.  */
      arg1 = builtin_save_expr (arg1);
      arg2 = builtin_save_expr (arg2);
      len = builtin_save_expr (len);

      arg1_rtx = get_memory_rtx (arg1, len);
      arg2_rtx = get_memory_rtx (arg2, len);
      arg3_rtx = expand_normal (len);
      insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
			    GEN_INT (MIN (arg1_align, arg2_align)));
      if (insn)
	{
	  emit_insn (insn);

	  /* Return the value in the proper mode for this function.  */
	  mode = TYPE_MODE (TREE_TYPE (exp));
	  if (GET_MODE (result) == mode)
	    return result;
	  if (target == 0)
	    return convert_to_mode (mode, result, 0);
	  convert_move (target, result, 0);
	  return target;
	}

      /* Expand the library call ourselves using a stabilized argument
	 list to avoid re-evaluating the function's arguments twice.  */
      fndecl = get_callee_fndecl (exp);
      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
				  arg1, arg2, len);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_call (fn, target, target == const0_rtx);
    }
#endif
  return NULL_RTX;
}
4027
4028 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4029 if that's convenient. */
4030
4031 rtx
4032 expand_builtin_saveregs (void)
4033 {
4034 rtx val, seq;
4035
4036 /* Don't do __builtin_saveregs more than once in a function.
4037 Save the result of the first call and reuse it. */
4038 if (saveregs_value != 0)
4039 return saveregs_value;
4040
4041 /* When this function is called, it means that registers must be
4042 saved on entry to this function. So we migrate the call to the
4043 first insn of this function. */
4044
4045 start_sequence ();
4046
4047 /* Do whatever the machine needs done in this case. */
4048 val = targetm.calls.expand_builtin_saveregs ();
4049
4050 seq = get_insns ();
4051 end_sequence ();
4052
4053 saveregs_value = val;
4054
4055 /* Put the insns after the NOTE that starts the function. If this
4056 is inside a start_sequence, make the outer-level insn chain current, so
4057 the code is placed at the start of the function. */
4058 push_topmost_sequence ();
4059 emit_insn_after (seq, entry_of_function ());
4060 pop_topmost_sequence ();
4061
4062 return val;
4063 }
4064
4065 /* Expand a call to __builtin_next_arg. */
4066
4067 static rtx
4068 expand_builtin_next_arg (void)
4069 {
4070 /* Checking arguments is already done in fold_builtin_next_arg
4071 that must be called before this function. */
4072 return expand_binop (ptr_mode, add_optab,
4073 crtl->args.internal_arg_pointer,
4074 crtl->args.arg_offset_rtx,
4075 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4076 }
4077
/* Make it easier for the backends by protecting the valist argument
   from multiple evaluations.  If NEEDS_LVALUE, the result must remain
   usable as an assignment target.  */

static tree
stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
{
  tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));

  /* The current way of determining the type of valist is completely
     bogus.  We should have the information on the va builtin instead.  */
  if (!vatype)
    vatype = targetm.fn_abi_va_list (cfun->decl);

  if (TREE_CODE (vatype) == ARRAY_TYPE)
    {
      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);

      /* For this case, the backends will be expecting a pointer to
	 vatype, but it's possible we've actually been given an array
	 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
	 So fix it.  */
      if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
	{
	  tree p1 = build_pointer_type (TREE_TYPE (vatype));
	  valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
	}
    }
  else
    {
      tree pt = build_pointer_type (vatype);

      if (! needs_lvalue)
	{
	  if (! TREE_SIDE_EFFECTS (valist))
	    return valist;

	  /* Take the address so a single save_expr protects the whole
	     access; mark it side-effecting so it isn't optimized away.  */
	  valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
	  TREE_SIDE_EFFECTS (valist) = 1;
	}

      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);
      /* Re-dereference the stabilized address as a MEM_REF of the
	 canonical va_list type.  */
      valist = fold_build2_loc (loc, MEM_REF,
				vatype, valist, build_int_cst (pt, 0));
    }

  return valist;
}
4127
4128 /* The "standard" definition of va_list is void*. */
4129
4130 tree
4131 std_build_builtin_va_list (void)
4132 {
4133 return ptr_type_node;
4134 }
4135
4136 /* The "standard" abi va_list is va_list_type_node. */
4137
4138 tree
4139 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4140 {
4141 return va_list_type_node;
4142 }
4143
/* The "standard" type of va_list is va_list_type_node.  Return
   va_list_type_node if TYPE matches it (possibly behind a level of
   indirection or array decay), or NULL_TREE otherwise.  */

tree
std_canonical_va_list_type (tree type)
{
  tree wtype, htype;

  /* Strip one level of indirection from the argument's type so we can
     compare the underlying va_list object types.  */
  if (INDIRECT_REF_P (type))
    type = TREE_TYPE (type);
  else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE(type)))
    type = TREE_TYPE (type);
  wtype = va_list_type_node;
  htype = type;
  /* Treat structure va_list types.  */
  if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
    htype = TREE_TYPE (htype);
  else if (TREE_CODE (wtype) == ARRAY_TYPE)
    {
      /* If va_list is an array type, the argument may have decayed
	 to a pointer type, e.g. by being passed to another function.
	 In that case, unwrap both types so that we can compare the
	 underlying records.  */
      if (TREE_CODE (htype) == ARRAY_TYPE
	  || POINTER_TYPE_P (htype))
	{
	  wtype = TREE_TYPE (wtype);
	  htype = TREE_TYPE (htype);
	}
    }
  /* Compare main variants so qualifiers don't cause a mismatch.  */
  if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
    return va_list_type_node;

  return NULL_TREE;
}
4178
4179 /* The "standard" implementation of va_start: just assign `nextarg' to
4180 the variable. */
4181
4182 void
4183 std_expand_builtin_va_start (tree valist, rtx nextarg)
4184 {
4185 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4186 convert_move (va_r, nextarg, 0);
4187 }
4188
4189 /* Expand EXP, a call to __builtin_va_start. */
4190
4191 static rtx
4192 expand_builtin_va_start (tree exp)
4193 {
4194 rtx nextarg;
4195 tree valist;
4196 location_t loc = EXPR_LOCATION (exp);
4197
4198 if (call_expr_nargs (exp) < 2)
4199 {
4200 error_at (loc, "too few arguments to function %<va_start%>");
4201 return const0_rtx;
4202 }
4203
4204 if (fold_builtin_next_arg (exp, true))
4205 return const0_rtx;
4206
4207 nextarg = expand_builtin_next_arg ();
4208 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4209
4210 if (targetm.expand_builtin_va_start)
4211 targetm.expand_builtin_va_start (valist, nextarg);
4212 else
4213 std_expand_builtin_va_start (valist, nextarg);
4214
4215 return const0_rtx;
4216 }
4217
/* The "standard" implementation of va_arg: read the value from the
   current (padded) address and increment by the (padded) size.
   VALIST is the va_list expression, TYPE the type being fetched;
   setup statements go to PRE_P and the pointer bump is shared with
   POST_P.  Returns the dereferenced argument value as a tree.  */

tree
std_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
			  gimple_seq *post_p)
{
  tree addr, t, type_size, rounded_size, valist_tmp;
  unsigned HOST_WIDE_INT align, boundary;
  bool indirect;

#ifdef ARGS_GROW_DOWNWARD
  /* All of the alignment and movement below is for args-grow-up machines.
     As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
     implement their own specialized gimplify_va_arg_expr routines.  */
  gcc_unreachable ();
#endif

  /* Arguments passed by reference are fetched as a pointer and then
     dereferenced at the end.  */
  indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
  if (indirect)
    type = build_pointer_type (type);

  align = PARM_BOUNDARY / BITS_PER_UNIT;
  boundary = targetm.calls.function_arg_boundary (TYPE_MODE (type), type);

  /* When we align parameter on stack for caller, if the parameter
     alignment is beyond MAX_SUPPORTED_STACK_ALIGNMENT, it will be
     aligned at MAX_SUPPORTED_STACK_ALIGNMENT.  We will match callee
     here with caller.  */
  if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
    boundary = MAX_SUPPORTED_STACK_ALIGNMENT;

  boundary /= BITS_PER_UNIT;

  /* Hoist the valist value into a temporary for the moment.  */
  valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);

  /* va_list pointer is aligned to PARM_BOUNDARY.  If argument actually
     requires greater alignment, we must perform dynamic alignment.  */
  if (boundary > align
      && !integer_zerop (TYPE_SIZE (type)))
    {
      /* valist_tmp = (valist_tmp + boundary - 1) & -boundary, done as
	 two gimple statements.  */
      t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
		  fold_build_pointer_plus_hwi (valist_tmp, boundary - 1));
      gimplify_and_add (t, pre_p);

      t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
		  fold_build2 (BIT_AND_EXPR, TREE_TYPE (valist),
			       valist_tmp,
			       build_int_cst (TREE_TYPE (valist), -boundary)));
      gimplify_and_add (t, pre_p);
    }
  else
    boundary = align;

  /* If the actual alignment is less than the alignment of the type,
     adjust the type accordingly so that we don't assume strict alignment
     when dereferencing the pointer.  */
  boundary *= BITS_PER_UNIT;
  if (boundary < TYPE_ALIGN (type))
    {
      type = build_variant_type_copy (type);
      TYPE_ALIGN (type) = boundary;
    }

  /* Compute the rounded size of the type.  */
  type_size = size_in_bytes (type);
  rounded_size = round_up (type_size, align);

  /* Reduce rounded_size so it's sharable with the postqueue.  */
  gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);

  /* Get AP.  */
  addr = valist_tmp;
  if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
    {
      /* Small args are padded downward: point at the end of the slot
	 minus the argument's own size.  */
      t = fold_build2_loc (input_location, GT_EXPR, sizetype,
			   rounded_size, size_int (align));
      t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
		       size_binop (MINUS_EXPR, rounded_size, type_size));
      addr = fold_build_pointer_plus (addr, t);
    }

  /* Compute new value for AP.  */
  t = fold_build_pointer_plus (valist_tmp, rounded_size);
  t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
  gimplify_and_add (t, pre_p);

  addr = fold_convert (build_pointer_type (type), addr);

  /* For by-reference arguments, ADDR points at a pointer; add the
     extra dereference.  */
  if (indirect)
    addr = build_va_arg_indirect_ref (addr);

  return build_va_arg_indirect_ref (addr);
}
4314
4315 /* Build an indirect-ref expression over the given TREE, which represents a
4316 piece of a va_arg() expansion. */
4317 tree
4318 build_va_arg_indirect_ref (tree addr)
4319 {
4320 addr = build_simple_mem_ref_loc (EXPR_LOCATION (addr), addr);
4321
4322 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
4323 mf_mark (addr);
4324
4325 return addr;
4326 }
4327
4328 /* Return a dummy expression of type TYPE in order to keep going after an
4329 error. */
4330
4331 static tree
4332 dummy_object (tree type)
4333 {
4334 tree t = build_int_cst (build_pointer_type (type), 0);
4335 return build2 (MEM_REF, type, t, t);
4336 }
4337
/* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
   builtin function, but a very special sort of operator.  *EXPR_P is
   the VA_ARG_EXPR; side-effect statements go to PRE_P/POST_P.  */

enum gimplify_status
gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree promoted_type, have_va_type;
  tree valist = TREE_OPERAND (*expr_p, 0);
  tree type = TREE_TYPE (*expr_p);
  tree t;
  location_t loc = EXPR_LOCATION (*expr_p);

  /* Verify that valist is of the proper type.  */
  have_va_type = TREE_TYPE (valist);
  if (have_va_type == error_mark_node)
    return GS_ERROR;
  have_va_type = targetm.canonical_va_list_type (have_va_type);

  if (have_va_type == NULL_TREE)
    {
      error_at (loc, "first argument to %<va_arg%> not of type %<va_list%>");
      return GS_ERROR;
    }

  /* Generate a diagnostic for requesting data of a type that cannot
     be passed through `...' due to type promotion at the call site.  */
  if ((promoted_type = lang_hooks.types.type_promotes_to (type))
      != type)
    {
      /* GAVE_HELP ensures the "so you should pass..." note is emitted
	 at most once per compilation.  */
      static bool gave_help;
      bool warned;

      /* Unfortunately, this is merely undefined, rather than a constraint
	 violation, so we cannot make this an error.  If this call is never
	 executed, the program is still strictly conforming.  */
      warned = warning_at (loc, 0,
			   "%qT is promoted to %qT when passed through %<...%>",
			   type, promoted_type);
      if (!gave_help && warned)
	{
	  gave_help = true;
	  inform (loc, "(so you should pass %qT not %qT to %<va_arg%>)",
		  promoted_type, type);
	}

      /* We can, however, treat "undefined" any way we please.
	 Call abort to encourage the user to fix the program.  */
      if (warned)
	inform (loc, "if this code is reached, the program will abort");
      /* Before the abort, allow the evaluation of the va_list
	 expression to exit or longjmp.  */
      gimplify_and_add (valist, pre_p);
      t = build_call_expr_loc (loc,
			       builtin_decl_implicit (BUILT_IN_TRAP), 0);
      gimplify_and_add (t, pre_p);

      /* This is dead code, but go ahead and finish so that the
	 mode of the result comes out right.  */
      *expr_p = dummy_object (type);
      return GS_ALL_DONE;
    }
  else
    {
      /* Make it easier for the backends by protecting the valist argument
	 from multiple evaluations.  */
      if (TREE_CODE (have_va_type) == ARRAY_TYPE)
	{
	  /* For this case, the backends will be expecting a pointer to
	     TREE_TYPE (abi), but it's possible we've
	     actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
	     So fix it.  */
	  if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
	    {
	      tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
	      valist = fold_convert_loc (loc, p1,
					 build_fold_addr_expr_loc (loc, valist));
	    }

	  gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
	}
      else
	gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);

      if (!targetm.gimplify_va_arg_expr)
	/* FIXME:Once most targets are converted we should merely
	   assert this is non-null.  */
	return GS_ALL_DONE;

      *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
      return GS_OK;
    }
}
4430
4431 /* Expand EXP, a call to __builtin_va_end. */
4432
4433 static rtx
4434 expand_builtin_va_end (tree exp)
4435 {
4436 tree valist = CALL_EXPR_ARG (exp, 0);
4437
4438 /* Evaluate for side effects, if needed. I hate macros that don't
4439 do that. */
4440 if (TREE_SIDE_EFFECTS (valist))
4441 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4442
4443 return const0_rtx;
4444 }
4445
/* Expand EXP, a call to __builtin_va_copy.  We do this as a
   builtin rather than just as an assignment in stdarg.h because of the
   nastiness of array-type va_list types.  */

static rtx
expand_builtin_va_copy (tree exp)
{
  tree dst, src, t;
  location_t loc = EXPR_LOCATION (exp);

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  /* DST must remain assignable (lvalue); SRC is only read.  */
  dst = stabilize_va_list_loc (loc, dst, 1);
  src = stabilize_va_list_loc (loc, src, 0);

  gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);

  if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
    {
      /* Scalar or record va_list: a plain assignment suffices.  */
      t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }
  else
    {
      /* Array va_list: copy the underlying storage with a block move.  */
      rtx dstb, srcb, size;

      /* Evaluate to pointers.  */
      dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
      srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
      size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
			  NULL_RTX, VOIDmode, EXPAND_NORMAL);

      dstb = convert_memory_address (Pmode, dstb);
      srcb = convert_memory_address (Pmode, srcb);

      /* "Dereference" to BLKmode memories, attaching alias-set and
	 alignment info so the block move can be optimized.  */
      dstb = gen_rtx_MEM (BLKmode, dstb);
      set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
      set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
      srcb = gen_rtx_MEM (BLKmode, srcb);
      set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
      set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));

      /* Copy.  */
      emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
    }

  return const0_rtx;
}
4497
4498 /* Expand a call to one of the builtin functions __builtin_frame_address or
4499 __builtin_return_address. */
4500
4501 static rtx
4502 expand_builtin_frame_address (tree fndecl, tree exp)
4503 {
4504 /* The argument must be a nonnegative integer constant.
4505 It counts the number of frames to scan up the stack.
4506 The value is the return address saved in that frame. */
4507 if (call_expr_nargs (exp) == 0)
4508 /* Warning about missing arg was already issued. */
4509 return const0_rtx;
4510 else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
4511 {
4512 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4513 error ("invalid argument to %<__builtin_frame_address%>");
4514 else
4515 error ("invalid argument to %<__builtin_return_address%>");
4516 return const0_rtx;
4517 }
4518 else
4519 {
4520 rtx tem
4521 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
4522 tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
4523
4524 /* Some ports cannot access arbitrary stack frames. */
4525 if (tem == NULL)
4526 {
4527 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4528 warning (0, "unsupported argument to %<__builtin_frame_address%>");
4529 else
4530 warning (0, "unsupported argument to %<__builtin_return_address%>");
4531 return const0_rtx;
4532 }
4533
4534 /* For __builtin_frame_address, return what we've got. */
4535 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4536 return tem;
4537
4538 if (!REG_P (tem)
4539 && ! CONSTANT_P (tem))
4540 tem = copy_addr_to_reg (tem);
4541 return tem;
4542 }
4543 }
4544
4545 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4546 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4547 is the same as for allocate_dynamic_stack_space. */
4548
4549 static rtx
4550 expand_builtin_alloca (tree exp, bool cannot_accumulate)
4551 {
4552 rtx op0;
4553 rtx result;
4554 bool valid_arglist;
4555 unsigned int align;
4556 bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
4557 == BUILT_IN_ALLOCA_WITH_ALIGN);
4558
4559 /* Emit normal call if we use mudflap. */
4560 if (flag_mudflap)
4561 return NULL_RTX;
4562
4563 valid_arglist
4564 = (alloca_with_align
4565 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4566 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4567
4568 if (!valid_arglist)
4569 return NULL_RTX;
4570
4571 /* Compute the argument. */
4572 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4573
4574 /* Compute the alignment. */
4575 align = (alloca_with_align
4576 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
4577 : BIGGEST_ALIGNMENT);
4578
4579 /* Allocate the desired space. */
4580 result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
4581 result = convert_memory_address (ptr_mode, result);
4582
4583 return result;
4584 }
4585
4586 /* Expand a call to bswap builtin in EXP.
4587 Return NULL_RTX if a normal call should be emitted rather than expanding the
4588 function in-line. If convenient, the result should be placed in TARGET.
4589 SUBTARGET may be used as the target for computing one of EXP's operands. */
4590
4591 static rtx
4592 expand_builtin_bswap (enum machine_mode target_mode, tree exp, rtx target,
4593 rtx subtarget)
4594 {
4595 tree arg;
4596 rtx op0;
4597
4598 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4599 return NULL_RTX;
4600
4601 arg = CALL_EXPR_ARG (exp, 0);
4602 op0 = expand_expr (arg,
4603 subtarget && GET_MODE (subtarget) == target_mode
4604 ? subtarget : NULL_RTX,
4605 target_mode, EXPAND_NORMAL);
4606 if (GET_MODE (op0) != target_mode)
4607 op0 = convert_to_mode (target_mode, op0, 1);
4608
4609 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
4610
4611 gcc_assert (target);
4612
4613 return convert_to_mode (target_mode, target, 1);
4614 }
4615
4616 /* Expand a call to a unary builtin in EXP.
4617 Return NULL_RTX if a normal call should be emitted rather than expanding the
4618 function in-line. If convenient, the result should be placed in TARGET.
4619 SUBTARGET may be used as the target for computing one of EXP's operands. */
4620
4621 static rtx
4622 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
4623 rtx subtarget, optab op_optab)
4624 {
4625 rtx op0;
4626
4627 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4628 return NULL_RTX;
4629
4630 /* Compute the argument. */
4631 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4632 (subtarget
4633 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4634 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
4635 VOIDmode, EXPAND_NORMAL);
4636 /* Compute op, into TARGET if possible.
4637 Set TARGET to wherever the result comes back. */
4638 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4639 op_optab, op0, target, op_optab != clrsb_optab);
4640 gcc_assert (target);
4641
4642 return convert_to_mode (target_mode, target, 0);
4643 }
4644
4645 /* Expand a call to __builtin_expect. We just return our argument
4646 as the builtin_expect semantic should've been already executed by
4647 tree branch prediction pass. */
4648
4649 static rtx
4650 expand_builtin_expect (tree exp, rtx target)
4651 {
4652 tree arg;
4653
4654 if (call_expr_nargs (exp) < 2)
4655 return const0_rtx;
4656 arg = CALL_EXPR_ARG (exp, 0);
4657
4658 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4659 /* When guessing was done, the hints should be already stripped away. */
4660 gcc_assert (!flag_guess_branch_prob
4661 || optimize == 0 || seen_error ());
4662 return target;
4663 }
4664
4665 /* Expand a call to __builtin_assume_aligned. We just return our first
4666 argument as the builtin_assume_aligned semantic should've been already
4667 executed by CCP. */
4668
4669 static rtx
4670 expand_builtin_assume_aligned (tree exp, rtx target)
4671 {
4672 if (call_expr_nargs (exp) < 2)
4673 return const0_rtx;
4674 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4675 EXPAND_NORMAL);
4676 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4677 && (call_expr_nargs (exp) < 3
4678 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4679 return target;
4680 }
4681
/* Expand a call to __builtin_trap: emit the target's trap insn if it
   has one, else call abort; in either case follow with a barrier since
   control never continues past the trap.  */
void
expand_builtin_trap (void)
{
  /* NOTE: when HAVE_trap is defined, the `else' below binds to the
     emit_library_call statement after the #endif.  */
#ifdef HAVE_trap
  if (HAVE_trap)
    emit_insn (gen_trap ());
  else
#endif
    emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
  emit_barrier ();
}
4693
/* Expand a call to __builtin_unreachable.  We do nothing except emit
   a barrier saying that control flow will not pass here.

   It is the responsibility of the program being compiled to ensure
   that control flow does never reach __builtin_unreachable.  */
static void
expand_builtin_unreachable (void)
{
  /* A barrier is the whole expansion: no code, no fallthrough.  */
  emit_barrier ();
}
4704
4705 /* Expand EXP, a call to fabs, fabsf or fabsl.
4706 Return NULL_RTX if a normal call should be emitted rather than expanding
4707 the function inline. If convenient, the result should be placed
4708 in TARGET. SUBTARGET may be used as the target for computing
4709 the operand. */
4710
4711 static rtx
4712 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4713 {
4714 enum machine_mode mode;
4715 tree arg;
4716 rtx op0;
4717
4718 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4719 return NULL_RTX;
4720
4721 arg = CALL_EXPR_ARG (exp, 0);
4722 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4723 mode = TYPE_MODE (TREE_TYPE (arg));
4724 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4725 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4726 }
4727
/* Expand EXP, a call to copysign, copysignf, or copysignl.
   Return NULL if a normal call should be emitted rather than expanding the
   function inline.  If convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing the operand.  */

static rtx
expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
{
  rtx op0, op1;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* First argument: the value supplying the magnitude.  */
  arg = CALL_EXPR_ARG (exp, 0);
  op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

  /* Second argument: the value supplying the sign.  */
  arg = CALL_EXPR_ARG (exp, 1);
  op1 = expand_normal (arg);

  return expand_copysign (op0, op1, target);
}
4750
/* Create a new constant string literal and return a char* pointer to it.
   The STRING_CST value is the LEN characters at STR.  */
tree
build_string_literal (int len, const char *str)
{
  tree t, elem, index, type;

  /* Build the STRING_CST and give it type "const char[LEN]".  */
  t = build_string (len, str);
  elem = build_type_variant (char_type_node, 1, 0);
  index = build_index_type (size_int (len - 1));
  type = build_array_type (elem, index);
  TREE_TYPE (t) = type;
  TREE_CONSTANT (t) = 1;
  TREE_READONLY (t) = 1;
  TREE_STATIC (t) = 1;

  /* Return &string[0], of type "const char *".  */
  type = build_pointer_type (elem);
  t = build1 (ADDR_EXPR, type,
	      build4 (ARRAY_REF, elem,
		      t, integer_zero_node, NULL_TREE, NULL_TREE));
  return t;
}
4773
/* Expand a call to __builtin___clear_cache.  Returns const0_rtx when the
   call has been handled (or needs no code at all), or NULL_RTX to request
   the normal library-call expansion.  */

static rtx
expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
{
#ifndef HAVE_clear_cache
#ifdef CLEAR_INSN_CACHE
  /* There is no "clear_cache" insn, and __clear_cache() in libgcc
     does something.  Just do the default expansion to a call to
     __clear_cache().  */
  return NULL_RTX;
#else
  /* There is no "clear_cache" insn, and __clear_cache() in libgcc
     does nothing.  There is no need to call it.  Do nothing.  */
  return const0_rtx;
#endif /* CLEAR_INSN_CACHE */
#else
  /* We have a "clear_cache" insn, and it will handle everything.  */
  tree begin, end;
  rtx begin_rtx, end_rtx;

  /* We must not expand to a library call.  If we did, any
     fallback library function in libgcc that might contain a call to
     __builtin___clear_cache() would recurse infinitely.  */
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    {
      error ("both arguments to %<__builtin___clear_cache%> must be pointers");
      return const0_rtx;
    }

  /* HAVE_clear_cache may be a run-time predicate on some targets.  */
  if (HAVE_clear_cache)
    {
      struct expand_operand ops[2];

      begin = CALL_EXPR_ARG (exp, 0);
      begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);

      end = CALL_EXPR_ARG (exp, 1);
      end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);

      create_address_operand (&ops[0], begin_rtx);
      create_address_operand (&ops[1], end_rtx);
      if (maybe_expand_insn (CODE_FOR_clear_cache, 2, ops))
	return const0_rtx;
    }
  /* The insn was unavailable or failed to match; emit nothing rather
     than risk the recursive library call described above.  */
  return const0_rtx;
#endif /* HAVE_clear_cache */
}
4822
4823 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
4824
4825 static rtx
4826 round_trampoline_addr (rtx tramp)
4827 {
4828 rtx temp, addend, mask;
4829
4830 /* If we don't need too much alignment, we'll have been guaranteed
4831 proper alignment by get_trampoline_type. */
4832 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
4833 return tramp;
4834
4835 /* Round address up to desired boundary. */
4836 temp = gen_reg_rtx (Pmode);
4837 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
4838 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
4839
4840 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
4841 temp, 0, OPTAB_LIB_WIDEN);
4842 tramp = expand_simple_binop (Pmode, AND, temp, mask,
4843 temp, 0, OPTAB_LIB_WIDEN);
4844
4845 return tramp;
4846 }
4847
/* Expand a call that initializes a trampoline.  The call carries three
   pointer arguments: the trampoline storage, the nested function, and
   the static chain value.  ONSTACK is true for the classic on-stack
   trampoline variant (which also triggers the -Wtrampolines warning);
   otherwise the storage was obtained elsewhere, e.g. from the heap.
   Returns const0_rtx, or NULL_RTX if the argument list is malformed.  */

static rtx
expand_builtin_init_trampoline (tree exp, bool onstack)
{
  tree t_tramp, t_func, t_chain;
  rtx m_tramp, r_tramp, r_chain, tmp;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
			 POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_tramp = CALL_EXPR_ARG (exp, 0);
  t_func = CALL_EXPR_ARG (exp, 1);
  t_chain = CALL_EXPR_ARG (exp, 2);

  /* Wrap the trampoline storage in a BLKmode MEM that cannot trap.  */
  r_tramp = expand_normal (t_tramp);
  m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
  MEM_NOTRAP_P (m_tramp) = 1;

  /* If ONSTACK, the TRAMP argument should be the address of a field
     within the local function's FRAME decl.  Either way, let's see if
     we can fill in the MEM_ATTRs for this memory.  */
  if (TREE_CODE (t_tramp) == ADDR_EXPR)
    set_mem_attributes_minus_bitpos (m_tramp, TREE_OPERAND (t_tramp, 0),
				     true, 0);

  /* Creator of a heap trampoline is responsible for making sure the
     address is aligned to at least STACK_BOUNDARY.  Normally malloc
     will ensure this anyhow.  */
  tmp = round_trampoline_addr (r_tramp);
  if (tmp != r_tramp)
    {
      /* The address was rounded; rebuild the MEM and record the new
	 alignment and size for the benefit of later passes.  */
      m_tramp = change_address (m_tramp, BLKmode, tmp);
      set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
      set_mem_size (m_tramp, TRAMPOLINE_SIZE);
    }

  /* The FUNC argument should be the address of the nested function.
     Extract the actual function decl to pass to the hook.  */
  gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
  t_func = TREE_OPERAND (t_func, 0);
  gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);

  r_chain = expand_normal (t_chain);

  /* Generate insns to initialize the trampoline.  */
  targetm.calls.trampoline_init (m_tramp, t_func, r_chain);

  if (onstack)
    {
      /* Record that an on-stack trampoline was emitted, and warn the
	 user about it under -Wtrampolines.  */
      trampolines_created = 1;

      warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
		  "trampoline generated for nested function %qD", t_func);
    }

  return const0_rtx;
}
4905
4906 static rtx
4907 expand_builtin_adjust_trampoline (tree exp)
4908 {
4909 rtx tramp;
4910
4911 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4912 return NULL_RTX;
4913
4914 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4915 tramp = round_trampoline_addr (tramp);
4916 if (targetm.calls.trampoline_adjust_address)
4917 tramp = targetm.calls.trampoline_adjust_address (tramp);
4918
4919 return tramp;
4920 }
4921
/* Expand the call EXP to the built-in signbit, signbitf or signbitl
   function.  The function first checks whether the back end provides
   an insn to implement signbit for the respective mode.  If not, it
   checks whether the floating point format of the value is such that
   the sign bit can be extracted.  If that is not the case, the
   function returns NULL_RTX to indicate that a normal call should be
   emitted rather than expanding the function in-line.  EXP is the
   expression that is a call to the builtin function; if convenient,
   the result should be placed in TARGET.  */
static rtx
expand_builtin_signbit (tree exp, rtx target)
{
  const struct real_format *fmt;
  enum machine_mode fmode, imode, rmode;
  tree arg;
  int word, bitpos;
  enum insn_code icode;
  rtx temp;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* FMODE is the mode of the FP argument, RMODE the mode of the
     (integer) result.  */
  arg = CALL_EXPR_ARG (exp, 0);
  fmode = TYPE_MODE (TREE_TYPE (arg));
  rmode = TYPE_MODE (TREE_TYPE (exp));
  fmt = REAL_MODE_FORMAT (fmode);

  arg = builtin_save_expr (arg);

  /* Expand the argument yielding a RTX expression. */
  temp = expand_normal (arg);

  /* Check if the back end provides an insn that handles signbit for the
     argument's mode. */
  icode = optab_handler (signbit_optab, fmode);
  if (icode != CODE_FOR_nothing)
    {
      rtx last = get_last_insn ();
      target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
      if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
	return target;
      /* The insn failed to match; discard anything it emitted.  */
      delete_insns_since (last);
    }

  /* For floating point formats without a sign bit, implement signbit
     as "ARG < 0.0".  */
  bitpos = fmt->signbit_ro;
  if (bitpos < 0)
    {
      /* But we can't do this if the format supports signed zero.  */
      if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
	return NULL_RTX;

      arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
			     build_real (TREE_TYPE (arg), dconst0));
      return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
    }

  if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
    {
      /* The value fits in a word: view the whole thing as an integer
	 of equal size.  */
      imode = int_mode_for_mode (fmode);
      if (imode == BLKmode)
	return NULL_RTX;
      temp = gen_lowpart (imode, temp);
    }
  else
    {
      /* Multi-word value: pick out only the word holding the sign bit,
	 then reduce BITPOS to a position within that word.  */
      imode = word_mode;
      /* Handle targets with different FP word orders.  */
      if (FLOAT_WORDS_BIG_ENDIAN)
	word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
      else
	word = bitpos / BITS_PER_WORD;
      temp = operand_subword_force (temp, word, fmode);
      bitpos = bitpos % BITS_PER_WORD;
    }

  /* Force the intermediate word_mode (or narrower) result into a
     register.  This avoids attempting to create paradoxical SUBREGs
     of floating point modes below.  */
  temp = force_reg (imode, temp);

  /* If the bitpos is within the "result mode" lowpart, the operation
     can be implemented with a single bitwise AND.  Otherwise, we need
     a right shift and an AND.  */

  if (bitpos < GET_MODE_BITSIZE (rmode))
    {
      double_int mask = double_int_setbit (double_int_zero, bitpos);

      if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
	temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp,
			   immed_double_int_const (mask, rmode),
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }
  else
    {
      /* Perform a logical right shift to place the signbit in the least
	 significant bit, then truncate the result to the desired mode
	 and mask just this bit.  */
      temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
      temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp, const1_rtx,
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }

  return temp;
}
5032
5033 /* Expand fork or exec calls. TARGET is the desired target of the
5034 call. EXP is the call. FN is the
5035 identificator of the actual function. IGNORE is nonzero if the
5036 value is to be ignored. */
5037
5038 static rtx
5039 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5040 {
5041 tree id, decl;
5042 tree call;
5043
5044 /* If we are not profiling, just call the function. */
5045 if (!profile_arc_flag)
5046 return NULL_RTX;
5047
5048 /* Otherwise call the wrapper. This should be equivalent for the rest of
5049 compiler, so the code does not diverge, and the wrapper may run the
5050 code necessary for keeping the profiling sane. */
5051
5052 switch (DECL_FUNCTION_CODE (fn))
5053 {
5054 case BUILT_IN_FORK:
5055 id = get_identifier ("__gcov_fork");
5056 break;
5057
5058 case BUILT_IN_EXECL:
5059 id = get_identifier ("__gcov_execl");
5060 break;
5061
5062 case BUILT_IN_EXECV:
5063 id = get_identifier ("__gcov_execv");
5064 break;
5065
5066 case BUILT_IN_EXECLP:
5067 id = get_identifier ("__gcov_execlp");
5068 break;
5069
5070 case BUILT_IN_EXECLE:
5071 id = get_identifier ("__gcov_execle");
5072 break;
5073
5074 case BUILT_IN_EXECVP:
5075 id = get_identifier ("__gcov_execvp");
5076 break;
5077
5078 case BUILT_IN_EXECVE:
5079 id = get_identifier ("__gcov_execve");
5080 break;
5081
5082 default:
5083 gcc_unreachable ();
5084 }
5085
5086 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5087 FUNCTION_DECL, id, TREE_TYPE (fn));
5088 DECL_EXTERNAL (decl) = 1;
5089 TREE_PUBLIC (decl) = 1;
5090 DECL_ARTIFICIAL (decl) = 1;
5091 TREE_NOTHROW (decl) = 1;
5092 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5093 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5094 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5095 return expand_call (call, target, ignore);
5096 }
5097
5098
5099 \f
/* Reconstitute a mode for a __sync intrinsic operation.  Since the type of
   the pointer in these functions is void*, the tree optimizers may remove
   casts.  The mode computed in expand_builtin isn't reliable either, due
   to __sync_bool_compare_and_swap.

   FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
   group of builtins.  This gives us log2 of the mode size.  */

static inline enum machine_mode
get_builtin_sync_mode (int fcode_diff)
{
  /* BITS_PER_UNIT << FCODE_DIFF is the operand size in bits.
     The size is not negotiable, so ask not to get BLKmode in return
     if the target indicates that a smaller size would be better.  */
  return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
}
5115
/* Expand the memory expression LOC and return the appropriate memory operand
   for the builtin_sync operations.  MODE is the machine mode of the
   accessed object.  */

static rtx
get_builtin_sync_mem (tree loc, enum machine_mode mode)
{
  rtx addr, mem;

  addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
  addr = convert_memory_address (Pmode, addr);

  /* Note that we explicitly do not want any alias information for this
     memory, so that we kill all other live memories.  Otherwise we don't
     satisfy the full barrier semantics of the intrinsic.  */
  mem = validize_mem (gen_rtx_MEM (mode, addr));

  /* The alignment needs to be at least according to that of the mode.  */
  set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
			   get_pointer_alignment (loc)));
  /* ALIAS_SET_MEMORY_BARRIER conflicts with all other alias sets, and
     marking the MEM volatile keeps the access from being moved or
     deleted.  */
  set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
  MEM_VOLATILE_P (mem) = 1;

  return mem;
}
5140
5141 /* Make sure an argument is in the right mode.
5142 EXP is the tree argument.
5143 MODE is the mode it should be in. */
5144
5145 static rtx
5146 expand_expr_force_mode (tree exp, enum machine_mode mode)
5147 {
5148 rtx val;
5149 enum machine_mode old_mode;
5150
5151 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5152 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5153 of CONST_INTs, where we know the old_mode only from the call argument. */
5154
5155 old_mode = GET_MODE (val);
5156 if (old_mode == VOIDmode)
5157 old_mode = TYPE_MODE (TREE_TYPE (exp));
5158 val = convert_modes (mode, old_mode, val, 1);
5159 return val;
5160 }
5161
5162
/* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
   EXP is the CALL_EXPR.  CODE is the rtx code
   that corresponds to the arithmetic or logical operation from the name;
   an exception here is that NOT actually means NAND.  TARGET is an optional
   place for us to store the results; AFTER is true if this is the
   fetch_and_xxx form.  */

static rtx
expand_builtin_sync_operation (enum machine_mode mode, tree exp,
			       enum rtx_code code, bool after,
			       rtx target)
{
  rtx val, mem;
  location_t loc = EXPR_LOCATION (exp);

  /* The NAND builtins changed meaning in GCC 4.4; under -Wsync-nand,
     remind the user, once per builtin flavor per compilation.  */
  if (code == NOT && warn_sync_nand)
    {
      tree fndecl = get_callee_fndecl (exp);
      enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

      /* Static so each note is emitted at most once per compilation.  */
      static bool warned_f_a_n, warned_n_a_f;

      switch (fcode)
	{
	case BUILT_IN_SYNC_FETCH_AND_NAND_1:
	case BUILT_IN_SYNC_FETCH_AND_NAND_2:
	case BUILT_IN_SYNC_FETCH_AND_NAND_4:
	case BUILT_IN_SYNC_FETCH_AND_NAND_8:
	case BUILT_IN_SYNC_FETCH_AND_NAND_16:
	  if (warned_f_a_n)
	    break;

	  fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
	  inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
	  warned_f_a_n = true;
	  break;

	case BUILT_IN_SYNC_NAND_AND_FETCH_1:
	case BUILT_IN_SYNC_NAND_AND_FETCH_2:
	case BUILT_IN_SYNC_NAND_AND_FETCH_4:
	case BUILT_IN_SYNC_NAND_AND_FETCH_8:
	case BUILT_IN_SYNC_NAND_AND_FETCH_16:
	  if (warned_n_a_f)
	    break;

	  fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
	  inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
	  warned_n_a_f = true;
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  /* The __sync builtins are full barriers, hence MEMMODEL_SEQ_CST.  */
  return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SEQ_CST,
				 after);
}
5225
5226 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5227 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5228 true if this is the boolean form. TARGET is a place for us to store the
5229 results; this is NOT optional if IS_BOOL is true. */
5230
5231 static rtx
5232 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5233 bool is_bool, rtx target)
5234 {
5235 rtx old_val, new_val, mem;
5236 rtx *pbool, *poval;
5237
5238 /* Expand the operands. */
5239 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5240 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5241 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5242
5243 pbool = poval = NULL;
5244 if (target != const0_rtx)
5245 {
5246 if (is_bool)
5247 pbool = &target;
5248 else
5249 poval = &target;
5250 }
5251 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5252 false, MEMMODEL_SEQ_CST,
5253 MEMMODEL_SEQ_CST))
5254 return NULL_RTX;
5255
5256 return target;
5257 }
5258
5259 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5260 general form is actually an atomic exchange, and some targets only
5261 support a reduced form with the second argument being a constant 1.
5262 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5263 the results. */
5264
5265 static rtx
5266 expand_builtin_sync_lock_test_and_set (enum machine_mode mode, tree exp,
5267 rtx target)
5268 {
5269 rtx val, mem;
5270
5271 /* Expand the operands. */
5272 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5273 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5274
5275 return expand_sync_lock_test_and_set (target, mem, val);
5276 }
5277
5278 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5279
5280 static void
5281 expand_builtin_sync_lock_release (enum machine_mode mode, tree exp)
5282 {
5283 rtx mem;
5284
5285 /* Expand the operands. */
5286 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5287
5288 expand_atomic_store (mem, const0_rtx, MEMMODEL_RELEASE, true);
5289 }
5290
5291 /* Given an integer representing an ``enum memmodel'', verify its
5292 correctness and return the memory model enum. */
5293
5294 static enum memmodel
5295 get_memmodel (tree exp)
5296 {
5297 rtx op;
5298 unsigned HOST_WIDE_INT val;
5299
5300 /* If the parameter is not a constant, it's a run time value so we'll just
5301 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5302 if (TREE_CODE (exp) != INTEGER_CST)
5303 return MEMMODEL_SEQ_CST;
5304
5305 op = expand_normal (exp);
5306
5307 val = INTVAL (op);
5308 if (targetm.memmodel_check)
5309 val = targetm.memmodel_check (val);
5310 else if (val & ~MEMMODEL_MASK)
5311 {
5312 warning (OPT_Winvalid_memory_model,
5313 "Unknown architecture specifier in memory model to builtin.");
5314 return MEMMODEL_SEQ_CST;
5315 }
5316
5317 if ((INTVAL(op) & MEMMODEL_MASK) >= MEMMODEL_LAST)
5318 {
5319 warning (OPT_Winvalid_memory_model,
5320 "invalid memory model argument to builtin");
5321 return MEMMODEL_SEQ_CST;
5322 }
5323
5324 return (enum memmodel) val;
5325 }
5326
5327 /* Expand the __atomic_exchange intrinsic:
5328 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5329 EXP is the CALL_EXPR.
5330 TARGET is an optional place for us to store the results. */
5331
5332 static rtx
5333 expand_builtin_atomic_exchange (enum machine_mode mode, tree exp, rtx target)
5334 {
5335 rtx val, mem;
5336 enum memmodel model;
5337
5338 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5339 if ((model & MEMMODEL_MASK) == MEMMODEL_CONSUME)
5340 {
5341 error ("invalid memory model for %<__atomic_exchange%>");
5342 return NULL_RTX;
5343 }
5344
5345 if (!flag_inline_atomics)
5346 return NULL_RTX;
5347
5348 /* Expand the operands. */
5349 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5350 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5351
5352 return expand_atomic_exchange (target, mem, val, model);
5353 }
5354
/* Expand the __atomic_compare_exchange intrinsic:
	bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
					TYPE desired, BOOL weak,
					enum memmodel success,
					enum memmodel failure)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

static rtx
expand_builtin_atomic_compare_exchange (enum machine_mode mode, tree exp,
					rtx target)
{
  rtx expect, desired, mem, oldval;
  enum memmodel success, failure;
  tree weak;
  bool is_weak;

  success = get_memmodel (CALL_EXPR_ARG (exp, 4));
  failure = get_memmodel (CALL_EXPR_ARG (exp, 5));

  /* The failure model may not contain a release operation.  */
  if ((failure & MEMMODEL_MASK) == MEMMODEL_RELEASE
      || (failure & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
    {
      error ("invalid failure memory model for %<__atomic_compare_exchange%>");
      return NULL_RTX;
    }

  /* NOTE(review): this compares the raw enum values (including any
     target-specific high bits) — assumes the models are numerically
     ordered by strength; confirm.  */
  if (failure > success)
    {
      error ("failure memory model cannot be stronger than success "
	     "memory model for %<__atomic_compare_exchange%>");
      return NULL_RTX;
    }

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  /* EXPECT is a pointer to the expected value; its current contents
     are the comparison value for the CAS.  */
  expect = expand_normal (CALL_EXPR_ARG (exp, 1));
  expect = convert_memory_address (Pmode, expect);
  desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);

  /* WEAK selects the weak form (spurious failure allowed) when it is a
     nonzero compile-time constant.  */
  weak = CALL_EXPR_ARG (exp, 3);
  is_weak = false;
  if (host_integerp (weak, 0) && tree_low_cst (weak, 0) != 0)
    is_weak = true;

  /* Load *EXPECT into a register for the comparison.  */
  oldval = copy_to_reg (gen_rtx_MEM (mode, expect));

  if (!expand_atomic_compare_and_swap ((target == const0_rtx ? NULL : &target),
				       &oldval, mem, oldval, desired,
				       is_weak, success, failure))
    return NULL_RTX;

  /* Store the value observed by the CAS back into *EXPECT: unchanged
     on success, the actual value on failure.  */
  emit_move_insn (gen_rtx_MEM (mode, expect), oldval);
  return target;
}
5414
5415 /* Expand the __atomic_load intrinsic:
5416 TYPE __atomic_load (TYPE *object, enum memmodel)
5417 EXP is the CALL_EXPR.
5418 TARGET is an optional place for us to store the results. */
5419
5420 static rtx
5421 expand_builtin_atomic_load (enum machine_mode mode, tree exp, rtx target)
5422 {
5423 rtx mem;
5424 enum memmodel model;
5425
5426 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5427 if ((model & MEMMODEL_MASK) == MEMMODEL_RELEASE
5428 || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5429 {
5430 error ("invalid memory model for %<__atomic_load%>");
5431 return NULL_RTX;
5432 }
5433
5434 if (!flag_inline_atomics)
5435 return NULL_RTX;
5436
5437 /* Expand the operand. */
5438 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5439
5440 return expand_atomic_load (target, mem, model);
5441 }
5442
5443
5444 /* Expand the __atomic_store intrinsic:
5445 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5446 EXP is the CALL_EXPR.
5447 TARGET is an optional place for us to store the results. */
5448
5449 static rtx
5450 expand_builtin_atomic_store (enum machine_mode mode, tree exp)
5451 {
5452 rtx mem, val;
5453 enum memmodel model;
5454
5455 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5456 if ((model & MEMMODEL_MASK) != MEMMODEL_RELAXED
5457 && (model & MEMMODEL_MASK) != MEMMODEL_SEQ_CST
5458 && (model & MEMMODEL_MASK) != MEMMODEL_RELEASE)
5459 {
5460 error ("invalid memory model for %<__atomic_store%>");
5461 return NULL_RTX;
5462 }
5463
5464 if (!flag_inline_atomics)
5465 return NULL_RTX;
5466
5467 /* Expand the operands. */
5468 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5469 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5470
5471 return expand_atomic_store (mem, val, model, false);
5472 }
5473
/* Expand the __atomic_fetch_XXX intrinsic:
	TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.
   CODE is the operation, PLUS, MINUS, ADD, XOR, or IOR.
   FETCH_AFTER is true if returning the result of the operation.
   FETCH_AFTER is false if returning the value before the operation.
   IGNORE is true if the result is not used.
   EXT_CALL is the correct builtin for an external call if this cannot be
   resolved to an instruction sequence.  */

static rtx
expand_builtin_atomic_fetch_op (enum machine_mode mode, tree exp, rtx target,
				enum rtx_code code, bool fetch_after,
				bool ignore, enum built_in_function ext_call)
{
  rtx val, mem, ret;
  enum memmodel model;
  tree fndecl;
  tree addr;

  model = get_memmodel (CALL_EXPR_ARG (exp, 2));

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  /* Only try generating instructions if inlining is turned on.  */
  if (flag_inline_atomics)
    {
      ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
      if (ret)
	return ret;
    }

  /* Return if a different routine isn't needed for the library call.  */
  if (ext_call == BUILT_IN_NONE)
    return NULL_RTX;

  /* Change the call to the specified function.  The called function is
     temporarily swapped to EXT_CALL's decl (e.g. the fetch-before form
     when only fetch-after exists), then restored below.  */
  fndecl = get_callee_fndecl (exp);
  addr = CALL_EXPR_FN (exp);
  STRIP_NOPS (addr);

  gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
  TREE_OPERAND (addr, 0) = builtin_decl_explicit(ext_call);

  /* Expand the call here so we can emit trailing code.  */
  ret = expand_call (exp, target, ignore);

  /* Replace the original function just in case it matters.  */
  TREE_OPERAND (addr, 0) = fndecl;

  /* Then issue the arithmetic correction to return the right result.  */
  if (!ignore)
    {
      if (code == NOT)
	{
	  /* CODE == NOT really means NAND: recompute the post-op value
	     as ~(old & val).  */
	  ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
				     OPTAB_LIB_WIDEN);
	  ret = expand_simple_unop (mode, NOT, ret, target, true);
	}
      else
	/* Re-apply the operation to the fetched value to convert a
	   fetch-before result into the fetch-after result.  */
	ret = expand_simple_binop (mode, code, ret, val, target, true,
				   OPTAB_LIB_WIDEN);
    }
  return ret;
}
5542
5543
5544 #ifndef HAVE_atomic_clear
5545 # define HAVE_atomic_clear 0
5546 # define gen_atomic_clear(x,y) (gcc_unreachable (), NULL_RTX)
5547 #endif
5548
/* Expand an atomic clear operation.
	void _atomic_clear (BOOL *obj, enum memmodel)
   EXP is the call expression.  Always returns const0_rtx.  */

static rtx
expand_builtin_atomic_clear (tree exp)
{
  enum machine_mode mode;
  rtx mem, ret;
  enum memmodel model;

  /* The object is a bool; use the mode matching its size.  */
  mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  model = get_memmodel (CALL_EXPR_ARG (exp, 1));

  /* A clear is a store, so acquire semantics are invalid.  */
  if ((model & MEMMODEL_MASK) == MEMMODEL_ACQUIRE
      || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
    {
      /* NOTE(review): the message names __atomic_store; this looks like
	 a copy-paste from expand_builtin_atomic_store — presumably it
	 should name __atomic_clear.  Confirm before changing, as the
	 wording may be relied upon by tests.  */
      error ("invalid memory model for %<__atomic_store%>");
      return const0_rtx;
    }

  if (HAVE_atomic_clear)
    {
      emit_insn (gen_atomic_clear (mem, model));
      return const0_rtx;
    }

  /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
     Failing that, a store is issued by __atomic_store.  The only way this can
     fail is if the bool type is larger than a word size.  Unlikely, but
     handle it anyway for completeness.  Assume a single threaded model since
     there is no atomic support in this case, and no barriers are required.  */
  ret = expand_atomic_store (mem, const0_rtx, model, true);
  if (!ret)
    emit_move_insn (mem, const0_rtx);
  return const0_rtx;
}
5587
5588 /* Expand an atomic test_and_set operation.
5589 bool _atomic_test_and_set (BOOL *obj, enum memmodel)
5590 EXP is the call expression. */
5591
5592 static rtx
5593 expand_builtin_atomic_test_and_set (tree exp, rtx target)
5594 {
5595 rtx mem;
5596 enum memmodel model;
5597 enum machine_mode mode;
5598
5599 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5600 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5601 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5602
5603 return expand_atomic_test_and_set (target, mem, model);
5604 }
5605
5606
/* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
   this architecture.  If ARG1 is NULL, use typical alignment for size ARG0.
   Returns boolean_true_node / boolean_false_node, or NULL_TREE when the
   size is not a compile-time constant.  */

static tree
fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
{
  int size;
  enum machine_mode mode;
  unsigned int mode_align, type_align;

  if (TREE_CODE (arg0) != INTEGER_CST)
    return NULL_TREE;

  /* Find the integer mode matching the object size, and its natural
     alignment.  */
  size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
  mode = mode_for_size (size, MODE_INT, 0);
  mode_align = GET_MODE_ALIGNMENT (mode);

  if (TREE_CODE (arg1) == INTEGER_CST && INTVAL (expand_normal (arg1)) == 0)
    type_align = mode_align;
  else
    {
      tree ttype = TREE_TYPE (arg1);

      /* This function is usually invoked and folded immediately by the front
	 end before anything else has a chance to look at it.  The pointer
	 parameter at this point is usually cast to a void *, so check for that
	 and look past the cast.  */
      if (TREE_CODE (arg1) == NOP_EXPR && POINTER_TYPE_P (ttype)
	  && VOID_TYPE_P (TREE_TYPE (ttype)))
	arg1 = TREE_OPERAND (arg1, 0);

      ttype = TREE_TYPE (arg1);
      gcc_assert (POINTER_TYPE_P (ttype));

      /* Get the underlying type of the object.  */
      ttype = TREE_TYPE (ttype);
      type_align = TYPE_ALIGN (ttype);
    }

  /* If the object has smaller alignment, the lock free routines cannot
     be used.  */
  if (type_align < mode_align)
    return boolean_false_node;

  /* Check if a compare_and_swap pattern exists for the mode which represents
     the required size.  The pattern is not allowed to fail, so the existence
     of the pattern indicates support is present.  */
  if (can_compare_and_swap_p (mode, true))
    return boolean_true_node;
  else
    return boolean_false_node;
}
5659
5660 /* Return true if the parameters to call EXP represent an object which will
5661 always generate lock free instructions. The first argument represents the
5662 size of the object, and the second parameter is a pointer to the object
5663 itself. If NULL is passed for the object, then the result is based on
5664 typical alignment for an object of the specified size. Otherwise return
5665 false. */
5666
5667 static rtx
5668 expand_builtin_atomic_always_lock_free (tree exp)
5669 {
5670 tree size;
5671 tree arg0 = CALL_EXPR_ARG (exp, 0);
5672 tree arg1 = CALL_EXPR_ARG (exp, 1);
5673
5674 if (TREE_CODE (arg0) != INTEGER_CST)
5675 {
5676 error ("non-constant argument 1 to __atomic_always_lock_free");
5677 return const0_rtx;
5678 }
5679
5680 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
5681 if (size == boolean_true_node)
5682 return const1_rtx;
5683 return const0_rtx;
5684 }
5685
5686 /* Return a one or zero if it can be determined that object ARG1 of size ARG
5687 is lock free on this architecture. */
5688
5689 static tree
5690 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
5691 {
5692 if (!flag_inline_atomics)
5693 return NULL_TREE;
5694
5695 /* If it isn't always lock free, don't generate a result. */
5696 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
5697 return boolean_true_node;
5698
5699 return NULL_TREE;
5700 }
5701
5702 /* Return true if the parameters to call EXP represent an object which will
5703 always generate lock free instructions. The first argument represents the
5704 size of the object, and the second parameter is a pointer to the object
5705 itself. If NULL is passed for the object, then the result is based on
5706 typical alignment for an object of the specified size. Otherwise return
5707 NULL*/
5708
5709 static rtx
5710 expand_builtin_atomic_is_lock_free (tree exp)
5711 {
5712 tree size;
5713 tree arg0 = CALL_EXPR_ARG (exp, 0);
5714 tree arg1 = CALL_EXPR_ARG (exp, 1);
5715
5716 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5717 {
5718 error ("non-integer argument 1 to __atomic_is_lock_free");
5719 return NULL_RTX;
5720 }
5721
5722 if (!flag_inline_atomics)
5723 return NULL_RTX;
5724
5725 /* If the value is known at compile time, return the RTX for it. */
5726 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
5727 if (size == boolean_true_node)
5728 return const1_rtx;
5729
5730 return NULL_RTX;
5731 }
5732
5733 /* Expand the __atomic_thread_fence intrinsic:
5734 void __atomic_thread_fence (enum memmodel)
5735 EXP is the CALL_EXPR. */
5736
5737 static void
5738 expand_builtin_atomic_thread_fence (tree exp)
5739 {
5740 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5741 expand_mem_thread_fence (model);
5742 }
5743
5744 /* Expand the __atomic_signal_fence intrinsic:
5745 void __atomic_signal_fence (enum memmodel)
5746 EXP is the CALL_EXPR. */
5747
5748 static void
5749 expand_builtin_atomic_signal_fence (tree exp)
5750 {
5751 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5752 expand_mem_signal_fence (model);
5753 }
5754
/* Expand the __sync_synchronize intrinsic.  It takes no arguments and
   emits a full memory barrier: a sequentially consistent thread
   fence.  */

static void
expand_builtin_sync_synchronize (void)
{
  expand_mem_thread_fence (MEMMODEL_SEQ_CST);
}
5762
5763 \f
5764 /* Expand an expression EXP that calls a built-in function,
5765 with result going to TARGET if that's convenient
5766 (and in mode MODE if that's convenient).
5767 SUBTARGET may be used as the target for computing one of EXP's operands.
5768 IGNORE is nonzero if the value is to be ignored. */
5769
5770 rtx
5771 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
5772 int ignore)
5773 {
5774 tree fndecl = get_callee_fndecl (exp);
5775 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5776 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5777 int flags;
5778
5779 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5780 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5781
5782 /* When not optimizing, generate calls to library functions for a certain
5783 set of builtins. */
5784 if (!optimize
5785 && !called_as_built_in (fndecl)
5786 && fcode != BUILT_IN_ALLOCA
5787 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
5788 && fcode != BUILT_IN_FREE)
5789 return expand_call (exp, target, ignore);
5790
5791 /* The built-in function expanders test for target == const0_rtx
5792 to determine whether the function's result will be ignored. */
5793 if (ignore)
5794 target = const0_rtx;
5795
5796 /* If the result of a pure or const built-in function is ignored, and
5797 none of its arguments are volatile, we can avoid expanding the
5798 built-in call and just evaluate the arguments for side-effects. */
5799 if (target == const0_rtx
5800 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
5801 && !(flags & ECF_LOOPING_CONST_OR_PURE))
5802 {
5803 bool volatilep = false;
5804 tree arg;
5805 call_expr_arg_iterator iter;
5806
5807 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5808 if (TREE_THIS_VOLATILE (arg))
5809 {
5810 volatilep = true;
5811 break;
5812 }
5813
5814 if (! volatilep)
5815 {
5816 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5817 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5818 return const0_rtx;
5819 }
5820 }
5821
5822 switch (fcode)
5823 {
5824 CASE_FLT_FN (BUILT_IN_FABS):
5825 target = expand_builtin_fabs (exp, target, subtarget);
5826 if (target)
5827 return target;
5828 break;
5829
5830 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5831 target = expand_builtin_copysign (exp, target, subtarget);
5832 if (target)
5833 return target;
5834 break;
5835
5836 /* Just do a normal library call if we were unable to fold
5837 the values. */
5838 CASE_FLT_FN (BUILT_IN_CABS):
5839 break;
5840
5841 CASE_FLT_FN (BUILT_IN_EXP):
5842 CASE_FLT_FN (BUILT_IN_EXP10):
5843 CASE_FLT_FN (BUILT_IN_POW10):
5844 CASE_FLT_FN (BUILT_IN_EXP2):
5845 CASE_FLT_FN (BUILT_IN_EXPM1):
5846 CASE_FLT_FN (BUILT_IN_LOGB):
5847 CASE_FLT_FN (BUILT_IN_LOG):
5848 CASE_FLT_FN (BUILT_IN_LOG10):
5849 CASE_FLT_FN (BUILT_IN_LOG2):
5850 CASE_FLT_FN (BUILT_IN_LOG1P):
5851 CASE_FLT_FN (BUILT_IN_TAN):
5852 CASE_FLT_FN (BUILT_IN_ASIN):
5853 CASE_FLT_FN (BUILT_IN_ACOS):
5854 CASE_FLT_FN (BUILT_IN_ATAN):
5855 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
5856 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5857 because of possible accuracy problems. */
5858 if (! flag_unsafe_math_optimizations)
5859 break;
5860 CASE_FLT_FN (BUILT_IN_SQRT):
5861 CASE_FLT_FN (BUILT_IN_FLOOR):
5862 CASE_FLT_FN (BUILT_IN_CEIL):
5863 CASE_FLT_FN (BUILT_IN_TRUNC):
5864 CASE_FLT_FN (BUILT_IN_ROUND):
5865 CASE_FLT_FN (BUILT_IN_NEARBYINT):
5866 CASE_FLT_FN (BUILT_IN_RINT):
5867 target = expand_builtin_mathfn (exp, target, subtarget);
5868 if (target)
5869 return target;
5870 break;
5871
5872 CASE_FLT_FN (BUILT_IN_FMA):
5873 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
5874 if (target)
5875 return target;
5876 break;
5877
5878 CASE_FLT_FN (BUILT_IN_ILOGB):
5879 if (! flag_unsafe_math_optimizations)
5880 break;
5881 CASE_FLT_FN (BUILT_IN_ISINF):
5882 CASE_FLT_FN (BUILT_IN_FINITE):
5883 case BUILT_IN_ISFINITE:
5884 case BUILT_IN_ISNORMAL:
5885 target = expand_builtin_interclass_mathfn (exp, target);
5886 if (target)
5887 return target;
5888 break;
5889
5890 CASE_FLT_FN (BUILT_IN_ICEIL):
5891 CASE_FLT_FN (BUILT_IN_LCEIL):
5892 CASE_FLT_FN (BUILT_IN_LLCEIL):
5893 CASE_FLT_FN (BUILT_IN_LFLOOR):
5894 CASE_FLT_FN (BUILT_IN_IFLOOR):
5895 CASE_FLT_FN (BUILT_IN_LLFLOOR):
5896 target = expand_builtin_int_roundingfn (exp, target);
5897 if (target)
5898 return target;
5899 break;
5900
5901 CASE_FLT_FN (BUILT_IN_IRINT):
5902 CASE_FLT_FN (BUILT_IN_LRINT):
5903 CASE_FLT_FN (BUILT_IN_LLRINT):
5904 CASE_FLT_FN (BUILT_IN_IROUND):
5905 CASE_FLT_FN (BUILT_IN_LROUND):
5906 CASE_FLT_FN (BUILT_IN_LLROUND):
5907 target = expand_builtin_int_roundingfn_2 (exp, target);
5908 if (target)
5909 return target;
5910 break;
5911
5912 CASE_FLT_FN (BUILT_IN_POWI):
5913 target = expand_builtin_powi (exp, target);
5914 if (target)
5915 return target;
5916 break;
5917
5918 CASE_FLT_FN (BUILT_IN_ATAN2):
5919 CASE_FLT_FN (BUILT_IN_LDEXP):
5920 CASE_FLT_FN (BUILT_IN_SCALB):
5921 CASE_FLT_FN (BUILT_IN_SCALBN):
5922 CASE_FLT_FN (BUILT_IN_SCALBLN):
5923 if (! flag_unsafe_math_optimizations)
5924 break;
5925
5926 CASE_FLT_FN (BUILT_IN_FMOD):
5927 CASE_FLT_FN (BUILT_IN_REMAINDER):
5928 CASE_FLT_FN (BUILT_IN_DREM):
5929 CASE_FLT_FN (BUILT_IN_POW):
5930 target = expand_builtin_mathfn_2 (exp, target, subtarget);
5931 if (target)
5932 return target;
5933 break;
5934
5935 CASE_FLT_FN (BUILT_IN_CEXPI):
5936 target = expand_builtin_cexpi (exp, target);
5937 gcc_assert (target);
5938 return target;
5939
5940 CASE_FLT_FN (BUILT_IN_SIN):
5941 CASE_FLT_FN (BUILT_IN_COS):
5942 if (! flag_unsafe_math_optimizations)
5943 break;
5944 target = expand_builtin_mathfn_3 (exp, target, subtarget);
5945 if (target)
5946 return target;
5947 break;
5948
5949 CASE_FLT_FN (BUILT_IN_SINCOS):
5950 if (! flag_unsafe_math_optimizations)
5951 break;
5952 target = expand_builtin_sincos (exp);
5953 if (target)
5954 return target;
5955 break;
5956
5957 case BUILT_IN_APPLY_ARGS:
5958 return expand_builtin_apply_args ();
5959
5960 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
5961 FUNCTION with a copy of the parameters described by
5962 ARGUMENTS, and ARGSIZE. It returns a block of memory
5963 allocated on the stack into which is stored all the registers
5964 that might possibly be used for returning the result of a
5965 function. ARGUMENTS is the value returned by
5966 __builtin_apply_args. ARGSIZE is the number of bytes of
5967 arguments that must be copied. ??? How should this value be
5968 computed? We'll also need a safe worst case value for varargs
5969 functions. */
5970 case BUILT_IN_APPLY:
5971 if (!validate_arglist (exp, POINTER_TYPE,
5972 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
5973 && !validate_arglist (exp, REFERENCE_TYPE,
5974 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5975 return const0_rtx;
5976 else
5977 {
5978 rtx ops[3];
5979
5980 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
5981 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
5982 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
5983
5984 return expand_builtin_apply (ops[0], ops[1], ops[2]);
5985 }
5986
5987 /* __builtin_return (RESULT) causes the function to return the
5988 value described by RESULT. RESULT is address of the block of
5989 memory returned by __builtin_apply. */
5990 case BUILT_IN_RETURN:
5991 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5992 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
5993 return const0_rtx;
5994
5995 case BUILT_IN_SAVEREGS:
5996 return expand_builtin_saveregs ();
5997
5998 case BUILT_IN_VA_ARG_PACK:
5999 /* All valid uses of __builtin_va_arg_pack () are removed during
6000 inlining. */
6001 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6002 return const0_rtx;
6003
6004 case BUILT_IN_VA_ARG_PACK_LEN:
6005 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6006 inlining. */
6007 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6008 return const0_rtx;
6009
6010 /* Return the address of the first anonymous stack arg. */
6011 case BUILT_IN_NEXT_ARG:
6012 if (fold_builtin_next_arg (exp, false))
6013 return const0_rtx;
6014 return expand_builtin_next_arg ();
6015
6016 case BUILT_IN_CLEAR_CACHE:
6017 target = expand_builtin___clear_cache (exp);
6018 if (target)
6019 return target;
6020 break;
6021
6022 case BUILT_IN_CLASSIFY_TYPE:
6023 return expand_builtin_classify_type (exp);
6024
6025 case BUILT_IN_CONSTANT_P:
6026 return const0_rtx;
6027
6028 case BUILT_IN_FRAME_ADDRESS:
6029 case BUILT_IN_RETURN_ADDRESS:
6030 return expand_builtin_frame_address (fndecl, exp);
6031
6032 /* Returns the address of the area where the structure is returned.
6033 0 otherwise. */
6034 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6035 if (call_expr_nargs (exp) != 0
6036 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6037 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6038 return const0_rtx;
6039 else
6040 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6041
6042 case BUILT_IN_ALLOCA:
6043 case BUILT_IN_ALLOCA_WITH_ALIGN:
6044 /* If the allocation stems from the declaration of a variable-sized
6045 object, it cannot accumulate. */
6046 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
6047 if (target)
6048 return target;
6049 break;
6050
6051 case BUILT_IN_STACK_SAVE:
6052 return expand_stack_save ();
6053
6054 case BUILT_IN_STACK_RESTORE:
6055 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6056 return const0_rtx;
6057
6058 case BUILT_IN_BSWAP16:
6059 case BUILT_IN_BSWAP32:
6060 case BUILT_IN_BSWAP64:
6061 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
6062 if (target)
6063 return target;
6064 break;
6065
6066 CASE_INT_FN (BUILT_IN_FFS):
6067 case BUILT_IN_FFSIMAX:
6068 target = expand_builtin_unop (target_mode, exp, target,
6069 subtarget, ffs_optab);
6070 if (target)
6071 return target;
6072 break;
6073
6074 CASE_INT_FN (BUILT_IN_CLZ):
6075 case BUILT_IN_CLZIMAX:
6076 target = expand_builtin_unop (target_mode, exp, target,
6077 subtarget, clz_optab);
6078 if (target)
6079 return target;
6080 break;
6081
6082 CASE_INT_FN (BUILT_IN_CTZ):
6083 case BUILT_IN_CTZIMAX:
6084 target = expand_builtin_unop (target_mode, exp, target,
6085 subtarget, ctz_optab);
6086 if (target)
6087 return target;
6088 break;
6089
6090 CASE_INT_FN (BUILT_IN_CLRSB):
6091 case BUILT_IN_CLRSBIMAX:
6092 target = expand_builtin_unop (target_mode, exp, target,
6093 subtarget, clrsb_optab);
6094 if (target)
6095 return target;
6096 break;
6097
6098 CASE_INT_FN (BUILT_IN_POPCOUNT):
6099 case BUILT_IN_POPCOUNTIMAX:
6100 target = expand_builtin_unop (target_mode, exp, target,
6101 subtarget, popcount_optab);
6102 if (target)
6103 return target;
6104 break;
6105
6106 CASE_INT_FN (BUILT_IN_PARITY):
6107 case BUILT_IN_PARITYIMAX:
6108 target = expand_builtin_unop (target_mode, exp, target,
6109 subtarget, parity_optab);
6110 if (target)
6111 return target;
6112 break;
6113
6114 case BUILT_IN_STRLEN:
6115 target = expand_builtin_strlen (exp, target, target_mode);
6116 if (target)
6117 return target;
6118 break;
6119
6120 case BUILT_IN_STRCPY:
6121 target = expand_builtin_strcpy (exp, target);
6122 if (target)
6123 return target;
6124 break;
6125
6126 case BUILT_IN_STRNCPY:
6127 target = expand_builtin_strncpy (exp, target);
6128 if (target)
6129 return target;
6130 break;
6131
6132 case BUILT_IN_STPCPY:
6133 target = expand_builtin_stpcpy (exp, target, mode);
6134 if (target)
6135 return target;
6136 break;
6137
6138 case BUILT_IN_MEMCPY:
6139 target = expand_builtin_memcpy (exp, target);
6140 if (target)
6141 return target;
6142 break;
6143
6144 case BUILT_IN_MEMPCPY:
6145 target = expand_builtin_mempcpy (exp, target, mode);
6146 if (target)
6147 return target;
6148 break;
6149
6150 case BUILT_IN_MEMSET:
6151 target = expand_builtin_memset (exp, target, mode);
6152 if (target)
6153 return target;
6154 break;
6155
6156 case BUILT_IN_BZERO:
6157 target = expand_builtin_bzero (exp);
6158 if (target)
6159 return target;
6160 break;
6161
6162 case BUILT_IN_STRCMP:
6163 target = expand_builtin_strcmp (exp, target);
6164 if (target)
6165 return target;
6166 break;
6167
6168 case BUILT_IN_STRNCMP:
6169 target = expand_builtin_strncmp (exp, target, mode);
6170 if (target)
6171 return target;
6172 break;
6173
6174 case BUILT_IN_BCMP:
6175 case BUILT_IN_MEMCMP:
6176 target = expand_builtin_memcmp (exp, target, mode);
6177 if (target)
6178 return target;
6179 break;
6180
6181 case BUILT_IN_SETJMP:
6182 /* This should have been lowered to the builtins below. */
6183 gcc_unreachable ();
6184
6185 case BUILT_IN_SETJMP_SETUP:
6186 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6187 and the receiver label. */
6188 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6189 {
6190 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6191 VOIDmode, EXPAND_NORMAL);
6192 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6193 rtx label_r = label_rtx (label);
6194
6195 /* This is copied from the handling of non-local gotos. */
6196 expand_builtin_setjmp_setup (buf_addr, label_r);
6197 nonlocal_goto_handler_labels
6198 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6199 nonlocal_goto_handler_labels);
6200 /* ??? Do not let expand_label treat us as such since we would
6201 not want to be both on the list of non-local labels and on
6202 the list of forced labels. */
6203 FORCED_LABEL (label) = 0;
6204 return const0_rtx;
6205 }
6206 break;
6207
6208 case BUILT_IN_SETJMP_DISPATCHER:
6209 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6210 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6211 {
6212 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6213 rtx label_r = label_rtx (label);
6214
6215 /* Remove the dispatcher label from the list of non-local labels
6216 since the receiver labels have been added to it above. */
6217 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6218 return const0_rtx;
6219 }
6220 break;
6221
6222 case BUILT_IN_SETJMP_RECEIVER:
6223 /* __builtin_setjmp_receiver is passed the receiver label. */
6224 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6225 {
6226 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6227 rtx label_r = label_rtx (label);
6228
6229 expand_builtin_setjmp_receiver (label_r);
6230 return const0_rtx;
6231 }
6232 break;
6233
6234 /* __builtin_longjmp is passed a pointer to an array of five words.
6235 It's similar to the C library longjmp function but works with
6236 __builtin_setjmp above. */
6237 case BUILT_IN_LONGJMP:
6238 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6239 {
6240 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6241 VOIDmode, EXPAND_NORMAL);
6242 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6243
6244 if (value != const1_rtx)
6245 {
6246 error ("%<__builtin_longjmp%> second argument must be 1");
6247 return const0_rtx;
6248 }
6249
6250 expand_builtin_longjmp (buf_addr, value);
6251 return const0_rtx;
6252 }
6253 break;
6254
6255 case BUILT_IN_NONLOCAL_GOTO:
6256 target = expand_builtin_nonlocal_goto (exp);
6257 if (target)
6258 return target;
6259 break;
6260
6261 /* This updates the setjmp buffer that is its argument with the value
6262 of the current stack pointer. */
6263 case BUILT_IN_UPDATE_SETJMP_BUF:
6264 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6265 {
6266 rtx buf_addr
6267 = expand_normal (CALL_EXPR_ARG (exp, 0));
6268
6269 expand_builtin_update_setjmp_buf (buf_addr);
6270 return const0_rtx;
6271 }
6272 break;
6273
6274 case BUILT_IN_TRAP:
6275 expand_builtin_trap ();
6276 return const0_rtx;
6277
6278 case BUILT_IN_UNREACHABLE:
6279 expand_builtin_unreachable ();
6280 return const0_rtx;
6281
6282 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6283 case BUILT_IN_SIGNBITD32:
6284 case BUILT_IN_SIGNBITD64:
6285 case BUILT_IN_SIGNBITD128:
6286 target = expand_builtin_signbit (exp, target);
6287 if (target)
6288 return target;
6289 break;
6290
6291 /* Various hooks for the DWARF 2 __throw routine. */
6292 case BUILT_IN_UNWIND_INIT:
6293 expand_builtin_unwind_init ();
6294 return const0_rtx;
6295 case BUILT_IN_DWARF_CFA:
6296 return virtual_cfa_rtx;
6297 #ifdef DWARF2_UNWIND_INFO
6298 case BUILT_IN_DWARF_SP_COLUMN:
6299 return expand_builtin_dwarf_sp_column ();
6300 case BUILT_IN_INIT_DWARF_REG_SIZES:
6301 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6302 return const0_rtx;
6303 #endif
6304 case BUILT_IN_FROB_RETURN_ADDR:
6305 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6306 case BUILT_IN_EXTRACT_RETURN_ADDR:
6307 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6308 case BUILT_IN_EH_RETURN:
6309 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6310 CALL_EXPR_ARG (exp, 1));
6311 return const0_rtx;
6312 #ifdef EH_RETURN_DATA_REGNO
6313 case BUILT_IN_EH_RETURN_DATA_REGNO:
6314 return expand_builtin_eh_return_data_regno (exp);
6315 #endif
6316 case BUILT_IN_EXTEND_POINTER:
6317 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6318 case BUILT_IN_EH_POINTER:
6319 return expand_builtin_eh_pointer (exp);
6320 case BUILT_IN_EH_FILTER:
6321 return expand_builtin_eh_filter (exp);
6322 case BUILT_IN_EH_COPY_VALUES:
6323 return expand_builtin_eh_copy_values (exp);
6324
6325 case BUILT_IN_VA_START:
6326 return expand_builtin_va_start (exp);
6327 case BUILT_IN_VA_END:
6328 return expand_builtin_va_end (exp);
6329 case BUILT_IN_VA_COPY:
6330 return expand_builtin_va_copy (exp);
6331 case BUILT_IN_EXPECT:
6332 return expand_builtin_expect (exp, target);
6333 case BUILT_IN_ASSUME_ALIGNED:
6334 return expand_builtin_assume_aligned (exp, target);
6335 case BUILT_IN_PREFETCH:
6336 expand_builtin_prefetch (exp);
6337 return const0_rtx;
6338
6339 case BUILT_IN_INIT_TRAMPOLINE:
6340 return expand_builtin_init_trampoline (exp, true);
6341 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
6342 return expand_builtin_init_trampoline (exp, false);
6343 case BUILT_IN_ADJUST_TRAMPOLINE:
6344 return expand_builtin_adjust_trampoline (exp);
6345
6346 case BUILT_IN_FORK:
6347 case BUILT_IN_EXECL:
6348 case BUILT_IN_EXECV:
6349 case BUILT_IN_EXECLP:
6350 case BUILT_IN_EXECLE:
6351 case BUILT_IN_EXECVP:
6352 case BUILT_IN_EXECVE:
6353 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6354 if (target)
6355 return target;
6356 break;
6357
6358 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
6359 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
6360 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
6361 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
6362 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
6363 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
6364 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
6365 if (target)
6366 return target;
6367 break;
6368
6369 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
6370 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
6371 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
6372 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
6373 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
6374 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
6375 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
6376 if (target)
6377 return target;
6378 break;
6379
6380 case BUILT_IN_SYNC_FETCH_AND_OR_1:
6381 case BUILT_IN_SYNC_FETCH_AND_OR_2:
6382 case BUILT_IN_SYNC_FETCH_AND_OR_4:
6383 case BUILT_IN_SYNC_FETCH_AND_OR_8:
6384 case BUILT_IN_SYNC_FETCH_AND_OR_16:
6385 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
6386 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
6387 if (target)
6388 return target;
6389 break;
6390
6391 case BUILT_IN_SYNC_FETCH_AND_AND_1:
6392 case BUILT_IN_SYNC_FETCH_AND_AND_2:
6393 case BUILT_IN_SYNC_FETCH_AND_AND_4:
6394 case BUILT_IN_SYNC_FETCH_AND_AND_8:
6395 case BUILT_IN_SYNC_FETCH_AND_AND_16:
6396 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
6397 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
6398 if (target)
6399 return target;
6400 break;
6401
6402 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
6403 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
6404 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
6405 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
6406 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
6407 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
6408 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
6409 if (target)
6410 return target;
6411 break;
6412
6413 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6414 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6415 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6416 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6417 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6418 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
6419 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
6420 if (target)
6421 return target;
6422 break;
6423
6424 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
6425 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
6426 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
6427 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
6428 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
6429 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
6430 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
6431 if (target)
6432 return target;
6433 break;
6434
6435 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
6436 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
6437 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
6438 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
6439 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
6440 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
6441 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
6442 if (target)
6443 return target;
6444 break;
6445
6446 case BUILT_IN_SYNC_OR_AND_FETCH_1:
6447 case BUILT_IN_SYNC_OR_AND_FETCH_2:
6448 case BUILT_IN_SYNC_OR_AND_FETCH_4:
6449 case BUILT_IN_SYNC_OR_AND_FETCH_8:
6450 case BUILT_IN_SYNC_OR_AND_FETCH_16:
6451 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
6452 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
6453 if (target)
6454 return target;
6455 break;
6456
6457 case BUILT_IN_SYNC_AND_AND_FETCH_1:
6458 case BUILT_IN_SYNC_AND_AND_FETCH_2:
6459 case BUILT_IN_SYNC_AND_AND_FETCH_4:
6460 case BUILT_IN_SYNC_AND_AND_FETCH_8:
6461 case BUILT_IN_SYNC_AND_AND_FETCH_16:
6462 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
6463 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
6464 if (target)
6465 return target;
6466 break;
6467
6468 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
6469 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
6470 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
6471 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
6472 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
6473 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
6474 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
6475 if (target)
6476 return target;
6477 break;
6478
6479 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6480 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6481 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6482 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6483 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6484 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
6485 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
6486 if (target)
6487 return target;
6488 break;
6489
6490 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
6491 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
6492 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
6493 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
6494 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
6495 if (mode == VOIDmode)
6496 mode = TYPE_MODE (boolean_type_node);
6497 if (!target || !register_operand (target, mode))
6498 target = gen_reg_rtx (mode);
6499
6500 mode = get_builtin_sync_mode
6501 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
6502 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6503 if (target)
6504 return target;
6505 break;
6506
6507 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
6508 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
6509 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
6510 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
6511 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
6512 mode = get_builtin_sync_mode
6513 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
6514 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6515 if (target)
6516 return target;
6517 break;
6518
6519 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
6520 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
6521 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
6522 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
6523 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
6524 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
6525 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
6526 if (target)
6527 return target;
6528 break;
6529
6530 case BUILT_IN_SYNC_LOCK_RELEASE_1:
6531 case BUILT_IN_SYNC_LOCK_RELEASE_2:
6532 case BUILT_IN_SYNC_LOCK_RELEASE_4:
6533 case BUILT_IN_SYNC_LOCK_RELEASE_8:
6534 case BUILT_IN_SYNC_LOCK_RELEASE_16:
6535 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
6536 expand_builtin_sync_lock_release (mode, exp);
6537 return const0_rtx;
6538
6539 case BUILT_IN_SYNC_SYNCHRONIZE:
6540 expand_builtin_sync_synchronize ();
6541 return const0_rtx;
6542
6543 case BUILT_IN_ATOMIC_EXCHANGE_1:
6544 case BUILT_IN_ATOMIC_EXCHANGE_2:
6545 case BUILT_IN_ATOMIC_EXCHANGE_4:
6546 case BUILT_IN_ATOMIC_EXCHANGE_8:
6547 case BUILT_IN_ATOMIC_EXCHANGE_16:
6548 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
6549 target = expand_builtin_atomic_exchange (mode, exp, target);
6550 if (target)
6551 return target;
6552 break;
6553
6554 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
6555 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
6556 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
6557 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
6558 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
6559 {
6560 unsigned int nargs, z;
6561 VEC(tree,gc) *vec;
6562
6563 mode =
6564 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
6565 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
6566 if (target)
6567 return target;
6568
6569 /* If this is turned into an external library call, the weak parameter
6570 must be dropped to match the expected parameter list. */
6571 nargs = call_expr_nargs (exp);
6572 vec = VEC_alloc (tree, gc, nargs - 1);
6573 for (z = 0; z < 3; z++)
6574 VEC_quick_push (tree, vec, CALL_EXPR_ARG (exp, z));
6575 /* Skip the boolean weak parameter. */
6576 for (z = 4; z < 6; z++)
6577 VEC_quick_push (tree, vec, CALL_EXPR_ARG (exp, z));
6578 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
6579 break;
6580 }
6581
6582 case BUILT_IN_ATOMIC_LOAD_1:
6583 case BUILT_IN_ATOMIC_LOAD_2:
6584 case BUILT_IN_ATOMIC_LOAD_4:
6585 case BUILT_IN_ATOMIC_LOAD_8:
6586 case BUILT_IN_ATOMIC_LOAD_16:
6587 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
6588 target = expand_builtin_atomic_load (mode, exp, target);
6589 if (target)
6590 return target;
6591 break;
6592
6593 case BUILT_IN_ATOMIC_STORE_1:
6594 case BUILT_IN_ATOMIC_STORE_2:
6595 case BUILT_IN_ATOMIC_STORE_4:
6596 case BUILT_IN_ATOMIC_STORE_8:
6597 case BUILT_IN_ATOMIC_STORE_16:
6598 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
6599 target = expand_builtin_atomic_store (mode, exp);
6600 if (target)
6601 return const0_rtx;
6602 break;
6603
6604 case BUILT_IN_ATOMIC_ADD_FETCH_1:
6605 case BUILT_IN_ATOMIC_ADD_FETCH_2:
6606 case BUILT_IN_ATOMIC_ADD_FETCH_4:
6607 case BUILT_IN_ATOMIC_ADD_FETCH_8:
6608 case BUILT_IN_ATOMIC_ADD_FETCH_16:
6609 {
6610 enum built_in_function lib;
6611 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
6612 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
6613 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
6614 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
6615 ignore, lib);
6616 if (target)
6617 return target;
6618 break;
6619 }
6620 case BUILT_IN_ATOMIC_SUB_FETCH_1:
6621 case BUILT_IN_ATOMIC_SUB_FETCH_2:
6622 case BUILT_IN_ATOMIC_SUB_FETCH_4:
6623 case BUILT_IN_ATOMIC_SUB_FETCH_8:
6624 case BUILT_IN_ATOMIC_SUB_FETCH_16:
6625 {
6626 enum built_in_function lib;
6627 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
6628 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
6629 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
6630 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
6631 ignore, lib);
6632 if (target)
6633 return target;
6634 break;
6635 }
6636 case BUILT_IN_ATOMIC_AND_FETCH_1:
6637 case BUILT_IN_ATOMIC_AND_FETCH_2:
6638 case BUILT_IN_ATOMIC_AND_FETCH_4:
6639 case BUILT_IN_ATOMIC_AND_FETCH_8:
6640 case BUILT_IN_ATOMIC_AND_FETCH_16:
6641 {
6642 enum built_in_function lib;
6643 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
6644 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
6645 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
6646 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
6647 ignore, lib);
6648 if (target)
6649 return target;
6650 break;
6651 }
6652 case BUILT_IN_ATOMIC_NAND_FETCH_1:
6653 case BUILT_IN_ATOMIC_NAND_FETCH_2:
6654 case BUILT_IN_ATOMIC_NAND_FETCH_4:
6655 case BUILT_IN_ATOMIC_NAND_FETCH_8:
6656 case BUILT_IN_ATOMIC_NAND_FETCH_16:
6657 {
6658 enum built_in_function lib;
6659 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
6660 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
6661 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
6662 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
6663 ignore, lib);
6664 if (target)
6665 return target;
6666 break;
6667 }
6668 case BUILT_IN_ATOMIC_XOR_FETCH_1:
6669 case BUILT_IN_ATOMIC_XOR_FETCH_2:
6670 case BUILT_IN_ATOMIC_XOR_FETCH_4:
6671 case BUILT_IN_ATOMIC_XOR_FETCH_8:
6672 case BUILT_IN_ATOMIC_XOR_FETCH_16:
6673 {
6674 enum built_in_function lib;
6675 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
6676 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
6677 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
6678 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
6679 ignore, lib);
6680 if (target)
6681 return target;
6682 break;
6683 }
6684 case BUILT_IN_ATOMIC_OR_FETCH_1:
6685 case BUILT_IN_ATOMIC_OR_FETCH_2:
6686 case BUILT_IN_ATOMIC_OR_FETCH_4:
6687 case BUILT_IN_ATOMIC_OR_FETCH_8:
6688 case BUILT_IN_ATOMIC_OR_FETCH_16:
6689 {
6690 enum built_in_function lib;
6691 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
6692 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
6693 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
6694 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
6695 ignore, lib);
6696 if (target)
6697 return target;
6698 break;
6699 }
6700 case BUILT_IN_ATOMIC_FETCH_ADD_1:
6701 case BUILT_IN_ATOMIC_FETCH_ADD_2:
6702 case BUILT_IN_ATOMIC_FETCH_ADD_4:
6703 case BUILT_IN_ATOMIC_FETCH_ADD_8:
6704 case BUILT_IN_ATOMIC_FETCH_ADD_16:
6705 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
6706 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
6707 ignore, BUILT_IN_NONE);
6708 if (target)
6709 return target;
6710 break;
6711
6712 case BUILT_IN_ATOMIC_FETCH_SUB_1:
6713 case BUILT_IN_ATOMIC_FETCH_SUB_2:
6714 case BUILT_IN_ATOMIC_FETCH_SUB_4:
6715 case BUILT_IN_ATOMIC_FETCH_SUB_8:
6716 case BUILT_IN_ATOMIC_FETCH_SUB_16:
6717 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
6718 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
6719 ignore, BUILT_IN_NONE);
6720 if (target)
6721 return target;
6722 break;
6723
6724 case BUILT_IN_ATOMIC_FETCH_AND_1:
6725 case BUILT_IN_ATOMIC_FETCH_AND_2:
6726 case BUILT_IN_ATOMIC_FETCH_AND_4:
6727 case BUILT_IN_ATOMIC_FETCH_AND_8:
6728 case BUILT_IN_ATOMIC_FETCH_AND_16:
6729 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
6730 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
6731 ignore, BUILT_IN_NONE);
6732 if (target)
6733 return target;
6734 break;
6735
6736 case BUILT_IN_ATOMIC_FETCH_NAND_1:
6737 case BUILT_IN_ATOMIC_FETCH_NAND_2:
6738 case BUILT_IN_ATOMIC_FETCH_NAND_4:
6739 case BUILT_IN_ATOMIC_FETCH_NAND_8:
6740 case BUILT_IN_ATOMIC_FETCH_NAND_16:
6741 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
6742 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
6743 ignore, BUILT_IN_NONE);
6744 if (target)
6745 return target;
6746 break;
6747
6748 case BUILT_IN_ATOMIC_FETCH_XOR_1:
6749 case BUILT_IN_ATOMIC_FETCH_XOR_2:
6750 case BUILT_IN_ATOMIC_FETCH_XOR_4:
6751 case BUILT_IN_ATOMIC_FETCH_XOR_8:
6752 case BUILT_IN_ATOMIC_FETCH_XOR_16:
6753 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
6754 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
6755 ignore, BUILT_IN_NONE);
6756 if (target)
6757 return target;
6758 break;
6759
6760 case BUILT_IN_ATOMIC_FETCH_OR_1:
6761 case BUILT_IN_ATOMIC_FETCH_OR_2:
6762 case BUILT_IN_ATOMIC_FETCH_OR_4:
6763 case BUILT_IN_ATOMIC_FETCH_OR_8:
6764 case BUILT_IN_ATOMIC_FETCH_OR_16:
6765 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
6766 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
6767 ignore, BUILT_IN_NONE);
6768 if (target)
6769 return target;
6770 break;
6771
6772 case BUILT_IN_ATOMIC_TEST_AND_SET:
6773 return expand_builtin_atomic_test_and_set (exp, target);
6774
6775 case BUILT_IN_ATOMIC_CLEAR:
6776 return expand_builtin_atomic_clear (exp);
6777
6778 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
6779 return expand_builtin_atomic_always_lock_free (exp);
6780
6781 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
6782 target = expand_builtin_atomic_is_lock_free (exp);
6783 if (target)
6784 return target;
6785 break;
6786
6787 case BUILT_IN_ATOMIC_THREAD_FENCE:
6788 expand_builtin_atomic_thread_fence (exp);
6789 return const0_rtx;
6790
6791 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
6792 expand_builtin_atomic_signal_fence (exp);
6793 return const0_rtx;
6794
6795 case BUILT_IN_OBJECT_SIZE:
6796 return expand_builtin_object_size (exp);
6797
6798 case BUILT_IN_MEMCPY_CHK:
6799 case BUILT_IN_MEMPCPY_CHK:
6800 case BUILT_IN_MEMMOVE_CHK:
6801 case BUILT_IN_MEMSET_CHK:
6802 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6803 if (target)
6804 return target;
6805 break;
6806
6807 case BUILT_IN_STRCPY_CHK:
6808 case BUILT_IN_STPCPY_CHK:
6809 case BUILT_IN_STRNCPY_CHK:
6810 case BUILT_IN_STPNCPY_CHK:
6811 case BUILT_IN_STRCAT_CHK:
6812 case BUILT_IN_STRNCAT_CHK:
6813 case BUILT_IN_SNPRINTF_CHK:
6814 case BUILT_IN_VSNPRINTF_CHK:
6815 maybe_emit_chk_warning (exp, fcode);
6816 break;
6817
6818 case BUILT_IN_SPRINTF_CHK:
6819 case BUILT_IN_VSPRINTF_CHK:
6820 maybe_emit_sprintf_chk_warning (exp, fcode);
6821 break;
6822
6823 case BUILT_IN_FREE:
6824 if (warn_free_nonheap_object)
6825 maybe_emit_free_warning (exp);
6826 break;
6827
6828 default: /* just do library call, if unknown builtin */
6829 break;
6830 }
6831
6832 /* The switch statement above can drop through to cause the function
6833 to be called normally. */
6834 return expand_call (exp, target, ignore);
6835 }
6836
6837 /* Determine whether a tree node represents a call to a built-in
6838 function. If the tree T is a call to a built-in function with
6839 the right number of arguments of the appropriate types, return
6840 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6841 Otherwise the return value is END_BUILTINS. */
6842
6843 enum built_in_function
6844 builtin_mathfn_code (const_tree t)
6845 {
6846 const_tree fndecl, arg, parmlist;
6847 const_tree argtype, parmtype;
6848 const_call_expr_arg_iterator iter;
6849
6850 if (TREE_CODE (t) != CALL_EXPR
6851 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6852 return END_BUILTINS;
6853
6854 fndecl = get_callee_fndecl (t);
6855 if (fndecl == NULL_TREE
6856 || TREE_CODE (fndecl) != FUNCTION_DECL
6857 || ! DECL_BUILT_IN (fndecl)
6858 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6859 return END_BUILTINS;
6860
6861 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6862 init_const_call_expr_arg_iterator (t, &iter);
6863 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
6864 {
6865 /* If a function doesn't take a variable number of arguments,
6866 the last element in the list will have type `void'. */
6867 parmtype = TREE_VALUE (parmlist);
6868 if (VOID_TYPE_P (parmtype))
6869 {
6870 if (more_const_call_expr_args_p (&iter))
6871 return END_BUILTINS;
6872 return DECL_FUNCTION_CODE (fndecl);
6873 }
6874
6875 if (! more_const_call_expr_args_p (&iter))
6876 return END_BUILTINS;
6877
6878 arg = next_const_call_expr_arg (&iter);
6879 argtype = TREE_TYPE (arg);
6880
6881 if (SCALAR_FLOAT_TYPE_P (parmtype))
6882 {
6883 if (! SCALAR_FLOAT_TYPE_P (argtype))
6884 return END_BUILTINS;
6885 }
6886 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
6887 {
6888 if (! COMPLEX_FLOAT_TYPE_P (argtype))
6889 return END_BUILTINS;
6890 }
6891 else if (POINTER_TYPE_P (parmtype))
6892 {
6893 if (! POINTER_TYPE_P (argtype))
6894 return END_BUILTINS;
6895 }
6896 else if (INTEGRAL_TYPE_P (parmtype))
6897 {
6898 if (! INTEGRAL_TYPE_P (argtype))
6899 return END_BUILTINS;
6900 }
6901 else
6902 return END_BUILTINS;
6903 }
6904
6905 /* Variable-length argument list. */
6906 return DECL_FUNCTION_CODE (fndecl);
6907 }
6908
6909 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
6910 evaluate to a constant. */
6911
6912 static tree
6913 fold_builtin_constant_p (tree arg)
6914 {
6915 /* We return 1 for a numeric type that's known to be a constant
6916 value at compile-time or for an aggregate type that's a
6917 literal constant. */
6918 STRIP_NOPS (arg);
6919
6920 /* If we know this is a constant, emit the constant of one. */
6921 if (CONSTANT_CLASS_P (arg)
6922 || (TREE_CODE (arg) == CONSTRUCTOR
6923 && TREE_CONSTANT (arg)))
6924 return integer_one_node;
6925 if (TREE_CODE (arg) == ADDR_EXPR)
6926 {
6927 tree op = TREE_OPERAND (arg, 0);
6928 if (TREE_CODE (op) == STRING_CST
6929 || (TREE_CODE (op) == ARRAY_REF
6930 && integer_zerop (TREE_OPERAND (op, 1))
6931 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
6932 return integer_one_node;
6933 }
6934
6935 /* If this expression has side effects, show we don't know it to be a
6936 constant. Likewise if it's a pointer or aggregate type since in
6937 those case we only want literals, since those are only optimized
6938 when generating RTL, not later.
6939 And finally, if we are compiling an initializer, not code, we
6940 need to return a definite result now; there's not going to be any
6941 more optimization done. */
6942 if (TREE_SIDE_EFFECTS (arg)
6943 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
6944 || POINTER_TYPE_P (TREE_TYPE (arg))
6945 || cfun == 0
6946 || folding_initializer)
6947 return integer_zero_node;
6948
6949 return NULL_TREE;
6950 }
6951
6952 /* Create builtin_expect with PRED and EXPECTED as its arguments and
6953 return it as a truthvalue. */
6954
6955 static tree
6956 build_builtin_expect_predicate (location_t loc, tree pred, tree expected)
6957 {
6958 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
6959
6960 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
6961 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
6962 ret_type = TREE_TYPE (TREE_TYPE (fn));
6963 pred_type = TREE_VALUE (arg_types);
6964 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
6965
6966 pred = fold_convert_loc (loc, pred_type, pred);
6967 expected = fold_convert_loc (loc, expected_type, expected);
6968 call_expr = build_call_expr_loc (loc, fn, 2, pred, expected);
6969
6970 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
6971 build_int_cst (ret_type, 0));
6972 }
6973
/* Fold a call to builtin_expect with arguments ARG0 (the value being
   predicted) and ARG1 (the expected value).  Return the simplified
   expression, or NULL_TREE if no simplification is possible.  */

static tree
fold_builtin_expect (location_t loc, tree arg0, tree arg1)
{
  tree inner, fndecl, inner_arg0;
  enum tree_code code;

  /* Distribute the expected value over short-circuiting operators.
     See through the cast from truthvalue_type_node to long.  */
  inner_arg0 = arg0;
  while (TREE_CODE (inner_arg0) == NOP_EXPR
	 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
	 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
    inner_arg0 = TREE_OPERAND (inner_arg0, 0);

  /* If this is a builtin_expect within a builtin_expect keep the
     inner one.  See through a comparison against a constant.  It
     might have been added to create a truthvalue.  */
  inner = inner_arg0;

  if (COMPARISON_CLASS_P (inner)
      && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
    inner = TREE_OPERAND (inner, 0);

  if (TREE_CODE (inner) == CALL_EXPR
      && (fndecl = get_callee_fndecl (inner))
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
    return arg0;

  /* Push the expectation down into both operands of && / || so each
     half of the short-circuit carries the prediction.  */
  inner = inner_arg0;
  code = TREE_CODE (inner);
  if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
    {
      tree op0 = TREE_OPERAND (inner, 0);
      tree op1 = TREE_OPERAND (inner, 1);

      op0 = build_builtin_expect_predicate (loc, op0, arg1);
      op1 = build_builtin_expect_predicate (loc, op1, arg1);
      inner = build2 (code, TREE_TYPE (inner), op0, op1);

      return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
    }

  /* If the argument isn't invariant then there's nothing else we can do.  */
  if (!TREE_CONSTANT (inner_arg0))
    return NULL_TREE;

  /* If we expect that a comparison against the argument will fold to
     a constant return the constant.  In practice, this means a true
     constant or the address of a non-weak symbol.  */
  inner = inner_arg0;
  STRIP_NOPS (inner);
  if (TREE_CODE (inner) == ADDR_EXPR)
    {
      do
	{
	  inner = TREE_OPERAND (inner, 0);
	}
      while (TREE_CODE (inner) == COMPONENT_REF
	     || TREE_CODE (inner) == ARRAY_REF);
      /* The address of a weak symbol is not a usable compile-time
	 constant; it may turn out to be zero.  */
      if ((TREE_CODE (inner) == VAR_DECL
	   || TREE_CODE (inner) == FUNCTION_DECL)
	  && DECL_WEAK (inner))
	return NULL_TREE;
    }

  /* Otherwise, ARG0 already has the proper type for the return value.  */
  return arg0;
}
7046
7047 /* Fold a call to __builtin_classify_type with argument ARG. */
7048
7049 static tree
7050 fold_builtin_classify_type (tree arg)
7051 {
7052 if (arg == 0)
7053 return build_int_cst (integer_type_node, no_type_class);
7054
7055 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
7056 }
7057
7058 /* Fold a call to __builtin_strlen with argument ARG. */
7059
7060 static tree
7061 fold_builtin_strlen (location_t loc, tree type, tree arg)
7062 {
7063 if (!validate_arg (arg, POINTER_TYPE))
7064 return NULL_TREE;
7065 else
7066 {
7067 tree len = c_strlen (arg, 0);
7068
7069 if (len)
7070 return fold_convert_loc (loc, type, len);
7071
7072 return NULL_TREE;
7073 }
7074 }
7075
7076 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7077
7078 static tree
7079 fold_builtin_inf (location_t loc, tree type, int warn)
7080 {
7081 REAL_VALUE_TYPE real;
7082
7083 /* __builtin_inff is intended to be usable to define INFINITY on all
7084 targets. If an infinity is not available, INFINITY expands "to a
7085 positive constant of type float that overflows at translation
7086 time", footnote "In this case, using INFINITY will violate the
7087 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7088 Thus we pedwarn to ensure this constraint violation is
7089 diagnosed. */
7090 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7091 pedwarn (loc, 0, "target format does not support infinity");
7092
7093 real_inf (&real);
7094 return build_real (type, real);
7095 }
7096
7097 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7098
7099 static tree
7100 fold_builtin_nan (tree arg, tree type, int quiet)
7101 {
7102 REAL_VALUE_TYPE real;
7103 const char *str;
7104
7105 if (!validate_arg (arg, POINTER_TYPE))
7106 return NULL_TREE;
7107 str = c_getstr (arg);
7108 if (!str)
7109 return NULL_TREE;
7110
7111 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7112 return NULL_TREE;
7113
7114 return build_real (type, real);
7115 }
7116
/* Return true if the floating point expression T has an integer value.
   We also allow +Inf, -Inf and NaN to be considered integer values.
   This is conservative: a false return only means integrality could
   not be proven.  */

static bool
integer_valued_real_p (tree t)
{
  switch (TREE_CODE (t))
    {
    case FLOAT_EXPR:
      /* A conversion from an integer type is integral by construction.  */
      return true;

    case ABS_EXPR:
    case SAVE_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 0));

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      /* Only the value operand (operand 1) determines the result.  */
      return integer_valued_real_p (TREE_OPERAND (t, 1));

    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
      /* These operations map integral operands to integral results.  */
      return integer_valued_real_p (TREE_OPERAND (t, 0))
	     && integer_valued_real_p (TREE_OPERAND (t, 1));

    case COND_EXPR:
      /* Both arms must be integral; the condition does not matter.  */
      return integer_valued_real_p (TREE_OPERAND (t, 1))
	     && integer_valued_real_p (TREE_OPERAND (t, 2));

    case REAL_CST:
      return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));

    case NOP_EXPR:
      {
	tree type = TREE_TYPE (TREE_OPERAND (t, 0));
	/* A conversion from an integer type, or a widening/narrowing
	   between real types with an integral operand, stays integral.  */
	if (TREE_CODE (type) == INTEGER_TYPE)
	  return true;
	if (TREE_CODE (type) == REAL_TYPE)
	  return integer_valued_real_p (TREE_OPERAND (t, 0));
	break;
      }

    case CALL_EXPR:
      switch (builtin_mathfn_code (t))
	{
	CASE_FLT_FN (BUILT_IN_CEIL):
	CASE_FLT_FN (BUILT_IN_FLOOR):
	CASE_FLT_FN (BUILT_IN_NEARBYINT):
	CASE_FLT_FN (BUILT_IN_RINT):
	CASE_FLT_FN (BUILT_IN_ROUND):
	CASE_FLT_FN (BUILT_IN_TRUNC):
	  /* Rounding builtins always yield integral values.  */
	  return true;

	CASE_FLT_FN (BUILT_IN_FMIN):
	CASE_FLT_FN (BUILT_IN_FMAX):
	  /* fmin/fmax return one of their operands.  */
	  return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
		 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));

	default:
	  break;
	}
      break;

    default:
      break;
    }
  return false;
}
7188
7189 /* FNDECL is assumed to be a builtin where truncation can be propagated
7190 across (for instance floor((double)f) == (double)floorf (f).
7191 Do the transformation for a call with argument ARG. */
7192
7193 static tree
7194 fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
7195 {
7196 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7197
7198 if (!validate_arg (arg, REAL_TYPE))
7199 return NULL_TREE;
7200
7201 /* Integer rounding functions are idempotent. */
7202 if (fcode == builtin_mathfn_code (arg))
7203 return arg;
7204
7205 /* If argument is already integer valued, and we don't need to worry
7206 about setting errno, there's no need to perform rounding. */
7207 if (! flag_errno_math && integer_valued_real_p (arg))
7208 return arg;
7209
7210 if (optimize)
7211 {
7212 tree arg0 = strip_float_extensions (arg);
7213 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7214 tree newtype = TREE_TYPE (arg0);
7215 tree decl;
7216
7217 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7218 && (decl = mathfn_built_in (newtype, fcode)))
7219 return fold_convert_loc (loc, ftype,
7220 build_call_expr_loc (loc, decl, 1,
7221 fold_convert_loc (loc,
7222 newtype,
7223 arg0)));
7224 }
7225 return NULL_TREE;
7226 }
7227
/* FNDECL is assumed to be builtin which can narrow the FP type of
   the argument, for instance lround((double)f) -> lroundf (f).
   Do the transformation for a call with argument ARG.  Return the
   replacement tree, or NULL_TREE if nothing applies.  */

static tree
fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* If argument is already integer valued, and we don't need to worry
     about setting errno, there's no need to perform rounding.  */
  if (! flag_errno_math && integer_valued_real_p (arg))
    return fold_build1_loc (loc, FIX_TRUNC_EXPR,
			    TREE_TYPE (TREE_TYPE (fndecl)), arg);

  if (optimize)
    {
      tree ftype = TREE_TYPE (arg);
      tree arg0 = strip_float_extensions (arg);
      tree newtype = TREE_TYPE (arg0);
      tree decl;

      /* If the argument was widened from a narrower FP type, call the
	 variant of this builtin for that narrower type instead.  */
      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
	  && (decl = mathfn_built_in (newtype, fcode)))
	return build_call_expr_loc (loc, decl, 1,
				    fold_convert_loc (loc, newtype, arg0));
    }

  /* Canonicalize iround (x) to lround (x) on ILP32 targets where
     sizeof (int) == sizeof (long).  */
  if (TYPE_PRECISION (integer_type_node)
      == TYPE_PRECISION (long_integer_type_node))
    {
      tree newfn = NULL_TREE;
      switch (fcode)
	{
	CASE_FLT_FN (BUILT_IN_ICEIL):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
	  break;

	CASE_FLT_FN (BUILT_IN_IFLOOR):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
	  break;

	CASE_FLT_FN (BUILT_IN_IROUND):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
	  break;

	CASE_FLT_FN (BUILT_IN_IRINT):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
	  break;

	default:
	  break;
	}

      if (newfn)
	{
	  /* The int and long results are the same width here, so a
	     conversion of the return value suffices.  */
	  tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
	  return fold_convert_loc (loc,
				   TREE_TYPE (TREE_TYPE (fndecl)), newcall);
	}
    }

  /* Canonicalize llround (x) to lround (x) on LP64 targets where
     sizeof (long long) == sizeof (long).  */
  if (TYPE_PRECISION (long_long_integer_type_node)
      == TYPE_PRECISION (long_integer_type_node))
    {
      tree newfn = NULL_TREE;
      switch (fcode)
	{
	CASE_FLT_FN (BUILT_IN_LLCEIL):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
	  break;

	CASE_FLT_FN (BUILT_IN_LLFLOOR):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
	  break;

	CASE_FLT_FN (BUILT_IN_LLROUND):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
	  break;

	CASE_FLT_FN (BUILT_IN_LLRINT):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
	  break;

	default:
	  break;
	}

      if (newfn)
	{
	  tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
	  return fold_convert_loc (loc,
				   TREE_TYPE (TREE_TYPE (fndecl)), newcall);
	}
    }

  return NULL_TREE;
}
7333
/* Fold call to builtin cabs, cabsf or cabsl with argument ARG.  TYPE is the
   return type (the real type underlying the complex argument); FNDECL is
   the cabs declaration, used to rebuild the call.  Return NULL_TREE if no
   simplification can be made.  */

static tree
fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
{
  tree res;

  if (!validate_arg (arg, COMPLEX_TYPE)
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
    return NULL_TREE;

  /* Calculate the result when the argument is a constant:
     cabs (a + bi) is hypot (a, b).  */
  if (TREE_CODE (arg) == COMPLEX_CST
      && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
			      type, mpfr_hypot)))
    return res;

  if (TREE_CODE (arg) == COMPLEX_EXPR)
    {
      tree real = TREE_OPERAND (arg, 0);
      tree imag = TREE_OPERAND (arg, 1);

      /* If either part is zero, cabs is fabs of the other.  */
      if (real_zerop (real))
	return fold_build1_loc (loc, ABS_EXPR, type, imag);
      if (real_zerop (imag))
	return fold_build1_loc (loc, ABS_EXPR, type, real);

      /* cabs(x+xi) -> fabs(x)*sqrt(2).  */
      if (flag_unsafe_math_optimizations
	  && operand_equal_p (real, imag, OEP_PURE_SAME))
	{
	  const REAL_VALUE_TYPE sqrt2_trunc
	    = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
	  STRIP_NOPS (real);
	  return fold_build2_loc (loc, MULT_EXPR, type,
				  fold_build1_loc (loc, ABS_EXPR, type, real),
				  build_real (type, sqrt2_trunc));
	}
    }

  /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z).  */
  if (TREE_CODE (arg) == NEGATE_EXPR
      || TREE_CODE (arg) == CONJ_EXPR)
    return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));

  /* Expand cabs inline as sqrt (r*r + i*i).  Don't do this when
     optimizing for size.  */
  if (flag_unsafe_math_optimizations
      && optimize && optimize_function_for_speed_p (cfun))
    {
      tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);

      if (sqrtfn != NULL_TREE)
	{
	  tree rpart, ipart, result;

	  /* Save ARG so the argument expression is evaluated only
	     once despite feeding both the real and imaginary part.  */
	  arg = builtin_save_expr (arg);

	  rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
	  ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);

	  /* Each part is used twice below; save them as well.  */
	  rpart = builtin_save_expr (rpart);
	  ipart = builtin_save_expr (ipart);

	  result = fold_build2_loc (loc, PLUS_EXPR, type,
				    fold_build2_loc (loc, MULT_EXPR, type,
						     rpart, rpart),
				    fold_build2_loc (loc, MULT_EXPR, type,
						     ipart, ipart));

	  return build_call_expr_loc (loc, sqrtfn, 1, result);
	}
    }

  return NULL_TREE;
}
7411
7412 /* Build a complex (inf +- 0i) for the result of cproj. TYPE is the
7413 complex tree type of the result. If NEG is true, the imaginary
7414 zero is negative. */
7415
7416 static tree
7417 build_complex_cproj (tree type, bool neg)
7418 {
7419 REAL_VALUE_TYPE rinf, rzero = dconst0;
7420
7421 real_inf (&rinf);
7422 rzero.sign = neg;
7423 return build_complex (type, build_real (TREE_TYPE (type), rinf),
7424 build_real (TREE_TYPE (type), rzero));
7425 }
7426
/* Fold call to builtin cproj, cprojf or cprojl with argument ARG.  TYPE is the
   return type.  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cproj (location_t loc, tree arg, tree type)
{
  if (!validate_arg (arg, COMPLEX_TYPE)
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
    return NULL_TREE;

  /* If there are no infinities, return arg.  */
  if (! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (type))))
    return non_lvalue_loc (loc, arg);

  /* Calculate the result when the argument is a constant.  */
  if (TREE_CODE (arg) == COMPLEX_CST)
    {
      const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
      const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));

      /* A constant with an infinite part projects to (inf + 0i) with
	 the sign of the imaginary zero taken from the imaginary part;
	 finite constants project to themselves.  */
      if (real_isinf (real) || real_isinf (imag))
	return build_complex_cproj (type, imag->sign);
      else
	return arg;
    }
  else if (TREE_CODE (arg) == COMPLEX_EXPR)
    {
      tree real = TREE_OPERAND (arg, 0);
      tree imag = TREE_OPERAND (arg, 1);

      STRIP_NOPS (real);
      STRIP_NOPS (imag);

      /* If the real part is inf and the imag part is known to be
	 nonnegative, return (inf + 0i).  Remember side-effects are
	 possible in the imag part.  */
      if (TREE_CODE (real) == REAL_CST
	  && real_isinf (TREE_REAL_CST_PTR (real))
	  && tree_expr_nonnegative_p (imag))
	return omit_one_operand_loc (loc, type,
				     build_complex_cproj (type, false),
				     arg);

      /* If the imag part is inf, return (inf+I*copysign(0,imag)).
	 Remember side-effects are possible in the real part.  */
      if (TREE_CODE (imag) == REAL_CST
	  && real_isinf (TREE_REAL_CST_PTR (imag)))
	return
	  omit_one_operand_loc (loc, type,
				build_complex_cproj (type, TREE_REAL_CST_PTR
						     (imag)->sign), arg);
    }

  return NULL_TREE;
}
7482
/* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
   TYPE is the return type.  Return NULL_TREE if no simplification can
   be made.  */

static tree
fold_builtin_sqrt (location_t loc, tree arg, tree type)
{

  enum built_in_function fcode;
  tree res;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
    return res;

  /* Optimize sqrt(expN(x)) = expN(x*0.5).  */
  fcode = builtin_mathfn_code (arg);
  if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
    {
      tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
      arg = fold_build2_loc (loc, MULT_EXPR, type,
			     CALL_EXPR_ARG (arg, 0),
			     build_real (type, dconsthalf));
      return build_call_expr_loc (loc, expfn, 1, arg);
    }

  /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)).  */
  if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
    {
      tree powfn = mathfn_built_in (type, BUILT_IN_POW);

      if (powfn)
	{
	  tree arg0 = CALL_EXPR_ARG (arg, 0);
	  tree tree_root;
	  /* The inner root was either sqrt or cbrt.  */
	  /* This was a conditional expression but it triggered a bug
	     in Sun C 5.5.  */
	  REAL_VALUE_TYPE dconstroot;
	  if (BUILTIN_SQRT_P (fcode))
	    dconstroot = dconsthalf;
	  else
	    dconstroot = dconst_third ();

	  /* Adjust for the outer root: decrementing the binary
	     exponent halves the value (1/2 -> 1/4, 1/3 -> 1/6).  */
	  SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
	  dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
	  tree_root = build_real (type, dconstroot);
	  return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
	}
    }

  /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5).  */
  if (flag_unsafe_math_optimizations
      && (fcode == BUILT_IN_POW
	  || fcode == BUILT_IN_POWF
	  || fcode == BUILT_IN_POWL))
    {
      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
      tree arg0 = CALL_EXPR_ARG (arg, 0);
      tree arg1 = CALL_EXPR_ARG (arg, 1);
      tree narg1;
      /* Use |x| unless x is known nonnegative, since the rewritten
	 pow has a possibly fractional exponent.  */
      if (!tree_expr_nonnegative_p (arg0))
	arg0 = build1 (ABS_EXPR, type, arg0);
      narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
			       build_real (type, dconsthalf));
      return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
    }

  return NULL_TREE;
}
7556
/* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
   TYPE is the return type.  Return NULL_TREE if no simplification can be
   made.  */

static tree
fold_builtin_cbrt (location_t loc, tree arg, tree type)
{
  const enum built_in_function fcode = builtin_mathfn_code (arg);
  tree res;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
    return res;

  if (flag_unsafe_math_optimizations)
    {
      /* Optimize cbrt(expN(x)) -> expN(x/3).  */
      if (BUILTIN_EXPONENT_P (fcode))
	{
	  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
	  const REAL_VALUE_TYPE third_trunc =
	    real_value_truncate (TYPE_MODE (type), dconst_third ());
	  arg = fold_build2_loc (loc, MULT_EXPR, type,
				 CALL_EXPR_ARG (arg, 0),
				 build_real (type, third_trunc));
	  return build_call_expr_loc (loc, expfn, 1, arg);
	}

      /* Optimize cbrt(sqrt(x)) -> pow(x,1/6).  */
      if (BUILTIN_SQRT_P (fcode))
	{
	  tree powfn = mathfn_built_in (type, BUILT_IN_POW);

	  if (powfn)
	    {
	      tree arg0 = CALL_EXPR_ARG (arg, 0);
	      tree tree_root;
	      REAL_VALUE_TYPE dconstroot = dconst_third ();

	      /* Decrementing the binary exponent halves 1/3 to 1/6.  */
	      SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
	      dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
	      tree_root = build_real (type, dconstroot);
	      return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
	    }
	}

      /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative.  */
      if (BUILTIN_CBRT_P (fcode))
	{
	  tree arg0 = CALL_EXPR_ARG (arg, 0);
	  if (tree_expr_nonnegative_p (arg0))
	    {
	      tree powfn = mathfn_built_in (type, BUILT_IN_POW);

	      if (powfn)
		{
		  tree tree_root;
		  REAL_VALUE_TYPE dconstroot;

		  /* 1/9 computed as (1/3) * (1/3).  */
		  real_arithmetic (&dconstroot, MULT_EXPR,
				   dconst_third_ptr (), dconst_third_ptr ());
		  dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
		  tree_root = build_real (type, dconstroot);
		  return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
		}
	    }
	}

      /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative.  */
      if (fcode == BUILT_IN_POW
	  || fcode == BUILT_IN_POWF
	  || fcode == BUILT_IN_POWL)
	{
	  tree arg00 = CALL_EXPR_ARG (arg, 0);
	  tree arg01 = CALL_EXPR_ARG (arg, 1);
	  if (tree_expr_nonnegative_p (arg00))
	    {
	      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
	      const REAL_VALUE_TYPE dconstroot
		= real_value_truncate (TYPE_MODE (type), dconst_third ());
	      tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
					     build_real (type, dconstroot));
	      return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
	    }
	}
    }
  return NULL_TREE;
}
7647
7648 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7649 TYPE is the type of the return value. Return NULL_TREE if no
7650 simplification can be made. */
7651
7652 static tree
7653 fold_builtin_cos (location_t loc,
7654 tree arg, tree type, tree fndecl)
7655 {
7656 tree res, narg;
7657
7658 if (!validate_arg (arg, REAL_TYPE))
7659 return NULL_TREE;
7660
7661 /* Calculate the result when the argument is a constant. */
7662 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7663 return res;
7664
7665 /* Optimize cos(-x) into cos (x). */
7666 if ((narg = fold_strip_sign_ops (arg)))
7667 return build_call_expr_loc (loc, fndecl, 1, narg);
7668
7669 return NULL_TREE;
7670 }
7671
7672 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7673 Return NULL_TREE if no simplification can be made. */
7674
7675 static tree
7676 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
7677 {
7678 if (validate_arg (arg, REAL_TYPE))
7679 {
7680 tree res, narg;
7681
7682 /* Calculate the result when the argument is a constant. */
7683 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7684 return res;
7685
7686 /* Optimize cosh(-x) into cosh (x). */
7687 if ((narg = fold_strip_sign_ops (arg)))
7688 return build_call_expr_loc (loc, fndecl, 1, narg);
7689 }
7690
7691 return NULL_TREE;
7692 }
7693
7694 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7695 argument ARG. TYPE is the type of the return value. Return
7696 NULL_TREE if no simplification can be made. */
7697
7698 static tree
7699 fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
7700 bool hyper)
7701 {
7702 if (validate_arg (arg, COMPLEX_TYPE)
7703 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7704 {
7705 tree tmp;
7706
7707 /* Calculate the result when the argument is a constant. */
7708 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
7709 return tmp;
7710
7711 /* Optimize fn(-x) into fn(x). */
7712 if ((tmp = fold_strip_sign_ops (arg)))
7713 return build_call_expr_loc (loc, fndecl, 1, tmp);
7714 }
7715
7716 return NULL_TREE;
7717 }
7718
7719 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7720 Return NULL_TREE if no simplification can be made. */
7721
7722 static tree
7723 fold_builtin_tan (tree arg, tree type)
7724 {
7725 enum built_in_function fcode;
7726 tree res;
7727
7728 if (!validate_arg (arg, REAL_TYPE))
7729 return NULL_TREE;
7730
7731 /* Calculate the result when the argument is a constant. */
7732 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7733 return res;
7734
7735 /* Optimize tan(atan(x)) = x. */
7736 fcode = builtin_mathfn_code (arg);
7737 if (flag_unsafe_math_optimizations
7738 && (fcode == BUILT_IN_ATAN
7739 || fcode == BUILT_IN_ATANF
7740 || fcode == BUILT_IN_ATANL))
7741 return CALL_EXPR_ARG (arg, 0);
7742
7743 return NULL_TREE;
7744 }
7745
/* Fold function call to builtin sincos, sincosf, or sincosl.  ARG0 is
   the real argument; ARG1 and ARG2 are pointers to the locations that
   receive the sine and the cosine respectively.  Return NULL_TREE if
   no simplification can be made.  */

static tree
fold_builtin_sincos (location_t loc,
		     tree arg0, tree arg1, tree arg2)
{
  tree type;
  tree res, fn, call;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE))
    return NULL_TREE;

  type = TREE_TYPE (arg0);

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
    return res;

  /* Canonicalize sincos to cexpi.  Only possible when the target's C
     library provides the C99 complex functions.  */
  if (!TARGET_C99_FUNCTIONS)
    return NULL_TREE;
  fn = mathfn_built_in (type, BUILT_IN_CEXPI);
  if (!fn)
    return NULL_TREE;

  /* Build cexpi (arg0) once and wrap it in a save_expr so the call is
     evaluated only a single time even though both parts are used.  */
  call = build_call_expr_loc (loc, fn, 1, arg0);
  call = builtin_save_expr (call);

  /* cexpi (x) = cos (x) + i*sin (x): store the imaginary part through
     ARG1 (sin) and the real part through ARG2 (cos).  */
  return build2 (COMPOUND_EXPR, void_type_node,
		 build2 (MODIFY_EXPR, void_type_node,
			 build_fold_indirect_ref_loc (loc, arg1),
			 build1 (IMAGPART_EXPR, type, call)),
		 build2 (MODIFY_EXPR, void_type_node,
			 build_fold_indirect_ref_loc (loc, arg2),
			 build1 (REALPART_EXPR, type, call)));
}
7785
/* Fold function call to builtin cexp, cexpf, or cexpl.  ARG0 is the
   complex argument and TYPE the (complex) return type.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cexp (location_t loc, tree arg0, tree type)
{
  tree rtype;
  tree realp, imagp, ifn;
  tree res;

  if (!validate_arg (arg0, COMPLEX_TYPE)
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
    return res;

  /* The scalar real type underlying the complex argument.  */
  rtype = TREE_TYPE (TREE_TYPE (arg0));

  /* In case we can figure out the real part of arg0 and it is constant zero
     fold to cexpi.  Requires the C99 complex functions in the runtime.  */
  if (!TARGET_C99_FUNCTIONS)
    return NULL_TREE;
  ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
  if (!ifn)
    return NULL_TREE;

  /* cexp (0 + i*y) == cexpi (y).  */
  if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
      && real_zerop (realp))
    {
      tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
      return build_call_expr_loc (loc, ifn, 1, narg);
    }

  /* In case we can easily decompose real and imaginary parts split cexp
     to exp (r) * cexpi (i).  */
  if (flag_unsafe_math_optimizations
      && realp)
    {
      tree rfn, rcall, icall;

      rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
      if (!rfn)
	return NULL_TREE;

      imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
      if (!imagp)
	return NULL_TREE;

      /* Wrap both calls in save_exprs so each is evaluated once even
	 though rcall and icall appear twice in the result below.  */
      icall = build_call_expr_loc (loc, ifn, 1, imagp);
      icall = builtin_save_expr (icall);
      rcall = build_call_expr_loc (loc, rfn, 1, realp);
      rcall = builtin_save_expr (rcall);
      /* Result: complex (exp(r)*cos(i), exp(r)*sin(i)).  */
      return fold_build2_loc (loc, COMPLEX_EXPR, type,
			      fold_build2_loc (loc, MULT_EXPR, rtype,
					       rcall,
					       fold_build1_loc (loc, REALPART_EXPR,
								rtype, icall)),
			      fold_build2_loc (loc, MULT_EXPR, rtype,
					       rcall,
					       fold_build1_loc (loc, IMAGPART_EXPR,
								rtype, icall)));
    }

  return NULL_TREE;
}
7853
7854 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7855 Return NULL_TREE if no simplification can be made. */
7856
7857 static tree
7858 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
7859 {
7860 if (!validate_arg (arg, REAL_TYPE))
7861 return NULL_TREE;
7862
7863 /* Optimize trunc of constant value. */
7864 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7865 {
7866 REAL_VALUE_TYPE r, x;
7867 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7868
7869 x = TREE_REAL_CST (arg);
7870 real_trunc (&r, TYPE_MODE (type), &x);
7871 return build_real (type, r);
7872 }
7873
7874 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7875 }
7876
7877 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7878 Return NULL_TREE if no simplification can be made. */
7879
7880 static tree
7881 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
7882 {
7883 if (!validate_arg (arg, REAL_TYPE))
7884 return NULL_TREE;
7885
7886 /* Optimize floor of constant value. */
7887 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7888 {
7889 REAL_VALUE_TYPE x;
7890
7891 x = TREE_REAL_CST (arg);
7892 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7893 {
7894 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7895 REAL_VALUE_TYPE r;
7896
7897 real_floor (&r, TYPE_MODE (type), &x);
7898 return build_real (type, r);
7899 }
7900 }
7901
7902 /* Fold floor (x) where x is nonnegative to trunc (x). */
7903 if (tree_expr_nonnegative_p (arg))
7904 {
7905 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7906 if (truncfn)
7907 return build_call_expr_loc (loc, truncfn, 1, arg);
7908 }
7909
7910 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7911 }
7912
7913 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7914 Return NULL_TREE if no simplification can be made. */
7915
7916 static tree
7917 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
7918 {
7919 if (!validate_arg (arg, REAL_TYPE))
7920 return NULL_TREE;
7921
7922 /* Optimize ceil of constant value. */
7923 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7924 {
7925 REAL_VALUE_TYPE x;
7926
7927 x = TREE_REAL_CST (arg);
7928 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7929 {
7930 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7931 REAL_VALUE_TYPE r;
7932
7933 real_ceil (&r, TYPE_MODE (type), &x);
7934 return build_real (type, r);
7935 }
7936 }
7937
7938 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7939 }
7940
7941 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7942 Return NULL_TREE if no simplification can be made. */
7943
7944 static tree
7945 fold_builtin_round (location_t loc, tree fndecl, tree arg)
7946 {
7947 if (!validate_arg (arg, REAL_TYPE))
7948 return NULL_TREE;
7949
7950 /* Optimize round of constant value. */
7951 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7952 {
7953 REAL_VALUE_TYPE x;
7954
7955 x = TREE_REAL_CST (arg);
7956 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7957 {
7958 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7959 REAL_VALUE_TYPE r;
7960
7961 real_round (&r, TYPE_MODE (type), &x);
7962 return build_real (type, r);
7963 }
7964 }
7965
7966 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7967 }
7968
/* Fold function call to builtin lround, lroundf or lroundl (or the
   corresponding long long versions) and other rounding functions.  ARG
   is the argument to the call.  Return NULL_TREE if no simplification
   can be made.  */

static tree
fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Optimize lround of constant value.  */
  if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);

      /* Only finite values have a well-defined integer result.  */
      if (real_isfinite (&x))
	{
	  tree itype = TREE_TYPE (TREE_TYPE (fndecl)); /* Integer result type.  */
	  tree ftype = TREE_TYPE (arg);		       /* Floating arg type.  */
	  double_int val;
	  REAL_VALUE_TYPE r;

	  /* Round X according to which family of builtin this is.  */
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    CASE_FLT_FN (BUILT_IN_IFLOOR):
	    CASE_FLT_FN (BUILT_IN_LFLOOR):
	    CASE_FLT_FN (BUILT_IN_LLFLOOR):
	      real_floor (&r, TYPE_MODE (ftype), &x);
	      break;

	    CASE_FLT_FN (BUILT_IN_ICEIL):
	    CASE_FLT_FN (BUILT_IN_LCEIL):
	    CASE_FLT_FN (BUILT_IN_LLCEIL):
	      real_ceil (&r, TYPE_MODE (ftype), &x);
	      break;

	    CASE_FLT_FN (BUILT_IN_IROUND):
	    CASE_FLT_FN (BUILT_IN_LROUND):
	    CASE_FLT_FN (BUILT_IN_LLROUND):
	      real_round (&r, TYPE_MODE (ftype), &x);
	      break;

	    default:
	      gcc_unreachable ();
	    }

	  /* Convert to an integer constant, but only if it fits the
	     result type; otherwise leave the call alone.  */
	  real_to_integer2 ((HOST_WIDE_INT *)&val.low, &val.high, &r);
	  if (double_int_fits_to_tree_p (itype, val))
	    return double_int_to_tree (itype, val);
	}
    }

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x).  */
      if (tree_expr_nonnegative_p (arg))
	return fold_build1_loc (loc, FIX_TRUNC_EXPR,
				TREE_TYPE (TREE_TYPE (fndecl)), arg);
      break;
    default:;
    }

  return fold_fixed_mathfn (loc, fndecl, arg);
}
8036
/* Fold function call to builtin ffs, clz, ctz, popcount and parity
   and their long and long long variants (i.e. ffsl and ffsll).  ARG is
   the argument to the call.  Return NULL_TREE if no simplification can
   be made.  */

static tree
fold_builtin_bitop (tree fndecl, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Optimize for constant argument.  */
  if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
    {
      /* The constant is held as a (LO, HI) pair of host words.  */
      HOST_WIDE_INT hi, width, result;
      unsigned HOST_WIDE_INT lo;
      tree type;

      type = TREE_TYPE (arg);
      width = TYPE_PRECISION (type);
      lo = TREE_INT_CST_LOW (arg);

      /* Clear all the bits that are beyond the type's precision.  */
      if (width > HOST_BITS_PER_WIDE_INT)
	{
	  hi = TREE_INT_CST_HIGH (arg);
	  if (width < HOST_BITS_PER_DOUBLE_INT)
	    hi &= ~((unsigned HOST_WIDE_INT) (-1)
		    << (width - HOST_BITS_PER_WIDE_INT));
	}
      else
	{
	  hi = 0;
	  if (width < HOST_BITS_PER_WIDE_INT)
	    lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
	}

      switch (DECL_FUNCTION_CODE (fndecl))
	{
	CASE_INT_FN (BUILT_IN_FFS):
	  /* ffs: one plus the index of the least significant set bit,
	     or zero when the argument is zero.  */
	  if (lo != 0)
	    result = ffs_hwi (lo);
	  else if (hi != 0)
	    result = HOST_BITS_PER_WIDE_INT + ffs_hwi (hi);
	  else
	    result = 0;
	  break;

	CASE_INT_FN (BUILT_IN_CLZ):
	  /* clz: count of leading zero bits.  At zero, use the
	     target-defined value if there is one, else the width.  */
	  if (hi != 0)
	    result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
	  else if (lo != 0)
	    result = width - floor_log2 (lo) - 1;
	  else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
	    result = width;
	  break;

	CASE_INT_FN (BUILT_IN_CTZ):
	  /* ctz: count of trailing zero bits, with the same
	     target-defined handling at zero as clz.  */
	  if (lo != 0)
	    result = ctz_hwi (lo);
	  else if (hi != 0)
	    result = HOST_BITS_PER_WIDE_INT + ctz_hwi (hi);
	  else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
	    result = width;
	  break;

	CASE_INT_FN (BUILT_IN_CLRSB):
	  /* clrsb: number of leading redundant sign bits.  Complement
	     negative values first so that both signs reduce to
	     counting leading zeros below the sign bit.  */
	  if (width > HOST_BITS_PER_WIDE_INT
	      && (hi & ((unsigned HOST_WIDE_INT) 1
			<< (width - HOST_BITS_PER_WIDE_INT - 1))) != 0)
	    {
	      hi = ~hi & ~((unsigned HOST_WIDE_INT) (-1)
			   << (width - HOST_BITS_PER_WIDE_INT - 1));
	      lo = ~lo;
	    }
	  else if (width <= HOST_BITS_PER_WIDE_INT
		   && (lo & ((unsigned HOST_WIDE_INT) 1 << (width - 1))) != 0)
	    lo = ~lo & ~((unsigned HOST_WIDE_INT) (-1) << (width - 1));
	  if (hi != 0)
	    result = width - floor_log2 (hi) - 2 - HOST_BITS_PER_WIDE_INT;
	  else if (lo != 0)
	    result = width - floor_log2 (lo) - 2;
	  else
	    result = width - 1;
	  break;

	CASE_INT_FN (BUILT_IN_POPCOUNT):
	  /* popcount: count set bits by repeatedly clearing the
	     lowest one (Kernighan's method).  */
	  result = 0;
	  while (lo)
	    result++, lo &= lo - 1;
	  while (hi)
	    result++, hi &= (unsigned HOST_WIDE_INT) hi - 1;
	  break;

	CASE_INT_FN (BUILT_IN_PARITY):
	  /* parity: popcount modulo two.  */
	  result = 0;
	  while (lo)
	    result++, lo &= lo - 1;
	  while (hi)
	    result++, hi &= (unsigned HOST_WIDE_INT) hi - 1;
	  result &= 1;
	  break;

	default:
	  gcc_unreachable ();
	}

      return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
    }

  return NULL_TREE;
}
8149
/* Fold function call to builtin_bswap and the short, long and long long
   variants.  FNDECL determines the width; ARG is the value to swap.
   Return NULL_TREE if no simplification can be made.  */
static tree
fold_builtin_bswap (tree fndecl, tree arg)
{
  if (! validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Optimize constant value.  */
  if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
    {
      /* The input and the byte-reversed result are each kept as a
	 (LO, HI) pair of host words.  */
      HOST_WIDE_INT hi, width, r_hi = 0;
      unsigned HOST_WIDE_INT lo, r_lo = 0;
      tree type = TREE_TYPE (TREE_TYPE (fndecl));

      width = TYPE_PRECISION (type);
      lo = TREE_INT_CST_LOW (arg);
      hi = TREE_INT_CST_HIGH (arg);

      switch (DECL_FUNCTION_CODE (fndecl))
	{
	  case BUILT_IN_BSWAP16:
	  case BUILT_IN_BSWAP32:
	  case BUILT_IN_BSWAP64:
	    {
	      int s;

	      /* Copy each byte to its mirrored position: the byte at
		 bit offset S lands at bit offset WIDTH - S - 8.  */
	      for (s = 0; s < width; s += 8)
		{
		  int d = width - s - 8; /* Destination bit offset.  */
		  unsigned HOST_WIDE_INT byte;

		  /* Extract the byte at offset S from (LO, HI).  */
		  if (s < HOST_BITS_PER_WIDE_INT)
		    byte = (lo >> s) & 0xff;
		  else
		    byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;

		  /* Deposit it at offset D into (R_LO, R_HI).  */
		  if (d < HOST_BITS_PER_WIDE_INT)
		    r_lo |= byte << d;
		  else
		    r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
		}
	    }

	    break;

	default:
	  gcc_unreachable ();
	}

      if (width < HOST_BITS_PER_WIDE_INT)
	return build_int_cst (type, r_lo);
      else
	return build_int_cst_wide (type, r_lo, r_hi);
    }

  return NULL_TREE;
}
8208
/* A subroutine of fold_builtin to fold the various logarithmic
   functions.  FNDECL is the decl of the logarithm builtin being
   folded and ARG its argument.  FUNC is the corresponding MPFR
   logarithm function and also identifies which log flavor this is.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_logarithm (location_t loc, tree fndecl, tree arg,
			int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
{
  if (validate_arg (arg, REAL_TYPE))
    {
      tree type = TREE_TYPE (TREE_TYPE (fndecl));
      tree res;
      const enum built_in_function fcode = builtin_mathfn_code (arg);

      /* Calculate the result when the argument is a constant.  */
      if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
	return res;

      /* Special case, optimize logN(expN(x)) = x.  The MPFR function
	 pointer identifies which base N this builtin computes.  */
      if (flag_unsafe_math_optimizations
	  && ((func == mpfr_log
	       && (fcode == BUILT_IN_EXP
		   || fcode == BUILT_IN_EXPF
		   || fcode == BUILT_IN_EXPL))
	      || (func == mpfr_log2
		  && (fcode == BUILT_IN_EXP2
		      || fcode == BUILT_IN_EXP2F
		      || fcode == BUILT_IN_EXP2L))
	      || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
	return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));

      /* Optimize logN(func()) for various exponential functions.  We
	 want to determine the value "x" and the power "exponent" in
	 order to transform logN(x**exponent) into exponent*logN(x).  */
      if (flag_unsafe_math_optimizations)
	{
	  tree exponent = 0, x = 0;

	  switch (fcode)
	    {
	    CASE_FLT_FN (BUILT_IN_EXP):
	      /* Prepare to do logN(exp(exponent) -> exponent*logN(e).  */
	      x = build_real (type, real_value_truncate (TYPE_MODE (type),
							 dconst_e ()));
	      exponent = CALL_EXPR_ARG (arg, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_EXP2):
	      /* Prepare to do logN(exp2(exponent) -> exponent*logN(2).  */
	      x = build_real (type, dconst2);
	      exponent = CALL_EXPR_ARG (arg, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_EXP10):
	    CASE_FLT_FN (BUILT_IN_POW10):
	      /* Prepare to do logN(exp10(exponent) -> exponent*logN(10).  */
	      {
		REAL_VALUE_TYPE dconst10;
		real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
		x = build_real (type, dconst10);
	      }
	      exponent = CALL_EXPR_ARG (arg, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_SQRT):
	      /* Prepare to do logN(sqrt(x) -> 0.5*logN(x).  */
	      x = CALL_EXPR_ARG (arg, 0);
	      exponent = build_real (type, dconsthalf);
	      break;
	    CASE_FLT_FN (BUILT_IN_CBRT):
	      /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x).  */
	      x = CALL_EXPR_ARG (arg, 0);
	      exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
								dconst_third ()));
	      break;
	    CASE_FLT_FN (BUILT_IN_POW):
	      /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x).  */
	      x = CALL_EXPR_ARG (arg, 0);
	      exponent = CALL_EXPR_ARG (arg, 1);
	      break;
	    default:
	      break;
	    }

	  /* Now perform the optimization.  */
	  if (x && exponent)
	    {
	      tree logfn = build_call_expr_loc (loc, fndecl, 1, x);
	      return fold_build2_loc (loc, MULT_EXPR, type, exponent, logfn);
	    }
	}
    }

  return NULL_TREE;
}
8301
8302 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8303 NULL_TREE if no simplification can be made. */
8304
8305 static tree
8306 fold_builtin_hypot (location_t loc, tree fndecl,
8307 tree arg0, tree arg1, tree type)
8308 {
8309 tree res, narg0, narg1;
8310
8311 if (!validate_arg (arg0, REAL_TYPE)
8312 || !validate_arg (arg1, REAL_TYPE))
8313 return NULL_TREE;
8314
8315 /* Calculate the result when the argument is a constant. */
8316 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8317 return res;
8318
8319 /* If either argument to hypot has a negate or abs, strip that off.
8320 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8321 narg0 = fold_strip_sign_ops (arg0);
8322 narg1 = fold_strip_sign_ops (arg1);
8323 if (narg0 || narg1)
8324 {
8325 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
8326 narg1 ? narg1 : arg1);
8327 }
8328
8329 /* If either argument is zero, hypot is fabs of the other. */
8330 if (real_zerop (arg0))
8331 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
8332 else if (real_zerop (arg1))
8333 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
8334
8335 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8336 if (flag_unsafe_math_optimizations
8337 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8338 {
8339 const REAL_VALUE_TYPE sqrt2_trunc
8340 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
8341 return fold_build2_loc (loc, MULT_EXPR, type,
8342 fold_build1_loc (loc, ABS_EXPR, type, arg0),
8343 build_real (type, sqrt2_trunc));
8344 }
8345
8346 return NULL_TREE;
8347 }
8348
8349
/* Fold a builtin function call to pow, powf, or powl.  ARG0 is the
   base, ARG1 the exponent and TYPE the return type.  Return NULL_TREE
   if no simplification can be made.  */
static tree
fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
{
  tree res;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
    return res;

  /* Optimize pow(1.0,y) = 1.0.  */
  if (real_onep (arg0))
    return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);

  if (TREE_CODE (arg1) == REAL_CST
      && !TREE_OVERFLOW (arg1))
    {
      REAL_VALUE_TYPE cint;
      REAL_VALUE_TYPE c;
      HOST_WIDE_INT n;

      c = TREE_REAL_CST (arg1);

      /* Optimize pow(x,0.0) = 1.0.  */
      if (REAL_VALUES_EQUAL (c, dconst0))
	return omit_one_operand_loc (loc, type, build_real (type, dconst1),
				     arg0);

      /* Optimize pow(x,1.0) = x.  */
      if (REAL_VALUES_EQUAL (c, dconst1))
	return arg0;

      /* Optimize pow(x,-1.0) = 1.0/x.  */
      if (REAL_VALUES_EQUAL (c, dconstm1))
	return fold_build2_loc (loc, RDIV_EXPR, type,
				build_real (type, dconst1), arg0);

      /* Optimize pow(x,0.5) = sqrt(x).  */
      if (flag_unsafe_math_optimizations
	  && REAL_VALUES_EQUAL (c, dconsthalf))
	{
	  tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);

	  if (sqrtfn != NULL_TREE)
	    return build_call_expr_loc (loc, sqrtfn, 1, arg0);
	}

      /* Optimize pow(x,1.0/3.0) = cbrt(x).  */
      if (flag_unsafe_math_optimizations)
	{
	  const REAL_VALUE_TYPE dconstroot
	    = real_value_truncate (TYPE_MODE (type), dconst_third ());

	  if (REAL_VALUES_EQUAL (c, dconstroot))
	    {
	      tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
	      if (cbrtfn != NULL_TREE)
		return build_call_expr_loc (loc, cbrtfn, 1, arg0);
	    }
	}

      /* Check for an integer exponent: round-trip C through the
	 integers and compare bit-for-bit.  */
      n = real_to_integer (&c);
      real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
      if (real_identical (&c, &cint))
	{
	  /* Attempt to evaluate pow at compile-time, unless this should
	     raise an exception.  */
	  if (TREE_CODE (arg0) == REAL_CST
	      && !TREE_OVERFLOW (arg0)
	      && (n > 0
		  || (!flag_trapping_math && !flag_errno_math)
		  || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
	    {
	      REAL_VALUE_TYPE x;
	      bool inexact;

	      x = TREE_REAL_CST (arg0);
	      inexact = real_powi (&x, TYPE_MODE (type), &x, n);
	      /* An inexact result is only usable when unsafe math
		 optimizations permit losing the exact value.  */
	      if (flag_unsafe_math_optimizations || !inexact)
		return build_real (type, x);
	    }

	  /* Strip sign ops from even integer powers.  */
	  if ((n & 1) == 0 && flag_unsafe_math_optimizations)
	    {
	      tree narg0 = fold_strip_sign_ops (arg0);
	      if (narg0)
		return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
	    }
	}
    }

  if (flag_unsafe_math_optimizations)
    {
      const enum built_in_function fcode = builtin_mathfn_code (arg0);

      /* Optimize pow(expN(x),y) = expN(x*y).  */
      if (BUILTIN_EXPONENT_P (fcode))
	{
	  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
	  tree arg = CALL_EXPR_ARG (arg0, 0);
	  arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
	  return build_call_expr_loc (loc, expfn, 1, arg);
	}

      /* Optimize pow(sqrt(x),y) = pow(x,y*0.5).  */
      if (BUILTIN_SQRT_P (fcode))
	{
	  tree narg0 = CALL_EXPR_ARG (arg0, 0);
	  tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
					build_real (type, dconsthalf));
	  return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
	}

      /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative.  */
      if (BUILTIN_CBRT_P (fcode))
	{
	  tree arg = CALL_EXPR_ARG (arg0, 0);
	  if (tree_expr_nonnegative_p (arg))
	    {
	      const REAL_VALUE_TYPE dconstroot
		= real_value_truncate (TYPE_MODE (type), dconst_third ());
	      tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
					    build_real (type, dconstroot));
	      return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
	    }
	}

      /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative.  */
      if (fcode == BUILT_IN_POW
	  || fcode == BUILT_IN_POWF
	  || fcode == BUILT_IN_POWL)
	{
	  tree arg00 = CALL_EXPR_ARG (arg0, 0);
	  if (tree_expr_nonnegative_p (arg00))
	    {
	      tree arg01 = CALL_EXPR_ARG (arg0, 1);
	      tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
	      return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
	    }
	}
    }

  return NULL_TREE;
}
8501
8502 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8503 Return NULL_TREE if no simplification can be made. */
8504 static tree
8505 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
8506 tree arg0, tree arg1, tree type)
8507 {
8508 if (!validate_arg (arg0, REAL_TYPE)
8509 || !validate_arg (arg1, INTEGER_TYPE))
8510 return NULL_TREE;
8511
8512 /* Optimize pow(1.0,y) = 1.0. */
8513 if (real_onep (arg0))
8514 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8515
8516 if (host_integerp (arg1, 0))
8517 {
8518 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
8519
8520 /* Evaluate powi at compile-time. */
8521 if (TREE_CODE (arg0) == REAL_CST
8522 && !TREE_OVERFLOW (arg0))
8523 {
8524 REAL_VALUE_TYPE x;
8525 x = TREE_REAL_CST (arg0);
8526 real_powi (&x, TYPE_MODE (type), &x, c);
8527 return build_real (type, x);
8528 }
8529
8530 /* Optimize pow(x,0) = 1.0. */
8531 if (c == 0)
8532 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8533 arg0);
8534
8535 /* Optimize pow(x,1) = x. */
8536 if (c == 1)
8537 return arg0;
8538
8539 /* Optimize pow(x,-1) = 1.0/x. */
8540 if (c == -1)
8541 return fold_build2_loc (loc, RDIV_EXPR, type,
8542 build_real (type, dconst1), arg0);
8543 }
8544
8545 return NULL_TREE;
8546 }
8547
/* A subroutine of fold_builtin to fold the various exponent
   functions.  FNDECL is the decl of the builtin being folded and ARG
   its argument.  FUNC is the corresponding MPFR exponent function and
   also identifies which base this builtin computes.  Return NULL_TREE
   if no simplification can be made.  */

static tree
fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
		       int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
{
  if (validate_arg (arg, REAL_TYPE))
    {
      tree type = TREE_TYPE (TREE_TYPE (fndecl));
      tree res;

      /* Calculate the result when the argument is a constant.  */
      if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
	return res;

      /* Optimize expN(logN(x)) = x, matching the log variant with the
	 same base as this exponential via the MPFR function pointer.  */
      if (flag_unsafe_math_optimizations)
	{
	  const enum built_in_function fcode = builtin_mathfn_code (arg);

	  if ((func == mpfr_exp
	       && (fcode == BUILT_IN_LOG
		   || fcode == BUILT_IN_LOGF
		   || fcode == BUILT_IN_LOGL))
	      || (func == mpfr_exp2
		  && (fcode == BUILT_IN_LOG2
		      || fcode == BUILT_IN_LOG2F
		      || fcode == BUILT_IN_LOG2L))
	      || (func == mpfr_exp10
		  && (fcode == BUILT_IN_LOG10
		      || fcode == BUILT_IN_LOG10F
		      || fcode == BUILT_IN_LOG10L)))
	    return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
	}
    }

  return NULL_TREE;
}
8588
8589 /* Return true if VAR is a VAR_DECL or a component thereof. */
8590
8591 static bool
8592 var_decl_component_p (tree var)
8593 {
8594 tree inner = var;
8595 while (handled_component_p (inner))
8596 inner = TREE_OPERAND (inner, 0);
8597 return SSA_VAR_P (inner);
8598 }
8599
/* Fold function call to builtin memset.  DEST, C and LEN are the call
   arguments, TYPE the return type of the call, and IGNORE is true when
   the result value is unused.  The call is folded to a single scalar
   store when LEN matches the size of an integral or pointer element
   type at DEST and the destination is sufficiently aligned.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_memset (location_t loc, tree dest, tree c, tree len,
		     tree type, bool ignore)
{
  tree var, ret, etype;
  unsigned HOST_WIDE_INT length, cval;

  if (! validate_arg (dest, POINTER_TYPE)
      || ! validate_arg (c, INTEGER_TYPE)
      || ! validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  if (! host_integerp (len, 1))
    return NULL_TREE;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    return omit_one_operand_loc (loc, type, dest, c);

  if (TREE_CODE (c) != INTEGER_CST || TREE_SIDE_EFFECTS (dest))
    return NULL_TREE;

  /* Only handle a destination that is the address of a non-volatile
     variable (or component thereof).  */
  var = dest;
  STRIP_NOPS (var);
  if (TREE_CODE (var) != ADDR_EXPR)
    return NULL_TREE;

  var = TREE_OPERAND (var, 0);
  if (TREE_THIS_VOLATILE (var))
    return NULL_TREE;

  etype = TREE_TYPE (var);
  if (TREE_CODE (etype) == ARRAY_TYPE)
    etype = TREE_TYPE (etype);

  if (!INTEGRAL_TYPE_P (etype)
      && !POINTER_TYPE_P (etype))
    return NULL_TREE;

  if (! var_decl_component_p (var))
    return NULL_TREE;

  /* The store must cover exactly one element and the destination must
     be aligned at least to the length being written.  */
  length = tree_low_cst (len, 1);
  if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
      || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
    return NULL_TREE;

  if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
    return NULL_TREE;

  if (integer_zerop (c))
    cval = 0;
  else
    {
      if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
	return NULL_TREE;

      /* Replicate the low byte of C across the whole word.  The
	 (cval << 31) << 1 form avoids an undefined shift by 32 when
	 HOST_WIDE_INT is only 32 bits wide.  */
      cval = TREE_INT_CST_LOW (c);
      cval &= 0xff;
      cval |= cval << 8;
      cval |= cval << 16;
      cval |= (cval << 31) << 1;
    }

  /* Build *(etype *) dest = cval.  */
  ret = build_int_cst_type (etype, cval);
  var = build_fold_indirect_ref_loc (loc,
				     fold_convert_loc (loc,
						       build_pointer_type (etype),
						       dest));
  ret = build2 (MODIFY_EXPR, etype, var, ret);
  if (ignore)
    return ret;

  /* The caller uses the result: yield DEST, sequenced after the store.  */
  return omit_one_operand_loc (loc, type, dest, ret);
}
8678
8679 /* Fold function call to builtin memset. Return
8680 NULL_TREE if no simplification can be made. */
8681
8682 static tree
8683 fold_builtin_bzero (location_t loc, tree dest, tree size, bool ignore)
8684 {
8685 if (! validate_arg (dest, POINTER_TYPE)
8686 || ! validate_arg (size, INTEGER_TYPE))
8687 return NULL_TREE;
8688
8689 if (!ignore)
8690 return NULL_TREE;
8691
8692 /* New argument list transforming bzero(ptr x, int y) to
8693 memset(ptr x, int 0, size_t y). This is done this way
8694 so that if it isn't expanded inline, we fallback to
8695 calling bzero instead of memset. */
8696
8697 return fold_builtin_memset (loc, dest, integer_zero_node,
8698 fold_convert_loc (loc, size_type_node, size),
8699 void_type_node, ignore);
8700 }
8701
8702 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8703 NULL_TREE if no simplification can be made.
8704 If ENDP is 0, return DEST (like memcpy).
8705 If ENDP is 1, return DEST+LEN (like mempcpy).
8706 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8707 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8708 (memmove). */
8709
8710 static tree
8711 fold_builtin_memory_op (location_t loc, tree dest, tree src,
8712 tree len, tree type, bool ignore, int endp)
8713 {
8714 tree destvar, srcvar, expr;
8715
8716 if (! validate_arg (dest, POINTER_TYPE)
8717 || ! validate_arg (src, POINTER_TYPE)
8718 || ! validate_arg (len, INTEGER_TYPE))
8719 return NULL_TREE;
8720
8721 /* If the LEN parameter is zero, return DEST. */
8722 if (integer_zerop (len))
8723 return omit_one_operand_loc (loc, type, dest, src);
8724
8725 /* If SRC and DEST are the same (and not volatile), return
8726 DEST{,+LEN,+LEN-1}. */
8727 if (operand_equal_p (src, dest, 0))
8728 expr = len;
8729 else
8730 {
8731 tree srctype, desttype;
8732 unsigned int src_align, dest_align;
8733 tree off0;
8734
8735 if (endp == 3)
8736 {
8737 src_align = get_pointer_alignment (src);
8738 dest_align = get_pointer_alignment (dest);
8739
8740 /* Both DEST and SRC must be pointer types.
8741 ??? This is what old code did. Is the testing for pointer types
8742 really mandatory?
8743
8744 If either SRC is readonly or length is 1, we can use memcpy. */
8745 if (!dest_align || !src_align)
8746 return NULL_TREE;
8747 if (readonly_data_expr (src)
8748 || (host_integerp (len, 1)
8749 && (MIN (src_align, dest_align) / BITS_PER_UNIT
8750 >= (unsigned HOST_WIDE_INT) tree_low_cst (len, 1))))
8751 {
8752 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8753 if (!fn)
8754 return NULL_TREE;
8755 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8756 }
8757
8758 /* If *src and *dest can't overlap, optimize into memcpy as well. */
8759 if (TREE_CODE (src) == ADDR_EXPR
8760 && TREE_CODE (dest) == ADDR_EXPR)
8761 {
8762 tree src_base, dest_base, fn;
8763 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
8764 HOST_WIDE_INT size = -1;
8765 HOST_WIDE_INT maxsize = -1;
8766
8767 srcvar = TREE_OPERAND (src, 0);
8768 src_base = get_ref_base_and_extent (srcvar, &src_offset,
8769 &size, &maxsize);
8770 destvar = TREE_OPERAND (dest, 0);
8771 dest_base = get_ref_base_and_extent (destvar, &dest_offset,
8772 &size, &maxsize);
8773 if (host_integerp (len, 1))
8774 maxsize = tree_low_cst (len, 1);
8775 else
8776 maxsize = -1;
8777 src_offset /= BITS_PER_UNIT;
8778 dest_offset /= BITS_PER_UNIT;
8779 if (SSA_VAR_P (src_base)
8780 && SSA_VAR_P (dest_base))
8781 {
8782 if (operand_equal_p (src_base, dest_base, 0)
8783 && ranges_overlap_p (src_offset, maxsize,
8784 dest_offset, maxsize))
8785 return NULL_TREE;
8786 }
8787 else if (TREE_CODE (src_base) == MEM_REF
8788 && TREE_CODE (dest_base) == MEM_REF)
8789 {
8790 double_int off;
8791 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
8792 TREE_OPERAND (dest_base, 0), 0))
8793 return NULL_TREE;
8794 off = double_int_add (mem_ref_offset (src_base),
8795 shwi_to_double_int (src_offset));
8796 if (!double_int_fits_in_shwi_p (off))
8797 return NULL_TREE;
8798 src_offset = off.low;
8799 off = double_int_add (mem_ref_offset (dest_base),
8800 shwi_to_double_int (dest_offset));
8801 if (!double_int_fits_in_shwi_p (off))
8802 return NULL_TREE;
8803 dest_offset = off.low;
8804 if (ranges_overlap_p (src_offset, maxsize,
8805 dest_offset, maxsize))
8806 return NULL_TREE;
8807 }
8808 else
8809 return NULL_TREE;
8810
8811 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8812 if (!fn)
8813 return NULL_TREE;
8814 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8815 }
8816
8817 /* If the destination and source do not alias optimize into
8818 memcpy as well. */
8819 if ((is_gimple_min_invariant (dest)
8820 || TREE_CODE (dest) == SSA_NAME)
8821 && (is_gimple_min_invariant (src)
8822 || TREE_CODE (src) == SSA_NAME))
8823 {
8824 ao_ref destr, srcr;
8825 ao_ref_init_from_ptr_and_size (&destr, dest, len);
8826 ao_ref_init_from_ptr_and_size (&srcr, src, len);
8827 if (!refs_may_alias_p_1 (&destr, &srcr, false))
8828 {
8829 tree fn;
8830 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8831 if (!fn)
8832 return NULL_TREE;
8833 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8834 }
8835 }
8836
8837 return NULL_TREE;
8838 }
8839
8840 if (!host_integerp (len, 0))
8841 return NULL_TREE;
8842 /* FIXME:
8843 This logic lose for arguments like (type *)malloc (sizeof (type)),
8844 since we strip the casts of up to VOID return value from malloc.
8845 Perhaps we ought to inherit type from non-VOID argument here? */
8846 STRIP_NOPS (src);
8847 STRIP_NOPS (dest);
8848 if (!POINTER_TYPE_P (TREE_TYPE (src))
8849 || !POINTER_TYPE_P (TREE_TYPE (dest)))
8850 return NULL_TREE;
8851 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
8852 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
8853 {
8854 tree tem = TREE_OPERAND (src, 0);
8855 STRIP_NOPS (tem);
8856 if (tem != TREE_OPERAND (src, 0))
8857 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
8858 }
8859 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
8860 {
8861 tree tem = TREE_OPERAND (dest, 0);
8862 STRIP_NOPS (tem);
8863 if (tem != TREE_OPERAND (dest, 0))
8864 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
8865 }
8866 srctype = TREE_TYPE (TREE_TYPE (src));
8867 if (TREE_CODE (srctype) == ARRAY_TYPE
8868 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8869 {
8870 srctype = TREE_TYPE (srctype);
8871 STRIP_NOPS (src);
8872 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
8873 }
8874 desttype = TREE_TYPE (TREE_TYPE (dest));
8875 if (TREE_CODE (desttype) == ARRAY_TYPE
8876 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8877 {
8878 desttype = TREE_TYPE (desttype);
8879 STRIP_NOPS (dest);
8880 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
8881 }
8882 if (TREE_ADDRESSABLE (srctype)
8883 || TREE_ADDRESSABLE (desttype))
8884 return NULL_TREE;
8885
8886 src_align = get_pointer_alignment (src);
8887 dest_align = get_pointer_alignment (dest);
8888 if (dest_align < TYPE_ALIGN (desttype)
8889 || src_align < TYPE_ALIGN (srctype))
8890 return NULL_TREE;
8891
8892 if (!ignore)
8893 dest = builtin_save_expr (dest);
8894
8895 /* Build accesses at offset zero with a ref-all character type. */
8896 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
8897 ptr_mode, true), 0);
8898
8899 destvar = dest;
8900 STRIP_NOPS (destvar);
8901 if (TREE_CODE (destvar) == ADDR_EXPR
8902 && var_decl_component_p (TREE_OPERAND (destvar, 0))
8903 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8904 destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
8905 else
8906 destvar = NULL_TREE;
8907
8908 srcvar = src;
8909 STRIP_NOPS (srcvar);
8910 if (TREE_CODE (srcvar) == ADDR_EXPR
8911 && var_decl_component_p (TREE_OPERAND (srcvar, 0))
8912 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8913 {
8914 if (!destvar
8915 || src_align >= TYPE_ALIGN (desttype))
8916 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
8917 srcvar, off0);
8918 else if (!STRICT_ALIGNMENT)
8919 {
8920 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
8921 src_align);
8922 srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
8923 }
8924 else
8925 srcvar = NULL_TREE;
8926 }
8927 else
8928 srcvar = NULL_TREE;
8929
8930 if (srcvar == NULL_TREE && destvar == NULL_TREE)
8931 return NULL_TREE;
8932
8933 if (srcvar == NULL_TREE)
8934 {
8935 STRIP_NOPS (src);
8936 if (src_align >= TYPE_ALIGN (desttype))
8937 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
8938 else
8939 {
8940 if (STRICT_ALIGNMENT)
8941 return NULL_TREE;
8942 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
8943 src_align);
8944 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
8945 }
8946 }
8947 else if (destvar == NULL_TREE)
8948 {
8949 STRIP_NOPS (dest);
8950 if (dest_align >= TYPE_ALIGN (srctype))
8951 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
8952 else
8953 {
8954 if (STRICT_ALIGNMENT)
8955 return NULL_TREE;
8956 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
8957 dest_align);
8958 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
8959 }
8960 }
8961
8962 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, srcvar);
8963 }
8964
8965 if (ignore)
8966 return expr;
8967
8968 if (endp == 0 || endp == 3)
8969 return omit_one_operand_loc (loc, type, dest, expr);
8970
8971 if (expr == len)
8972 expr = NULL_TREE;
8973
8974 if (endp == 2)
8975 len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
8976 ssize_int (1));
8977
8978 dest = fold_build_pointer_plus_loc (loc, dest, len);
8979 dest = fold_convert_loc (loc, type, dest);
8980 if (expr)
8981 dest = omit_one_operand_loc (loc, type, dest, expr);
8982 return dest;
8983 }
8984
8985 /* Fold function call to builtin strcpy with arguments DEST and SRC.
8986 If LEN is not NULL, it represents the length of the string to be
8987 copied. Return NULL_TREE if no simplification can be made. */
8988
8989 tree
8990 fold_builtin_strcpy (location_t loc, tree fndecl, tree dest, tree src, tree len)
8991 {
8992 tree fn;
8993
8994 if (!validate_arg (dest, POINTER_TYPE)
8995 || !validate_arg (src, POINTER_TYPE))
8996 return NULL_TREE;
8997
8998 /* If SRC and DEST are the same (and not volatile), return DEST. */
8999 if (operand_equal_p (src, dest, 0))
9000 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
9001
9002 if (optimize_function_for_size_p (cfun))
9003 return NULL_TREE;
9004
9005 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
9006 if (!fn)
9007 return NULL_TREE;
9008
9009 if (!len)
9010 {
9011 len = c_strlen (src, 1);
9012 if (! len || TREE_SIDE_EFFECTS (len))
9013 return NULL_TREE;
9014 }
9015
9016 len = fold_convert_loc (loc, size_type_node, len);
9017 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
9018 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
9019 build_call_expr_loc (loc, fn, 3, dest, src, len));
9020 }
9021
/* Fold function call to builtin stpcpy with arguments DEST and SRC.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_stpcpy (location_t loc, tree fndecl, tree dest, tree src)
{
  tree fn, len, lenp1, call, type;

  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (src, POINTER_TYPE))
    return NULL_TREE;

  /* We need the length of SRC as a compile-time constant, both to
     size the copy and to compute the returned DEST + LEN value.  */
  len = c_strlen (src, 1);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return NULL_TREE;

  if (optimize_function_for_size_p (cfun)
      /* If length is zero it's small enough.  */
      && !integer_zerop (len))
    return NULL_TREE;

  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return NULL_TREE;

  /* Copy LEN + 1 bytes so the terminating NUL is included.  */
  lenp1 = size_binop_loc (loc, PLUS_EXPR,
			  fold_convert_loc (loc, size_type_node, len),
			  build_int_cst (size_type_node, 1));
  /* We use dest twice in building our expression.  Save it from
     multiple expansions.  */
  dest = builtin_save_expr (dest);
  call = build_call_expr_loc (loc, fn, 3, dest, src, lenp1);

  /* stpcpy returns a pointer to the copied terminating NUL, i.e.
     DEST + LEN; sequence the memcpy call before that value via
     omit_one_operand_loc.  */
  type = TREE_TYPE (TREE_TYPE (fndecl));
  dest = fold_build_pointer_plus_loc (loc, dest, len);
  dest = fold_convert_loc (loc, type, dest);
  dest = omit_one_operand_loc (loc, type, dest, call);
  return dest;
}
9062
/* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
   If SLEN is not NULL, it represents the length of the source string.
   Return NULL_TREE if no simplification can be made.  */

tree
fold_builtin_strncpy (location_t loc, tree fndecl, tree dest,
		      tree src, tree len, tree slen)
{
  tree fn;

  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (src, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);

  /* We can't compare slen with len as constants below if len is not a
     constant.  */
  if (len == 0 || TREE_CODE (len) != INTEGER_CST)
    return NULL_TREE;

  if (!slen)
    slen = c_strlen (src, 1);

  /* Now, we must be passed a constant src ptr parameter.  */
  if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
    return NULL_TREE;

  /* Account for the terminating NUL of the source.  */
  slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

  /* We do not support simplification of this case, though we do
     support it when expanding trees into RTL.  */
  /* FIXME: generate a call to __builtin_memset.  */
  if (tree_int_cst_lt (slen, len))
    return NULL_TREE;

  /* OK transform into builtin memcpy.  SLEN >= LEN here, so the copy
     never needs strncpy's NUL padding and LEN bytes suffice.  */
  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return NULL_TREE;

  len = fold_convert_loc (loc, size_type_node, len);
  return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
			   build_call_expr_loc (loc, fn, 3, dest, src, len));
}
9111
9112 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
9113 arguments to the call, and TYPE is its return type.
9114 Return NULL_TREE if no simplification can be made. */
9115
9116 static tree
9117 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
9118 {
9119 if (!validate_arg (arg1, POINTER_TYPE)
9120 || !validate_arg (arg2, INTEGER_TYPE)
9121 || !validate_arg (len, INTEGER_TYPE))
9122 return NULL_TREE;
9123 else
9124 {
9125 const char *p1;
9126
9127 if (TREE_CODE (arg2) != INTEGER_CST
9128 || !host_integerp (len, 1))
9129 return NULL_TREE;
9130
9131 p1 = c_getstr (arg1);
9132 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
9133 {
9134 char c;
9135 const char *r;
9136 tree tem;
9137
9138 if (target_char_cast (arg2, &c))
9139 return NULL_TREE;
9140
9141 r = (const char *) memchr (p1, c, tree_low_cst (len, 1));
9142
9143 if (r == NULL)
9144 return build_int_cst (TREE_TYPE (arg1), 0);
9145
9146 tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
9147 return fold_convert_loc (loc, type, tem);
9148 }
9149 return NULL_TREE;
9150 }
9151 }
9152
/* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
{
  const char *p1, *p2;

  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return zero.  */
  if (integer_zerop (len))
    return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
				  arg1, arg2);

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);

  /* Constant string contents of the arguments, or NULL.  */
  p1 = c_getstr (arg1);
  p2 = c_getstr (arg2);

  /* If all arguments are constant, and the value of len is not greater
     than the lengths of arg1 and arg2, evaluate at compile-time.  */
  if (host_integerp (len, 1) && p1 && p2
      && compare_tree_int (len, strlen (p1) + 1) <= 0
      && compare_tree_int (len, strlen (p2) + 1) <= 0)
    {
      const int r = memcmp (p1, p2, tree_low_cst (len, 1));

      /* Normalize the host memcmp result to -1/0/1.  */
      if (r > 0)
	return integer_one_node;
      else if (r < 0)
	return integer_minus_one_node;
      else
	return integer_zero_node;
    }

  /* If len parameter is one, return an expression corresponding to
     (*(const unsigned char*)arg1 - (const unsigned char*)arg2).  */
  if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
    {
      /* memcmp compares bytes, so access both pointers through a
	 const unsigned char ref-all pointer type.  */
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree ind1
	= fold_convert_loc (loc, integer_type_node,
			    build1 (INDIRECT_REF, cst_uchar_node,
				    fold_convert_loc (loc,
						      cst_uchar_ptr_node,
						      arg1)));
      tree ind2
	= fold_convert_loc (loc, integer_type_node,
			    build1 (INDIRECT_REF, cst_uchar_node,
				    fold_convert_loc (loc,
						      cst_uchar_ptr_node,
						      arg2)));
      return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
    }

  return NULL_TREE;
}
9219
/* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
{
  const char *p1, *p2;

  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE))
    return NULL_TREE;

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return integer_zero_node;

  /* Constant string contents of the arguments, or NULL.  */
  p1 = c_getstr (arg1);
  p2 = c_getstr (arg2);

  /* Both constant: evaluate on the host and normalize to -1/0/1.  */
  if (p1 && p2)
    {
      const int i = strcmp (p1, p2);
      if (i < 0)
	return integer_minus_one_node;
      else if (i > 0)
	return integer_one_node;
      else
	return integer_zero_node;
    }

  /* If the second arg is "", return *(const unsigned char*)arg1.  */
  if (p2 && *p2 == '\0')
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      return fold_convert_loc (loc, integer_type_node,
			       build1 (INDIRECT_REF, cst_uchar_node,
				       fold_convert_loc (loc,
							 cst_uchar_ptr_node,
							 arg1)));
    }

  /* If the first arg is "", return -*(const unsigned char*)arg2.  */
  if (p1 && *p1 == '\0')
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree temp
	= fold_convert_loc (loc, integer_type_node,
			    build1 (INDIRECT_REF, cst_uchar_node,
				    fold_convert_loc (loc,
						      cst_uchar_ptr_node,
						      arg2)));
      return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
    }

  return NULL_TREE;
}
9282
/* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
{
  const char *p1, *p2;

  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return zero.  */
  if (integer_zerop (len))
    return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
				  arg1, arg2);

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);

  /* Constant string contents of the arguments, or NULL.  */
  p1 = c_getstr (arg1);
  p2 = c_getstr (arg2);

  /* All constant: evaluate on the host and normalize to -1/0/1.  */
  if (host_integerp (len, 1) && p1 && p2)
    {
      const int i = strncmp (p1, p2, tree_low_cst (len, 1));
      if (i > 0)
	return integer_one_node;
      else if (i < 0)
	return integer_minus_one_node;
      else
	return integer_zero_node;
    }

  /* If the second arg is "", and the length is greater than zero,
     return *(const unsigned char*)arg1.  */
  if (p2 && *p2 == '\0'
      && TREE_CODE (len) == INTEGER_CST
      && tree_int_cst_sgn (len) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      return fold_convert_loc (loc, integer_type_node,
			       build1 (INDIRECT_REF, cst_uchar_node,
				       fold_convert_loc (loc,
							 cst_uchar_ptr_node,
							 arg1)));
    }

  /* If the first arg is "", and the length is greater than zero,
     return -*(const unsigned char*)arg2.  */
  if (p1 && *p1 == '\0'
      && TREE_CODE (len) == INTEGER_CST
      && tree_int_cst_sgn (len) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree temp = fold_convert_loc (loc, integer_type_node,
				    build1 (INDIRECT_REF, cst_uchar_node,
					    fold_convert_loc (loc,
							      cst_uchar_ptr_node,
							      arg2)));
      return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
    }

  /* If len parameter is one, return an expression corresponding to
     (*(const unsigned char*)arg1 - (const unsigned char*)arg2).  */
  if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
    {
      /* Compare a single byte through a const unsigned char ref-all
	 pointer type, as strncmp compares unsigned characters.  */
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree ind1 = fold_convert_loc (loc, integer_type_node,
				    build1 (INDIRECT_REF, cst_uchar_node,
					    fold_convert_loc (loc,
							      cst_uchar_ptr_node,
							      arg1)));
      tree ind2 = fold_convert_loc (loc, integer_type_node,
				    build1 (INDIRECT_REF, cst_uchar_node,
					    fold_convert_loc (loc,
							      cst_uchar_ptr_node,
							      arg2)));
      return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
    }

  return NULL_TREE;
}
9377
9378 /* Fold function call to builtin signbit, signbitf or signbitl with argument
9379 ARG. Return NULL_TREE if no simplification can be made. */
9380
9381 static tree
9382 fold_builtin_signbit (location_t loc, tree arg, tree type)
9383 {
9384 if (!validate_arg (arg, REAL_TYPE))
9385 return NULL_TREE;
9386
9387 /* If ARG is a compile-time constant, determine the result. */
9388 if (TREE_CODE (arg) == REAL_CST
9389 && !TREE_OVERFLOW (arg))
9390 {
9391 REAL_VALUE_TYPE c;
9392
9393 c = TREE_REAL_CST (arg);
9394 return (REAL_VALUE_NEGATIVE (c)
9395 ? build_one_cst (type)
9396 : build_zero_cst (type));
9397 }
9398
9399 /* If ARG is non-negative, the result is always zero. */
9400 if (tree_expr_nonnegative_p (arg))
9401 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9402
9403 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9404 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
9405 return fold_convert (type,
9406 fold_build2_loc (loc, LT_EXPR, boolean_type_node, arg,
9407 build_real (TREE_TYPE (arg), dconst0)));
9408
9409 return NULL_TREE;
9410 }
9411
/* Fold function call to builtin copysign, copysignf or copysignl with
   arguments ARG1 and ARG2.  Return NULL_TREE if no simplification can
   be made.  */

static tree
fold_builtin_copysign (location_t loc, tree fndecl,
		       tree arg1, tree arg2, tree type)
{
  tree tem;

  if (!validate_arg (arg1, REAL_TYPE)
      || !validate_arg (arg2, REAL_TYPE))
    return NULL_TREE;

  /* copysign(X,X) is X.  */
  if (operand_equal_p (arg1, arg2, 0))
    return fold_convert_loc (loc, type, arg1);

  /* If ARG1 and ARG2 are compile-time constants, determine the result.  */
  if (TREE_CODE (arg1) == REAL_CST
      && TREE_CODE (arg2) == REAL_CST
      && !TREE_OVERFLOW (arg1)
      && !TREE_OVERFLOW (arg2))
    {
      REAL_VALUE_TYPE c1, c2;

      c1 = TREE_REAL_CST (arg1);
      c2 = TREE_REAL_CST (arg2);
      /* c1.sign := c2.sign.  */
      real_copysign (&c1, &c2);
      return build_real (type, c1);
    }

  /* copysign(X, Y) is fabs(X) when Y is always non-negative.
     Remember to evaluate Y for side-effects.  */
  if (tree_expr_nonnegative_p (arg2))
    return omit_one_operand_loc (loc, type,
				 fold_build1_loc (loc, ABS_EXPR, type, arg1),
				 arg2);

  /* Strip sign changing operations for the first argument, since
     copysign overwrites ARG1's sign anyway; rebuild the call on the
     stripped operand.  */
  tem = fold_strip_sign_ops (arg1);
  if (tem)
    return build_call_expr_loc (loc, fndecl, 2, tem, arg2);

  return NULL_TREE;
}
9459
9460 /* Fold a call to builtin isascii with argument ARG. */
9461
9462 static tree
9463 fold_builtin_isascii (location_t loc, tree arg)
9464 {
9465 if (!validate_arg (arg, INTEGER_TYPE))
9466 return NULL_TREE;
9467 else
9468 {
9469 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9470 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9471 build_int_cst (integer_type_node,
9472 ~ (unsigned HOST_WIDE_INT) 0x7f));
9473 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
9474 arg, integer_zero_node);
9475 }
9476 }
9477
9478 /* Fold a call to builtin toascii with argument ARG. */
9479
9480 static tree
9481 fold_builtin_toascii (location_t loc, tree arg)
9482 {
9483 if (!validate_arg (arg, INTEGER_TYPE))
9484 return NULL_TREE;
9485
9486 /* Transform toascii(c) -> (c & 0x7f). */
9487 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
9488 build_int_cst (integer_type_node, 0x7f));
9489 }
9490
9491 /* Fold a call to builtin isdigit with argument ARG. */
9492
9493 static tree
9494 fold_builtin_isdigit (location_t loc, tree arg)
9495 {
9496 if (!validate_arg (arg, INTEGER_TYPE))
9497 return NULL_TREE;
9498 else
9499 {
9500 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9501 /* According to the C standard, isdigit is unaffected by locale.
9502 However, it definitely is affected by the target character set. */
9503 unsigned HOST_WIDE_INT target_digit0
9504 = lang_hooks.to_target_charset ('0');
9505
9506 if (target_digit0 == 0)
9507 return NULL_TREE;
9508
9509 arg = fold_convert_loc (loc, unsigned_type_node, arg);
9510 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
9511 build_int_cst (unsigned_type_node, target_digit0));
9512 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
9513 build_int_cst (unsigned_type_node, 9));
9514 }
9515 }
9516
9517 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9518
9519 static tree
9520 fold_builtin_fabs (location_t loc, tree arg, tree type)
9521 {
9522 if (!validate_arg (arg, REAL_TYPE))
9523 return NULL_TREE;
9524
9525 arg = fold_convert_loc (loc, type, arg);
9526 if (TREE_CODE (arg) == REAL_CST)
9527 return fold_abs_const (arg, type);
9528 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9529 }
9530
9531 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9532
9533 static tree
9534 fold_builtin_abs (location_t loc, tree arg, tree type)
9535 {
9536 if (!validate_arg (arg, INTEGER_TYPE))
9537 return NULL_TREE;
9538
9539 arg = fold_convert_loc (loc, type, arg);
9540 if (TREE_CODE (arg) == INTEGER_CST)
9541 return fold_abs_const (arg, type);
9542 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9543 }
9544
9545 /* Fold a fma operation with arguments ARG[012]. */
9546
9547 tree
9548 fold_fma (location_t loc ATTRIBUTE_UNUSED,
9549 tree type, tree arg0, tree arg1, tree arg2)
9550 {
9551 if (TREE_CODE (arg0) == REAL_CST
9552 && TREE_CODE (arg1) == REAL_CST
9553 && TREE_CODE (arg2) == REAL_CST)
9554 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
9555
9556 return NULL_TREE;
9557 }
9558
9559 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
9560
9561 static tree
9562 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
9563 {
9564 if (validate_arg (arg0, REAL_TYPE)
9565 && validate_arg(arg1, REAL_TYPE)
9566 && validate_arg(arg2, REAL_TYPE))
9567 {
9568 tree tem = fold_fma (loc, type, arg0, arg1, arg2);
9569 if (tem)
9570 return tem;
9571
9572 /* ??? Only expand to FMA_EXPR if it's directly supported. */
9573 if (optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
9574 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
9575 }
9576 return NULL_TREE;
9577 }
9578
/* Fold a call to builtin fmin or fmax.  ARG0 and ARG1 are the two
   operands, TYPE is the return type, and MAX selects fmax over fmin.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
			tree type, bool max)
{
  if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
    {
      /* Calculate the result when the argument is a constant.  */
      tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));

      if (res)
	return res;

      /* If either argument is NaN, return the other one.  Avoid the
	 transformation if we get (and honor) a signalling NaN.  Using
	 omit_one_operand() ensures we create a non-lvalue.  */
      if (TREE_CODE (arg0) == REAL_CST
	  && real_isnan (&TREE_REAL_CST (arg0))
	  && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	      || ! TREE_REAL_CST (arg0).signalling))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      if (TREE_CODE (arg1) == REAL_CST
	  && real_isnan (&TREE_REAL_CST (arg1))
	  && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
	      || ! TREE_REAL_CST (arg1).signalling))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* Transform fmin/fmax(x,x) -> x.  */
      if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR.  C99 requires these
	 functions to return the numeric arg if the other one is NaN.
	 These tree codes don't honor that, so only transform if
	 -ffinite-math-only is set.  C99 doesn't require -0.0 to be
	 handled, so we don't have to worry about it either.  */
      if (flag_finite_math_only)
	return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
			    fold_convert_loc (loc, type, arg0),
			    fold_convert_loc (loc, type, arg1));
    }
  return NULL_TREE;
}
9623
9624 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9625
9626 static tree
9627 fold_builtin_carg (location_t loc, tree arg, tree type)
9628 {
9629 if (validate_arg (arg, COMPLEX_TYPE)
9630 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9631 {
9632 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9633
9634 if (atan2_fn)
9635 {
9636 tree new_arg = builtin_save_expr (arg);
9637 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9638 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9639 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9640 }
9641 }
9642
9643 return NULL_TREE;
9644 }
9645
/* Fold a call to builtin logb/ilogb with argument ARG and return type
   RETTYPE (real for logb, integer for ilogb).  Return NULL_TREE if no
   simplification can be made.  */

static tree
fold_builtin_logb (location_t loc, tree arg, tree rettype)
{
  if (! validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg);

  /* Only constant arguments can be folded here.  */
  if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);

      switch (value->cl)
	{
	case rvc_nan:
	case rvc_inf:
	  /* If arg is Inf or NaN and we're logb, return it.  */
	  if (TREE_CODE (rettype) == REAL_TYPE)
	    return fold_convert_loc (loc, rettype, arg);
	  /* Fall through... */
	case rvc_zero:
	  /* Zero may set errno and/or raise an exception for logb, also
	     for ilogb we don't know FP_ILOGB0.  */
	  return NULL_TREE;
	case rvc_normal:
	  /* For normal numbers, proceed iff radix == 2.  In GCC,
	     normalized significands are in the range [0.5, 1.0).  We
	     want the exponent as if they were [1.0, 2.0) so get the
	     exponent and subtract 1.  */
	  if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
	    return fold_convert_loc (loc, rettype,
				     build_int_cst (integer_type_node,
						    REAL_EXP (value)-1));
	  break;
	}
    }

  return NULL_TREE;
}
9687
/* Fold a call to builtin significand, if radix == 2.  ARG is the
   argument and RETTYPE the return type of the call.  Return NULL_TREE
   if no simplification can be made.  */

static tree
fold_builtin_significand (location_t loc, tree arg, tree rettype)
{
  if (! validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg);

  /* Only constant arguments can be folded here.  */
  if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);

      switch (value->cl)
	{
	case rvc_zero:
	case rvc_nan:
	case rvc_inf:
	  /* If arg is +-0, +-Inf or +-NaN, then return it.  */
	  return fold_convert_loc (loc, rettype, arg);
	case rvc_normal:
	  /* For normal numbers, proceed iff radix == 2.  */
	  if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
	    {
	      REAL_VALUE_TYPE result = *value;
	      /* In GCC, normalized significands are in the range [0.5,
		 1.0).  We want them to be [1.0, 2.0) so set the
		 exponent to 1.  */
	      SET_REAL_EXP (&result, 1);
	      return build_real (rettype, result);
	    }
	  break;
	}
    }

  return NULL_TREE;
}
9726
/* Fold a call to builtin frexp, we can assume the base is 2.  ARG0 is
   the value, ARG1 the int* exponent out-parameter, RETTYPE the return
   type.  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
{
  if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg0);

  /* Only a constant first argument can be folded here.  */
  if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
    return NULL_TREE;

  arg1 = build_fold_indirect_ref_loc (loc, arg1);

  /* Proceed if a valid pointer type was passed in.  */
  if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
      tree frac, exp;

      switch (value->cl)
	{
	case rvc_zero:
	  /* For +-0, return (*exp = 0, +-0).  */
	  exp = integer_zero_node;
	  frac = arg0;
	  break;
	case rvc_nan:
	case rvc_inf:
	  /* For +-NaN or +-Inf, *exp is unspecified, return arg0.  */
	  return omit_one_operand_loc (loc, rettype, arg0, arg1);
	case rvc_normal:
	  {
	    /* Since the frexp function always expects base 2, and in
	       GCC normalized significands are already in the range
	       [0.5, 1.0), we have exactly what frexp wants.  */
	    REAL_VALUE_TYPE frac_rvt = *value;
	    SET_REAL_EXP (&frac_rvt, 0);
	    frac = build_real (rettype, frac_rvt);
	    exp = build_int_cst (integer_type_node, REAL_EXP (value));
	  }
	  break;
	default:
	  gcc_unreachable ();
	}

      /* Create the COMPOUND_EXPR (*arg1 = trunc, frac).  */
      arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
      TREE_SIDE_EFFECTS (arg1) = 1;
      return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
    }

  return NULL_TREE;
}
9782
/* Fold a call to builtin ldexp or scalbn/scalbln.  If LDEXP is true
   then we can assume the base is two.  If it's false, then we have to
   check the mode of the TYPE parameter in certain cases.  ARG0 is the
   value, ARG1 the integer exponent adjustment, TYPE the return type.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
			    tree type, bool ldexp)
{
  if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
    {
      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0.  */
      if (real_zerop (arg0) || integer_zerop (arg1)
	  || (TREE_CODE (arg0) == REAL_CST
	      && !real_isfinite (&TREE_REAL_CST (arg0))))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* If both arguments are constant, then try to evaluate it.  */
      if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
	  && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
	  && host_integerp (arg1, 0))
	{
	  /* Bound the maximum adjustment to twice the range of the
	     mode's valid exponents.  Use abs to ensure the range is
	     positive as a sanity check.  */
	  const long max_exp_adj = 2 *
	    labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
		  - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);

	  /* Get the user-requested adjustment.  */
	  const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);

	  /* The requested adjustment must be inside this range.  This
	     is a preliminary cap to avoid things like overflow, we
	     may still fail to compute the result for other reasons.  */
	  if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
	    {
	      REAL_VALUE_TYPE initial_result;

	      real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);

	      /* Ensure we didn't overflow.  */
	      if (! real_isinf (&initial_result))
		{
		  const REAL_VALUE_TYPE trunc_result
		    = real_value_truncate (TYPE_MODE (type), initial_result);

		  /* Only proceed if the target mode can hold the
		     resulting value.  */
		  if (REAL_VALUES_EQUAL (initial_result, trunc_result))
		    return build_real (type, trunc_result);
		}
	    }
	}
    }

  return NULL_TREE;
}
9843
9844 /* Fold a call to builtin modf. */
9845
9846 static tree
9847 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9848 {
9849 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9850 return NULL_TREE;
9851
9852 STRIP_NOPS (arg0);
9853
9854 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9855 return NULL_TREE;
9856
9857 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9858
9859 /* Proceed if a valid pointer type was passed in. */
9860 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9861 {
9862 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9863 REAL_VALUE_TYPE trunc, frac;
9864
9865 switch (value->cl)
9866 {
9867 case rvc_nan:
9868 case rvc_zero:
9869 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9870 trunc = frac = *value;
9871 break;
9872 case rvc_inf:
9873 /* For +-Inf, return (*arg1 = arg0, +-0). */
9874 frac = dconst0;
9875 frac.sign = value->sign;
9876 trunc = *value;
9877 break;
9878 case rvc_normal:
9879 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9880 real_trunc (&trunc, VOIDmode, value);
9881 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9882 /* If the original number was negative and already
9883 integral, then the fractional part is -0.0. */
9884 if (value->sign && frac.cl == rvc_zero)
9885 frac.sign = value->sign;
9886 break;
9887 }
9888
9889 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9890 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9891 build_real (rettype, trunc));
9892 TREE_SIDE_EFFECTS (arg1) = 1;
9893 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9894 build_real (rettype, frac));
9895 }
9896
9897 return NULL_TREE;
9898 }
9899
9900 /* Given a location LOC, an interclass builtin function decl FNDECL
9901 and its single argument ARG, return an folded expression computing
9902 the same, or NULL_TREE if we either couldn't or didn't want to fold
9903 (the latter happen if there's an RTL instruction available). */
9904
9905 static tree
9906 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9907 {
9908 enum machine_mode mode;
9909
9910 if (!validate_arg (arg, REAL_TYPE))
9911 return NULL_TREE;
9912
9913 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9914 return NULL_TREE;
9915
9916 mode = TYPE_MODE (TREE_TYPE (arg));
9917
9918 /* If there is no optab, try generic code. */
9919 switch (DECL_FUNCTION_CODE (fndecl))
9920 {
9921 tree result;
9922
9923 CASE_FLT_FN (BUILT_IN_ISINF):
9924 {
9925 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9926 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9927 tree const type = TREE_TYPE (arg);
9928 REAL_VALUE_TYPE r;
9929 char buf[128];
9930
9931 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9932 real_from_string (&r, buf);
9933 result = build_call_expr (isgr_fn, 2,
9934 fold_build1_loc (loc, ABS_EXPR, type, arg),
9935 build_real (type, r));
9936 return result;
9937 }
9938 CASE_FLT_FN (BUILT_IN_FINITE):
9939 case BUILT_IN_ISFINITE:
9940 {
9941 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9942 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9943 tree const type = TREE_TYPE (arg);
9944 REAL_VALUE_TYPE r;
9945 char buf[128];
9946
9947 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9948 real_from_string (&r, buf);
9949 result = build_call_expr (isle_fn, 2,
9950 fold_build1_loc (loc, ABS_EXPR, type, arg),
9951 build_real (type, r));
9952 /*result = fold_build2_loc (loc, UNGT_EXPR,
9953 TREE_TYPE (TREE_TYPE (fndecl)),
9954 fold_build1_loc (loc, ABS_EXPR, type, arg),
9955 build_real (type, r));
9956 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9957 TREE_TYPE (TREE_TYPE (fndecl)),
9958 result);*/
9959 return result;
9960 }
9961 case BUILT_IN_ISNORMAL:
9962 {
9963 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9964 islessequal(fabs(x),DBL_MAX). */
9965 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9966 tree const isge_fn = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
9967 tree const type = TREE_TYPE (arg);
9968 REAL_VALUE_TYPE rmax, rmin;
9969 char buf[128];
9970
9971 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9972 real_from_string (&rmax, buf);
9973 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9974 real_from_string (&rmin, buf);
9975 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9976 result = build_call_expr (isle_fn, 2, arg,
9977 build_real (type, rmax));
9978 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
9979 build_call_expr (isge_fn, 2, arg,
9980 build_real (type, rmin)));
9981 return result;
9982 }
9983 default:
9984 break;
9985 }
9986
9987 return NULL_TREE;
9988 }
9989
/* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
   ARG is the argument for the call.  BUILTIN_INDEX selects which
   classification is wanted; FNDECL supplies the result type.
   Returns the folded tree, or NULL_TREE if no folding was possible.  */

static tree
fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  REAL_VALUE_TYPE r;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  switch (builtin_index)
    {
    case BUILT_IN_ISINF:
      /* If the argument's mode has no infinities, the result is
	 constant 0 (but ARG may have side effects to preserve).  */
      if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg);

      /* A constant argument classifies at compile time: +Inf -> 1,
	 -Inf -> -1, finite/NaN -> 0.  */
      if (TREE_CODE (arg) == REAL_CST)
	{
	  r = TREE_REAL_CST (arg);
	  if (real_isinf (&r))
	    return real_compare (GT_EXPR, &r, &dconst0)
		   ? integer_one_node : integer_minus_one_node;
	  else
	    return integer_zero_node;
	}

      return NULL_TREE;

    case BUILT_IN_ISINF_SIGN:
      {
	/* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
	/* In a boolean context, GCC will fold the inner COND_EXPR to
	   1.  So e.g. "if (isinf_sign(x))" would be folded to just
	   "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
	tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
	tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
	tree tmp = NULL_TREE;

	/* ARG is used twice below; wrap it so it is evaluated once.  */
	arg = builtin_save_expr (arg);

	/* Only fold if both helper declarations are available.  */
	if (signbit_fn && isinf_fn)
	  {
	    tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
	    tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);

	    /* Normalize both calls to 0/1 by comparing against zero.  */
	    signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
					    signbit_call, integer_zero_node);
	    isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
					  isinf_call, integer_zero_node);

	    /* signbit ? -1 : 1, selected only when isinf is true.  */
	    tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
				   integer_minus_one_node, integer_one_node);
	    tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
				   isinf_call, tmp,
				   integer_zero_node);
	  }

	return tmp;
      }

    case BUILT_IN_ISFINITE:
      /* With neither NaNs nor infinities honored, everything is
	 finite: constant 1 (preserving ARG's side effects).  */
      if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
	  && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
	return omit_one_operand_loc (loc, type, integer_one_node, arg);

      if (TREE_CODE (arg) == REAL_CST)
	{
	  r = TREE_REAL_CST (arg);
	  return real_isfinite (&r) ? integer_one_node : integer_zero_node;
	}

      return NULL_TREE;

    case BUILT_IN_ISNAN:
      /* If NaNs are not honored, nothing is a NaN: constant 0.  */
      if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg);

      if (TREE_CODE (arg) == REAL_CST)
	{
	  r = TREE_REAL_CST (arg);
	  return real_isnan (&r) ? integer_one_node : integer_zero_node;
	}

      /* Non-constant: isnan(x) <=> x unordered with itself.  ARG
	 appears twice, hence the SAVE_EXPR.  */
      arg = builtin_save_expr (arg);
      return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);

    default:
      gcc_unreachable ();
    }
}
10082
/* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
   This builtin will generate code to return the appropriate floating
   point classification depending on the value of the floating point
   number passed in.  The possible return values must be supplied as
   int arguments to the call in the following order: FP_NAN, FP_INFINITE,
   FP_NORMAL, FP_SUBNORMAL and FP_ZERO.  The ellipses is for exactly
   one floating point argument which is "type generic".  */

static tree
fold_builtin_fpclassify (location_t loc, tree exp)
{
  tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
    arg, type, res, tmp;
  enum machine_mode mode;
  REAL_VALUE_TYPE r;
  char buf[128];

  /* Verify the required arguments in the original call.  */
  if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
			 INTEGER_TYPE, INTEGER_TYPE,
			 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_TREE;

  /* The five classification result values, in the documented order,
     followed by the value to classify.  */
  fp_nan = CALL_EXPR_ARG (exp, 0);
  fp_infinite = CALL_EXPR_ARG (exp, 1);
  fp_normal = CALL_EXPR_ARG (exp, 2);
  fp_subnormal = CALL_EXPR_ARG (exp, 3);
  fp_zero = CALL_EXPR_ARG (exp, 4);
  arg = CALL_EXPR_ARG (exp, 5);
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (type);
  /* fabs(arg) is tested several times below; SAVE_EXPR it so the
     argument is evaluated only once.  */
  arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));

  /* fpclassify(x) ->
       isnan(x) ? FP_NAN :
         (fabs(x) == Inf ? FP_INFINITE :
	   (fabs(x) >= DBL_MIN ? FP_NORMAL :
	     (x == 0 ? FP_ZERO : FP_SUBNORMAL))).

     The COND_EXPR nest is built innermost-first: each step below
     wraps the previous RES in one more outer condition.  */

  /* Innermost: zero vs subnormal.  */
  tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
			 build_real (type, dconst0));
  res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
			 tmp, fp_zero, fp_subnormal);

  /* Normal if fabs(x) >= smallest normalized value (0x1p(emin-1)).  */
  sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
  real_from_string (&r, buf);
  tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
			 arg, build_real (type, r));
  res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);

  /* Infinite test, only if the mode honors infinities.  */
  if (HONOR_INFINITIES (mode))
    {
      real_inf (&r);
      tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
			     build_real (type, r));
      res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
			     fp_infinite, res);
    }

  /* Outermost: NaN test (via ORDERED_EXPR), only if NaNs are honored.  */
  if (HONOR_NANS (mode))
    {
      tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
      res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
    }

  return res;
}
10150
10151 /* Fold a call to an unordered comparison function such as
10152 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
10153 being called and ARG0 and ARG1 are the arguments for the call.
10154 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
10155 the opposite of the desired result. UNORDERED_CODE is used
10156 for modes that can hold NaNs and ORDERED_CODE is used for
10157 the rest. */
10158
10159 static tree
10160 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
10161 enum tree_code unordered_code,
10162 enum tree_code ordered_code)
10163 {
10164 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10165 enum tree_code code;
10166 tree type0, type1;
10167 enum tree_code code0, code1;
10168 tree cmp_type = NULL_TREE;
10169
10170 type0 = TREE_TYPE (arg0);
10171 type1 = TREE_TYPE (arg1);
10172
10173 code0 = TREE_CODE (type0);
10174 code1 = TREE_CODE (type1);
10175
10176 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
10177 /* Choose the wider of two real types. */
10178 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
10179 ? type0 : type1;
10180 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
10181 cmp_type = type0;
10182 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
10183 cmp_type = type1;
10184
10185 arg0 = fold_convert_loc (loc, cmp_type, arg0);
10186 arg1 = fold_convert_loc (loc, cmp_type, arg1);
10187
10188 if (unordered_code == UNORDERED_EXPR)
10189 {
10190 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
10191 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
10192 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
10193 }
10194
10195 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
10196 : ordered_code;
10197 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
10198 fold_build2_loc (loc, code, type, arg0, arg1));
10199 }
10200
10201 /* Fold a call to built-in function FNDECL with 0 arguments.
10202 IGNORE is true if the result of the function call is ignored. This
10203 function returns NULL_TREE if no simplification was possible. */
10204
10205 static tree
10206 fold_builtin_0 (location_t loc, tree fndecl, bool ignore ATTRIBUTE_UNUSED)
10207 {
10208 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10209 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10210 switch (fcode)
10211 {
10212 CASE_FLT_FN (BUILT_IN_INF):
10213 case BUILT_IN_INFD32:
10214 case BUILT_IN_INFD64:
10215 case BUILT_IN_INFD128:
10216 return fold_builtin_inf (loc, type, true);
10217
10218 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
10219 return fold_builtin_inf (loc, type, false);
10220
10221 case BUILT_IN_CLASSIFY_TYPE:
10222 return fold_builtin_classify_type (NULL_TREE);
10223
10224 default:
10225 break;
10226 }
10227 return NULL_TREE;
10228 }
10229
10230 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
10231 IGNORE is true if the result of the function call is ignored. This
10232 function returns NULL_TREE if no simplification was possible. */
10233
10234 static tree
10235 fold_builtin_1 (location_t loc, tree fndecl, tree arg0, bool ignore)
10236 {
10237 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10238 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10239 switch (fcode)
10240 {
10241 case BUILT_IN_CONSTANT_P:
10242 {
10243 tree val = fold_builtin_constant_p (arg0);
10244
10245 /* Gimplification will pull the CALL_EXPR for the builtin out of
10246 an if condition. When not optimizing, we'll not CSE it back.
10247 To avoid link error types of regressions, return false now. */
10248 if (!val && !optimize)
10249 val = integer_zero_node;
10250
10251 return val;
10252 }
10253
10254 case BUILT_IN_CLASSIFY_TYPE:
10255 return fold_builtin_classify_type (arg0);
10256
10257 case BUILT_IN_STRLEN:
10258 return fold_builtin_strlen (loc, type, arg0);
10259
10260 CASE_FLT_FN (BUILT_IN_FABS):
10261 return fold_builtin_fabs (loc, arg0, type);
10262
10263 case BUILT_IN_ABS:
10264 case BUILT_IN_LABS:
10265 case BUILT_IN_LLABS:
10266 case BUILT_IN_IMAXABS:
10267 return fold_builtin_abs (loc, arg0, type);
10268
10269 CASE_FLT_FN (BUILT_IN_CONJ):
10270 if (validate_arg (arg0, COMPLEX_TYPE)
10271 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10272 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
10273 break;
10274
10275 CASE_FLT_FN (BUILT_IN_CREAL):
10276 if (validate_arg (arg0, COMPLEX_TYPE)
10277 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10278 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));;
10279 break;
10280
10281 CASE_FLT_FN (BUILT_IN_CIMAG):
10282 if (validate_arg (arg0, COMPLEX_TYPE)
10283 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10284 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
10285 break;
10286
10287 CASE_FLT_FN (BUILT_IN_CCOS):
10288 return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ false);
10289
10290 CASE_FLT_FN (BUILT_IN_CCOSH):
10291 return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ true);
10292
10293 CASE_FLT_FN (BUILT_IN_CPROJ):
10294 return fold_builtin_cproj(loc, arg0, type);
10295
10296 CASE_FLT_FN (BUILT_IN_CSIN):
10297 if (validate_arg (arg0, COMPLEX_TYPE)
10298 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10299 return do_mpc_arg1 (arg0, type, mpc_sin);
10300 break;
10301
10302 CASE_FLT_FN (BUILT_IN_CSINH):
10303 if (validate_arg (arg0, COMPLEX_TYPE)
10304 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10305 return do_mpc_arg1 (arg0, type, mpc_sinh);
10306 break;
10307
10308 CASE_FLT_FN (BUILT_IN_CTAN):
10309 if (validate_arg (arg0, COMPLEX_TYPE)
10310 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10311 return do_mpc_arg1 (arg0, type, mpc_tan);
10312 break;
10313
10314 CASE_FLT_FN (BUILT_IN_CTANH):
10315 if (validate_arg (arg0, COMPLEX_TYPE)
10316 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10317 return do_mpc_arg1 (arg0, type, mpc_tanh);
10318 break;
10319
10320 CASE_FLT_FN (BUILT_IN_CLOG):
10321 if (validate_arg (arg0, COMPLEX_TYPE)
10322 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10323 return do_mpc_arg1 (arg0, type, mpc_log);
10324 break;
10325
10326 CASE_FLT_FN (BUILT_IN_CSQRT):
10327 if (validate_arg (arg0, COMPLEX_TYPE)
10328 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10329 return do_mpc_arg1 (arg0, type, mpc_sqrt);
10330 break;
10331
10332 CASE_FLT_FN (BUILT_IN_CASIN):
10333 if (validate_arg (arg0, COMPLEX_TYPE)
10334 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10335 return do_mpc_arg1 (arg0, type, mpc_asin);
10336 break;
10337
10338 CASE_FLT_FN (BUILT_IN_CACOS):
10339 if (validate_arg (arg0, COMPLEX_TYPE)
10340 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10341 return do_mpc_arg1 (arg0, type, mpc_acos);
10342 break;
10343
10344 CASE_FLT_FN (BUILT_IN_CATAN):
10345 if (validate_arg (arg0, COMPLEX_TYPE)
10346 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10347 return do_mpc_arg1 (arg0, type, mpc_atan);
10348 break;
10349
10350 CASE_FLT_FN (BUILT_IN_CASINH):
10351 if (validate_arg (arg0, COMPLEX_TYPE)
10352 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10353 return do_mpc_arg1 (arg0, type, mpc_asinh);
10354 break;
10355
10356 CASE_FLT_FN (BUILT_IN_CACOSH):
10357 if (validate_arg (arg0, COMPLEX_TYPE)
10358 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10359 return do_mpc_arg1 (arg0, type, mpc_acosh);
10360 break;
10361
10362 CASE_FLT_FN (BUILT_IN_CATANH):
10363 if (validate_arg (arg0, COMPLEX_TYPE)
10364 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10365 return do_mpc_arg1 (arg0, type, mpc_atanh);
10366 break;
10367
10368 CASE_FLT_FN (BUILT_IN_CABS):
10369 return fold_builtin_cabs (loc, arg0, type, fndecl);
10370
10371 CASE_FLT_FN (BUILT_IN_CARG):
10372 return fold_builtin_carg (loc, arg0, type);
10373
10374 CASE_FLT_FN (BUILT_IN_SQRT):
10375 return fold_builtin_sqrt (loc, arg0, type);
10376
10377 CASE_FLT_FN (BUILT_IN_CBRT):
10378 return fold_builtin_cbrt (loc, arg0, type);
10379
10380 CASE_FLT_FN (BUILT_IN_ASIN):
10381 if (validate_arg (arg0, REAL_TYPE))
10382 return do_mpfr_arg1 (arg0, type, mpfr_asin,
10383 &dconstm1, &dconst1, true);
10384 break;
10385
10386 CASE_FLT_FN (BUILT_IN_ACOS):
10387 if (validate_arg (arg0, REAL_TYPE))
10388 return do_mpfr_arg1 (arg0, type, mpfr_acos,
10389 &dconstm1, &dconst1, true);
10390 break;
10391
10392 CASE_FLT_FN (BUILT_IN_ATAN):
10393 if (validate_arg (arg0, REAL_TYPE))
10394 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
10395 break;
10396
10397 CASE_FLT_FN (BUILT_IN_ASINH):
10398 if (validate_arg (arg0, REAL_TYPE))
10399 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
10400 break;
10401
10402 CASE_FLT_FN (BUILT_IN_ACOSH):
10403 if (validate_arg (arg0, REAL_TYPE))
10404 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
10405 &dconst1, NULL, true);
10406 break;
10407
10408 CASE_FLT_FN (BUILT_IN_ATANH):
10409 if (validate_arg (arg0, REAL_TYPE))
10410 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
10411 &dconstm1, &dconst1, false);
10412 break;
10413
10414 CASE_FLT_FN (BUILT_IN_SIN):
10415 if (validate_arg (arg0, REAL_TYPE))
10416 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
10417 break;
10418
10419 CASE_FLT_FN (BUILT_IN_COS):
10420 return fold_builtin_cos (loc, arg0, type, fndecl);
10421
10422 CASE_FLT_FN (BUILT_IN_TAN):
10423 return fold_builtin_tan (arg0, type);
10424
10425 CASE_FLT_FN (BUILT_IN_CEXP):
10426 return fold_builtin_cexp (loc, arg0, type);
10427
10428 CASE_FLT_FN (BUILT_IN_CEXPI):
10429 if (validate_arg (arg0, REAL_TYPE))
10430 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
10431 break;
10432
10433 CASE_FLT_FN (BUILT_IN_SINH):
10434 if (validate_arg (arg0, REAL_TYPE))
10435 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
10436 break;
10437
10438 CASE_FLT_FN (BUILT_IN_COSH):
10439 return fold_builtin_cosh (loc, arg0, type, fndecl);
10440
10441 CASE_FLT_FN (BUILT_IN_TANH):
10442 if (validate_arg (arg0, REAL_TYPE))
10443 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
10444 break;
10445
10446 CASE_FLT_FN (BUILT_IN_ERF):
10447 if (validate_arg (arg0, REAL_TYPE))
10448 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
10449 break;
10450
10451 CASE_FLT_FN (BUILT_IN_ERFC):
10452 if (validate_arg (arg0, REAL_TYPE))
10453 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
10454 break;
10455
10456 CASE_FLT_FN (BUILT_IN_TGAMMA):
10457 if (validate_arg (arg0, REAL_TYPE))
10458 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
10459 break;
10460
10461 CASE_FLT_FN (BUILT_IN_EXP):
10462 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
10463
10464 CASE_FLT_FN (BUILT_IN_EXP2):
10465 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
10466
10467 CASE_FLT_FN (BUILT_IN_EXP10):
10468 CASE_FLT_FN (BUILT_IN_POW10):
10469 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
10470
10471 CASE_FLT_FN (BUILT_IN_EXPM1):
10472 if (validate_arg (arg0, REAL_TYPE))
10473 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
10474 break;
10475
10476 CASE_FLT_FN (BUILT_IN_LOG):
10477 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log);
10478
10479 CASE_FLT_FN (BUILT_IN_LOG2):
10480 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log2);
10481
10482 CASE_FLT_FN (BUILT_IN_LOG10):
10483 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log10);
10484
10485 CASE_FLT_FN (BUILT_IN_LOG1P):
10486 if (validate_arg (arg0, REAL_TYPE))
10487 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
10488 &dconstm1, NULL, false);
10489 break;
10490
10491 CASE_FLT_FN (BUILT_IN_J0):
10492 if (validate_arg (arg0, REAL_TYPE))
10493 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10494 NULL, NULL, 0);
10495 break;
10496
10497 CASE_FLT_FN (BUILT_IN_J1):
10498 if (validate_arg (arg0, REAL_TYPE))
10499 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10500 NULL, NULL, 0);
10501 break;
10502
10503 CASE_FLT_FN (BUILT_IN_Y0):
10504 if (validate_arg (arg0, REAL_TYPE))
10505 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10506 &dconst0, NULL, false);
10507 break;
10508
10509 CASE_FLT_FN (BUILT_IN_Y1):
10510 if (validate_arg (arg0, REAL_TYPE))
10511 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10512 &dconst0, NULL, false);
10513 break;
10514
10515 CASE_FLT_FN (BUILT_IN_NAN):
10516 case BUILT_IN_NAND32:
10517 case BUILT_IN_NAND64:
10518 case BUILT_IN_NAND128:
10519 return fold_builtin_nan (arg0, type, true);
10520
10521 CASE_FLT_FN (BUILT_IN_NANS):
10522 return fold_builtin_nan (arg0, type, false);
10523
10524 CASE_FLT_FN (BUILT_IN_FLOOR):
10525 return fold_builtin_floor (loc, fndecl, arg0);
10526
10527 CASE_FLT_FN (BUILT_IN_CEIL):
10528 return fold_builtin_ceil (loc, fndecl, arg0);
10529
10530 CASE_FLT_FN (BUILT_IN_TRUNC):
10531 return fold_builtin_trunc (loc, fndecl, arg0);
10532
10533 CASE_FLT_FN (BUILT_IN_ROUND):
10534 return fold_builtin_round (loc, fndecl, arg0);
10535
10536 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10537 CASE_FLT_FN (BUILT_IN_RINT):
10538 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
10539
10540 CASE_FLT_FN (BUILT_IN_ICEIL):
10541 CASE_FLT_FN (BUILT_IN_LCEIL):
10542 CASE_FLT_FN (BUILT_IN_LLCEIL):
10543 CASE_FLT_FN (BUILT_IN_LFLOOR):
10544 CASE_FLT_FN (BUILT_IN_IFLOOR):
10545 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10546 CASE_FLT_FN (BUILT_IN_IROUND):
10547 CASE_FLT_FN (BUILT_IN_LROUND):
10548 CASE_FLT_FN (BUILT_IN_LLROUND):
10549 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
10550
10551 CASE_FLT_FN (BUILT_IN_IRINT):
10552 CASE_FLT_FN (BUILT_IN_LRINT):
10553 CASE_FLT_FN (BUILT_IN_LLRINT):
10554 return fold_fixed_mathfn (loc, fndecl, arg0);
10555
10556 case BUILT_IN_BSWAP16:
10557 case BUILT_IN_BSWAP32:
10558 case BUILT_IN_BSWAP64:
10559 return fold_builtin_bswap (fndecl, arg0);
10560
10561 CASE_INT_FN (BUILT_IN_FFS):
10562 CASE_INT_FN (BUILT_IN_CLZ):
10563 CASE_INT_FN (BUILT_IN_CTZ):
10564 CASE_INT_FN (BUILT_IN_CLRSB):
10565 CASE_INT_FN (BUILT_IN_POPCOUNT):
10566 CASE_INT_FN (BUILT_IN_PARITY):
10567 return fold_builtin_bitop (fndecl, arg0);
10568
10569 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10570 return fold_builtin_signbit (loc, arg0, type);
10571
10572 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10573 return fold_builtin_significand (loc, arg0, type);
10574
10575 CASE_FLT_FN (BUILT_IN_ILOGB):
10576 CASE_FLT_FN (BUILT_IN_LOGB):
10577 return fold_builtin_logb (loc, arg0, type);
10578
10579 case BUILT_IN_ISASCII:
10580 return fold_builtin_isascii (loc, arg0);
10581
10582 case BUILT_IN_TOASCII:
10583 return fold_builtin_toascii (loc, arg0);
10584
10585 case BUILT_IN_ISDIGIT:
10586 return fold_builtin_isdigit (loc, arg0);
10587
10588 CASE_FLT_FN (BUILT_IN_FINITE):
10589 case BUILT_IN_FINITED32:
10590 case BUILT_IN_FINITED64:
10591 case BUILT_IN_FINITED128:
10592 case BUILT_IN_ISFINITE:
10593 {
10594 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10595 if (ret)
10596 return ret;
10597 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10598 }
10599
10600 CASE_FLT_FN (BUILT_IN_ISINF):
10601 case BUILT_IN_ISINFD32:
10602 case BUILT_IN_ISINFD64:
10603 case BUILT_IN_ISINFD128:
10604 {
10605 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10606 if (ret)
10607 return ret;
10608 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10609 }
10610
10611 case BUILT_IN_ISNORMAL:
10612 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10613
10614 case BUILT_IN_ISINF_SIGN:
10615 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10616
10617 CASE_FLT_FN (BUILT_IN_ISNAN):
10618 case BUILT_IN_ISNAND32:
10619 case BUILT_IN_ISNAND64:
10620 case BUILT_IN_ISNAND128:
10621 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10622
10623 case BUILT_IN_PRINTF:
10624 case BUILT_IN_PRINTF_UNLOCKED:
10625 case BUILT_IN_VPRINTF:
10626 return fold_builtin_printf (loc, fndecl, arg0, NULL_TREE, ignore, fcode);
10627
10628 case BUILT_IN_FREE:
10629 if (integer_zerop (arg0))
10630 return build_empty_stmt (loc);
10631 break;
10632
10633 default:
10634 break;
10635 }
10636
10637 return NULL_TREE;
10638
10639 }
10640
10641 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10642 IGNORE is true if the result of the function call is ignored. This
10643 function returns NULL_TREE if no simplification was possible. */
10644
10645 static tree
10646 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1, bool ignore)
10647 {
10648 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10649 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10650
10651 switch (fcode)
10652 {
10653 CASE_FLT_FN (BUILT_IN_JN):
10654 if (validate_arg (arg0, INTEGER_TYPE)
10655 && validate_arg (arg1, REAL_TYPE))
10656 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10657 break;
10658
10659 CASE_FLT_FN (BUILT_IN_YN):
10660 if (validate_arg (arg0, INTEGER_TYPE)
10661 && validate_arg (arg1, REAL_TYPE))
10662 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10663 &dconst0, false);
10664 break;
10665
10666 CASE_FLT_FN (BUILT_IN_DREM):
10667 CASE_FLT_FN (BUILT_IN_REMAINDER):
10668 if (validate_arg (arg0, REAL_TYPE)
10669 && validate_arg(arg1, REAL_TYPE))
10670 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10671 break;
10672
10673 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10674 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10675 if (validate_arg (arg0, REAL_TYPE)
10676 && validate_arg(arg1, POINTER_TYPE))
10677 return do_mpfr_lgamma_r (arg0, arg1, type);
10678 break;
10679
10680 CASE_FLT_FN (BUILT_IN_ATAN2):
10681 if (validate_arg (arg0, REAL_TYPE)
10682 && validate_arg(arg1, REAL_TYPE))
10683 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10684 break;
10685
10686 CASE_FLT_FN (BUILT_IN_FDIM):
10687 if (validate_arg (arg0, REAL_TYPE)
10688 && validate_arg(arg1, REAL_TYPE))
10689 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10690 break;
10691
10692 CASE_FLT_FN (BUILT_IN_HYPOT):
10693 return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
10694
10695 CASE_FLT_FN (BUILT_IN_CPOW):
10696 if (validate_arg (arg0, COMPLEX_TYPE)
10697 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10698 && validate_arg (arg1, COMPLEX_TYPE)
10699 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
10700 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
10701 break;
10702
10703 CASE_FLT_FN (BUILT_IN_LDEXP):
10704 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
10705 CASE_FLT_FN (BUILT_IN_SCALBN):
10706 CASE_FLT_FN (BUILT_IN_SCALBLN):
10707 return fold_builtin_load_exponent (loc, arg0, arg1,
10708 type, /*ldexp=*/false);
10709
10710 CASE_FLT_FN (BUILT_IN_FREXP):
10711 return fold_builtin_frexp (loc, arg0, arg1, type);
10712
10713 CASE_FLT_FN (BUILT_IN_MODF):
10714 return fold_builtin_modf (loc, arg0, arg1, type);
10715
10716 case BUILT_IN_BZERO:
10717 return fold_builtin_bzero (loc, arg0, arg1, ignore);
10718
10719 case BUILT_IN_FPUTS:
10720 return fold_builtin_fputs (loc, arg0, arg1, ignore, false, NULL_TREE);
10721
10722 case BUILT_IN_FPUTS_UNLOCKED:
10723 return fold_builtin_fputs (loc, arg0, arg1, ignore, true, NULL_TREE);
10724
10725 case BUILT_IN_STRSTR:
10726 return fold_builtin_strstr (loc, arg0, arg1, type);
10727
10728 case BUILT_IN_STRCAT:
10729 return fold_builtin_strcat (loc, arg0, arg1);
10730
10731 case BUILT_IN_STRSPN:
10732 return fold_builtin_strspn (loc, arg0, arg1);
10733
10734 case BUILT_IN_STRCSPN:
10735 return fold_builtin_strcspn (loc, arg0, arg1);
10736
10737 case BUILT_IN_STRCHR:
10738 case BUILT_IN_INDEX:
10739 return fold_builtin_strchr (loc, arg0, arg1, type);
10740
10741 case BUILT_IN_STRRCHR:
10742 case BUILT_IN_RINDEX:
10743 return fold_builtin_strrchr (loc, arg0, arg1, type);
10744
10745 case BUILT_IN_STRCPY:
10746 return fold_builtin_strcpy (loc, fndecl, arg0, arg1, NULL_TREE);
10747
10748 case BUILT_IN_STPCPY:
10749 if (ignore)
10750 {
10751 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
10752 if (!fn)
10753 break;
10754
10755 return build_call_expr_loc (loc, fn, 2, arg0, arg1);
10756 }
10757 else
10758 return fold_builtin_stpcpy (loc, fndecl, arg0, arg1);
10759 break;
10760
10761 case BUILT_IN_STRCMP:
10762 return fold_builtin_strcmp (loc, arg0, arg1);
10763
10764 case BUILT_IN_STRPBRK:
10765 return fold_builtin_strpbrk (loc, arg0, arg1, type);
10766
10767 case BUILT_IN_EXPECT:
10768 return fold_builtin_expect (loc, arg0, arg1);
10769
10770 CASE_FLT_FN (BUILT_IN_POW):
10771 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
10772
10773 CASE_FLT_FN (BUILT_IN_POWI):
10774 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
10775
10776 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10777 return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
10778
10779 CASE_FLT_FN (BUILT_IN_FMIN):
10780 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
10781
10782 CASE_FLT_FN (BUILT_IN_FMAX):
10783 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
10784
10785 case BUILT_IN_ISGREATER:
10786 return fold_builtin_unordered_cmp (loc, fndecl,
10787 arg0, arg1, UNLE_EXPR, LE_EXPR);
10788 case BUILT_IN_ISGREATEREQUAL:
10789 return fold_builtin_unordered_cmp (loc, fndecl,
10790 arg0, arg1, UNLT_EXPR, LT_EXPR);
10791 case BUILT_IN_ISLESS:
10792 return fold_builtin_unordered_cmp (loc, fndecl,
10793 arg0, arg1, UNGE_EXPR, GE_EXPR);
10794 case BUILT_IN_ISLESSEQUAL:
10795 return fold_builtin_unordered_cmp (loc, fndecl,
10796 arg0, arg1, UNGT_EXPR, GT_EXPR);
10797 case BUILT_IN_ISLESSGREATER:
10798 return fold_builtin_unordered_cmp (loc, fndecl,
10799 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10800 case BUILT_IN_ISUNORDERED:
10801 return fold_builtin_unordered_cmp (loc, fndecl,
10802 arg0, arg1, UNORDERED_EXPR,
10803 NOP_EXPR);
10804
10805 /* We do the folding for va_start in the expander. */
10806 case BUILT_IN_VA_START:
10807 break;
10808
10809 case BUILT_IN_SPRINTF:
10810 return fold_builtin_sprintf (loc, arg0, arg1, NULL_TREE, ignore);
10811
10812 case BUILT_IN_OBJECT_SIZE:
10813 return fold_builtin_object_size (arg0, arg1);
10814
10815 case BUILT_IN_PRINTF:
10816 case BUILT_IN_PRINTF_UNLOCKED:
10817 case BUILT_IN_VPRINTF:
10818 return fold_builtin_printf (loc, fndecl, arg0, arg1, ignore, fcode);
10819
10820 case BUILT_IN_PRINTF_CHK:
10821 case BUILT_IN_VPRINTF_CHK:
10822 if (!validate_arg (arg0, INTEGER_TYPE)
10823 || TREE_SIDE_EFFECTS (arg0))
10824 return NULL_TREE;
10825 else
10826 return fold_builtin_printf (loc, fndecl,
10827 arg1, NULL_TREE, ignore, fcode);
10828 break;
10829
10830 case BUILT_IN_FPRINTF:
10831 case BUILT_IN_FPRINTF_UNLOCKED:
10832 case BUILT_IN_VFPRINTF:
10833 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, NULL_TREE,
10834 ignore, fcode);
10835
10836 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
10837 return fold_builtin_atomic_always_lock_free (arg0, arg1);
10838
10839 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
10840 return fold_builtin_atomic_is_lock_free (arg0, arg1);
10841
10842 default:
10843 break;
10844 }
10845 return NULL_TREE;
10846 }
10847
10848 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10849 and ARG2. IGNORE is true if the result of the function call is ignored.
10850 This function returns NULL_TREE if no simplification was possible. */
10851
10852 static tree
10853 fold_builtin_3 (location_t loc, tree fndecl,
10854 tree arg0, tree arg1, tree arg2, bool ignore)
10855 {
10856 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10857 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10858 switch (fcode)
10859 {
10860
10861 CASE_FLT_FN (BUILT_IN_SINCOS):
10862 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10863
10864 CASE_FLT_FN (BUILT_IN_FMA):
10865 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
10866 break;
10867
10868 CASE_FLT_FN (BUILT_IN_REMQUO):
10869 if (validate_arg (arg0, REAL_TYPE)
10870 && validate_arg(arg1, REAL_TYPE)
10871 && validate_arg(arg2, POINTER_TYPE))
10872 return do_mpfr_remquo (arg0, arg1, arg2);
10873 break;
10874
10875 case BUILT_IN_MEMSET:
10876 return fold_builtin_memset (loc, arg0, arg1, arg2, type, ignore);
10877
10878 case BUILT_IN_BCOPY:
10879 return fold_builtin_memory_op (loc, arg1, arg0, arg2,
10880 void_type_node, true, /*endp=*/3);
10881
10882 case BUILT_IN_MEMCPY:
10883 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10884 type, ignore, /*endp=*/0);
10885
10886 case BUILT_IN_MEMPCPY:
10887 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10888 type, ignore, /*endp=*/1);
10889
10890 case BUILT_IN_MEMMOVE:
10891 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10892 type, ignore, /*endp=*/3);
10893
10894 case BUILT_IN_STRNCAT:
10895 return fold_builtin_strncat (loc, arg0, arg1, arg2);
10896
10897 case BUILT_IN_STRNCPY:
10898 return fold_builtin_strncpy (loc, fndecl, arg0, arg1, arg2, NULL_TREE);
10899
10900 case BUILT_IN_STRNCMP:
10901 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10902
10903 case BUILT_IN_MEMCHR:
10904 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10905
10906 case BUILT_IN_BCMP:
10907 case BUILT_IN_MEMCMP:
10908 return fold_builtin_memcmp (loc, arg0, arg1, arg2);;
10909
10910 case BUILT_IN_SPRINTF:
10911 return fold_builtin_sprintf (loc, arg0, arg1, arg2, ignore);
10912
10913 case BUILT_IN_SNPRINTF:
10914 return fold_builtin_snprintf (loc, arg0, arg1, arg2, NULL_TREE, ignore);
10915
10916 case BUILT_IN_STRCPY_CHK:
10917 case BUILT_IN_STPCPY_CHK:
10918 return fold_builtin_stxcpy_chk (loc, fndecl, arg0, arg1, arg2, NULL_TREE,
10919 ignore, fcode);
10920
10921 case BUILT_IN_STRCAT_CHK:
10922 return fold_builtin_strcat_chk (loc, fndecl, arg0, arg1, arg2);
10923
10924 case BUILT_IN_PRINTF_CHK:
10925 case BUILT_IN_VPRINTF_CHK:
10926 if (!validate_arg (arg0, INTEGER_TYPE)
10927 || TREE_SIDE_EFFECTS (arg0))
10928 return NULL_TREE;
10929 else
10930 return fold_builtin_printf (loc, fndecl, arg1, arg2, ignore, fcode);
10931 break;
10932
10933 case BUILT_IN_FPRINTF:
10934 case BUILT_IN_FPRINTF_UNLOCKED:
10935 case BUILT_IN_VFPRINTF:
10936 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, arg2,
10937 ignore, fcode);
10938
10939 case BUILT_IN_FPRINTF_CHK:
10940 case BUILT_IN_VFPRINTF_CHK:
10941 if (!validate_arg (arg1, INTEGER_TYPE)
10942 || TREE_SIDE_EFFECTS (arg1))
10943 return NULL_TREE;
10944 else
10945 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, NULL_TREE,
10946 ignore, fcode);
10947
10948 default:
10949 break;
10950 }
10951 return NULL_TREE;
10952 }
10953
10954 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10955 ARG2, and ARG3. IGNORE is true if the result of the function call is
10956 ignored. This function returns NULL_TREE if no simplification was
10957 possible. */
10958
10959 static tree
10960 fold_builtin_4 (location_t loc, tree fndecl,
10961 tree arg0, tree arg1, tree arg2, tree arg3, bool ignore)
10962 {
10963 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10964
10965 switch (fcode)
10966 {
10967 case BUILT_IN_MEMCPY_CHK:
10968 case BUILT_IN_MEMPCPY_CHK:
10969 case BUILT_IN_MEMMOVE_CHK:
10970 case BUILT_IN_MEMSET_CHK:
10971 return fold_builtin_memory_chk (loc, fndecl, arg0, arg1, arg2, arg3,
10972 NULL_TREE, ignore,
10973 DECL_FUNCTION_CODE (fndecl));
10974
10975 case BUILT_IN_STRNCPY_CHK:
10976 case BUILT_IN_STPNCPY_CHK:
10977 return fold_builtin_stxncpy_chk (loc, arg0, arg1, arg2, arg3, NULL_TREE,
10978 ignore, fcode);
10979
10980 case BUILT_IN_STRNCAT_CHK:
10981 return fold_builtin_strncat_chk (loc, fndecl, arg0, arg1, arg2, arg3);
10982
10983 case BUILT_IN_SNPRINTF:
10984 return fold_builtin_snprintf (loc, arg0, arg1, arg2, arg3, ignore);
10985
10986 case BUILT_IN_FPRINTF_CHK:
10987 case BUILT_IN_VFPRINTF_CHK:
10988 if (!validate_arg (arg1, INTEGER_TYPE)
10989 || TREE_SIDE_EFFECTS (arg1))
10990 return NULL_TREE;
10991 else
10992 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, arg3,
10993 ignore, fcode);
10994 break;
10995
10996 default:
10997 break;
10998 }
10999 return NULL_TREE;
11000 }
11001
11002 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
11003 arguments, where NARGS <= 4. IGNORE is true if the result of the
11004 function call is ignored. This function returns NULL_TREE if no
11005 simplification was possible. Note that this only folds builtins with
11006 fixed argument patterns. Foldings that do varargs-to-varargs
11007 transformations, or that match calls with more than 4 arguments,
11008 need to be handled with fold_builtin_varargs instead. */
11009
11010 #define MAX_ARGS_TO_FOLD_BUILTIN 4
11011
11012 static tree
11013 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool ignore)
11014 {
11015 tree ret = NULL_TREE;
11016
11017 switch (nargs)
11018 {
11019 case 0:
11020 ret = fold_builtin_0 (loc, fndecl, ignore);
11021 break;
11022 case 1:
11023 ret = fold_builtin_1 (loc, fndecl, args[0], ignore);
11024 break;
11025 case 2:
11026 ret = fold_builtin_2 (loc, fndecl, args[0], args[1], ignore);
11027 break;
11028 case 3:
11029 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2], ignore);
11030 break;
11031 case 4:
11032 ret = fold_builtin_4 (loc, fndecl, args[0], args[1], args[2], args[3],
11033 ignore);
11034 break;
11035 default:
11036 break;
11037 }
11038 if (ret)
11039 {
11040 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11041 SET_EXPR_LOCATION (ret, loc);
11042 TREE_NO_WARNING (ret) = 1;
11043 return ret;
11044 }
11045 return NULL_TREE;
11046 }
11047
11048 /* Builtins with folding operations that operate on "..." arguments
11049 need special handling; we need to store the arguments in a convenient
11050 data structure before attempting any folding. Fortunately there are
11051 only a few builtins that fall into this category. FNDECL is the
11052 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
11053 result of the function call is ignored. */
11054
11055 static tree
11056 fold_builtin_varargs (location_t loc, tree fndecl, tree exp,
11057 bool ignore ATTRIBUTE_UNUSED)
11058 {
11059 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11060 tree ret = NULL_TREE;
11061
11062 switch (fcode)
11063 {
11064 case BUILT_IN_SPRINTF_CHK:
11065 case BUILT_IN_VSPRINTF_CHK:
11066 ret = fold_builtin_sprintf_chk (loc, exp, fcode);
11067 break;
11068
11069 case BUILT_IN_SNPRINTF_CHK:
11070 case BUILT_IN_VSNPRINTF_CHK:
11071 ret = fold_builtin_snprintf_chk (loc, exp, NULL_TREE, fcode);
11072 break;
11073
11074 case BUILT_IN_FPCLASSIFY:
11075 ret = fold_builtin_fpclassify (loc, exp);
11076 break;
11077
11078 default:
11079 break;
11080 }
11081 if (ret)
11082 {
11083 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11084 SET_EXPR_LOCATION (ret, loc);
11085 TREE_NO_WARNING (ret) = 1;
11086 return ret;
11087 }
11088 return NULL_TREE;
11089 }
11090
11091 /* Return true if FNDECL shouldn't be folded right now.
11092 If a built-in function has an inline attribute always_inline
11093 wrapper, defer folding it after always_inline functions have
11094 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
11095 might not be performed. */
11096
11097 bool
11098 avoid_folding_inline_builtin (tree fndecl)
11099 {
11100 return (DECL_DECLARED_INLINE_P (fndecl)
11101 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
11102 && cfun
11103 && !cfun->always_inline_functions_inlined
11104 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
11105 }
11106
/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.

   EXP is the CALL_EXPR to fold; IGNORE is true if the call's result
   is unused.  Returns the folded replacement tree, or NULL_TREE if
   the call must be left alone.  */

tree
fold_call_expr (location_t loc, tree exp, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = get_callee_fndecl (exp);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
	 yet.  Defer folding until we see all the arguments
	 (after inlining).  */
      && !CALL_EXPR_VA_ARG_PACK (exp))
    {
      int nargs = call_expr_nargs (exp);

      /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
	 instead last argument is __builtin_va_arg_pack ().  Defer folding
	 even in that case, until arguments are finalized.  */
      if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
	{
	  tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
	  if (fndecl2
	      && TREE_CODE (fndecl2) == FUNCTION_DECL
	      && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
	    return NULL_TREE;
	}

      /* Don't fold calls still wrapped by an always_inline fortify
	 wrapper (see avoid_folding_inline_builtin above).  */
      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;

      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
				     CALL_EXPR_ARGP (exp), ignore);
      else
	{
	  /* Try the fixed-arity folders first, then the varargs ones.  */
	  if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
	    {
	      tree *args = CALL_EXPR_ARGP (exp);
	      ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
	    }
	  if (!ret)
	    ret = fold_builtin_varargs (loc, fndecl, exp, ignore);
	  if (ret)
	    return ret;
	}
    }
  return NULL_TREE;
}
11160
11161 /* Conveniently construct a function call expression. FNDECL names the
11162 function to be called and N arguments are passed in the array
11163 ARGARRAY. */
11164
11165 tree
11166 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
11167 {
11168 tree fntype = TREE_TYPE (fndecl);
11169 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
11170
11171 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
11172 }
11173
11174 /* Conveniently construct a function call expression. FNDECL names the
11175 function to be called and the arguments are passed in the vector
11176 VEC. */
11177
11178 tree
11179 build_call_expr_loc_vec (location_t loc, tree fndecl, VEC(tree,gc) *vec)
11180 {
11181 return build_call_expr_loc_array (loc, fndecl, VEC_length (tree, vec),
11182 VEC_address (tree, vec));
11183 }
11184
11185
11186 /* Conveniently construct a function call expression. FNDECL names the
11187 function to be called, N is the number of arguments, and the "..."
11188 parameters are the argument expressions. */
11189
11190 tree
11191 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
11192 {
11193 va_list ap;
11194 tree *argarray = XALLOCAVEC (tree, n);
11195 int i;
11196
11197 va_start (ap, n);
11198 for (i = 0; i < n; i++)
11199 argarray[i] = va_arg (ap, tree);
11200 va_end (ap);
11201 return build_call_expr_loc_array (loc, fndecl, n, argarray);
11202 }
11203
11204 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
11205 varargs macros aren't supported by all bootstrap compilers. */
11206
11207 tree
11208 build_call_expr (tree fndecl, int n, ...)
11209 {
11210 va_list ap;
11211 tree *argarray = XALLOCAVEC (tree, n);
11212 int i;
11213
11214 va_start (ap, n);
11215 for (i = 0; i < n; i++)
11216 argarray[i] = va_arg (ap, tree);
11217 va_end (ap);
11218 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
11219 }
11220
/* Construct a CALL_EXPR with type TYPE with FN as the function expression.
   N arguments are passed in the array ARGARRAY.  Where possible, fold
   the call to a built-in instead of building it; the unfolded CALL_EXPR
   is returned otherwise.  */

tree
fold_builtin_call_array (location_t loc, tree type,
			 tree fn,
			 int n,
			 tree *argarray)
{
  tree ret = NULL_TREE;
  tree exp;

  /* Folding only applies when the callee is a known built-in decl.  */
  if (TREE_CODE (fn) == ADDR_EXPR)
    {
      tree fndecl = TREE_OPERAND (fn, 0);
      if (TREE_CODE (fndecl) == FUNCTION_DECL
	  && DECL_BUILT_IN (fndecl))
	{
	  /* If last argument is __builtin_va_arg_pack (), arguments to this
	     function are not finalized yet.  Defer folding until they are.  */
	  if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
	    {
	      tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
	      if (fndecl2
		  && TREE_CODE (fndecl2) == FUNCTION_DECL
		  && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
		  && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
		return build_call_array_loc (loc, type, fn, n, argarray);
	    }
	  /* Defer folding while an always_inline fortify wrapper is
	     still pending (see avoid_folding_inline_builtin).  */
	  if (avoid_folding_inline_builtin (fndecl))
	    return build_call_array_loc (loc, type, fn, n, argarray);
	  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	    {
	      /* Machine-dependent built-ins go to the target hook.  */
	      ret = targetm.fold_builtin (fndecl, n, argarray, false);
	      if (ret)
		return ret;

	      return build_call_array_loc (loc, type, fn, n, argarray);
	    }
	  else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
	    {
	      /* First try the transformations that don't require consing up
		 an exp.  */
	      ret = fold_builtin_n (loc, fndecl, argarray, n, false);
	      if (ret)
		return ret;
	    }

	  /* If we got this far, we need to build an exp.  */
	  exp = build_call_array_loc (loc, type, fn, n, argarray);
	  ret = fold_builtin_varargs (loc, fndecl, exp, false);
	  return ret ? ret : exp;
	}
    }

  return build_call_array_loc (loc, type, fn, n, argarray);
}
11278
11279 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
11280 list ARGS along with N new arguments in NEWARGS. SKIP is the number
11281 of arguments in ARGS to be omitted. OLDNARGS is the number of
11282 elements in ARGS. */
11283
11284 static tree
11285 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
11286 int skip, tree fndecl, int n, va_list newargs)
11287 {
11288 int nargs = oldnargs - skip + n;
11289 tree *buffer;
11290
11291 if (n > 0)
11292 {
11293 int i, j;
11294
11295 buffer = XALLOCAVEC (tree, nargs);
11296 for (i = 0; i < n; i++)
11297 buffer[i] = va_arg (newargs, tree);
11298 for (j = skip; j < oldnargs; j++, i++)
11299 buffer[i] = args[j];
11300 }
11301 else
11302 buffer = args + skip;
11303
11304 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
11305 }
11306
11307 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
11308 list ARGS along with N new arguments specified as the "..."
11309 parameters. SKIP is the number of arguments in ARGS to be omitted.
11310 OLDNARGS is the number of elements in ARGS. */
11311
11312 static tree
11313 rewrite_call_expr_array (location_t loc, int oldnargs, tree *args,
11314 int skip, tree fndecl, int n, ...)
11315 {
11316 va_list ap;
11317 tree t;
11318
11319 va_start (ap, n);
11320 t = rewrite_call_expr_valist (loc, oldnargs, args, skip, fndecl, n, ap);
11321 va_end (ap);
11322
11323 return t;
11324 }
11325
11326 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
11327 along with N new arguments specified as the "..." parameters. SKIP
11328 is the number of arguments in EXP to be omitted. This function is used
11329 to do varargs-to-varargs transformations. */
11330
11331 static tree
11332 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
11333 {
11334 va_list ap;
11335 tree t;
11336
11337 va_start (ap, n);
11338 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
11339 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
11340 va_end (ap);
11341
11342 return t;
11343 }
11344
11345 /* Validate a single argument ARG against a tree code CODE representing
11346 a type. */
11347
11348 static bool
11349 validate_arg (const_tree arg, enum tree_code code)
11350 {
11351 if (!arg)
11352 return false;
11353 else if (code == POINTER_TYPE)
11354 return POINTER_TYPE_P (TREE_TYPE (arg));
11355 else if (code == INTEGER_TYPE)
11356 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
11357 return code == TREE_CODE (TREE_TYPE (arg));
11358 }
11359
11360 /* This function validates the types of a function call argument list
11361 against a specified list of tree_codes. If the last specifier is a 0,
11362 that represents an ellipses, otherwise the last specifier must be a
11363 VOID_TYPE.
11364
11365 This is the GIMPLE version of validate_arglist. Eventually we want to
11366 completely convert builtins.c to work from GIMPLEs and the tree based
11367 validate_arglist will then be removed. */
11368
11369 bool
11370 validate_gimple_arglist (const_gimple call, ...)
11371 {
11372 enum tree_code code;
11373 bool res = 0;
11374 va_list ap;
11375 const_tree arg;
11376 size_t i;
11377
11378 va_start (ap, call);
11379 i = 0;
11380
11381 do
11382 {
11383 code = (enum tree_code) va_arg (ap, int);
11384 switch (code)
11385 {
11386 case 0:
11387 /* This signifies an ellipses, any further arguments are all ok. */
11388 res = true;
11389 goto end;
11390 case VOID_TYPE:
11391 /* This signifies an endlink, if no arguments remain, return
11392 true, otherwise return false. */
11393 res = (i == gimple_call_num_args (call));
11394 goto end;
11395 default:
11396 /* If no parameters remain or the parameter's code does not
11397 match the specified code, return false. Otherwise continue
11398 checking any remaining arguments. */
11399 arg = gimple_call_arg (call, i++);
11400 if (!validate_arg (arg, code))
11401 goto end;
11402 break;
11403 }
11404 }
11405 while (1);
11406
11407 /* We need gotos here since we can only have one VA_CLOSE in a
11408 function. */
11409 end: ;
11410 va_end (ap);
11411
11412 return res;
11413 }
11414
11415 /* This function validates the types of a function call argument list
11416 against a specified list of tree_codes. If the last specifier is a 0,
11417 that represents an ellipses, otherwise the last specifier must be a
11418 VOID_TYPE. */
11419
11420 bool
11421 validate_arglist (const_tree callexpr, ...)
11422 {
11423 enum tree_code code;
11424 bool res = 0;
11425 va_list ap;
11426 const_call_expr_arg_iterator iter;
11427 const_tree arg;
11428
11429 va_start (ap, callexpr);
11430 init_const_call_expr_arg_iterator (callexpr, &iter);
11431
11432 do
11433 {
11434 code = (enum tree_code) va_arg (ap, int);
11435 switch (code)
11436 {
11437 case 0:
11438 /* This signifies an ellipses, any further arguments are all ok. */
11439 res = true;
11440 goto end;
11441 case VOID_TYPE:
11442 /* This signifies an endlink, if no arguments remain, return
11443 true, otherwise return false. */
11444 res = !more_const_call_expr_args_p (&iter);
11445 goto end;
11446 default:
11447 /* If no parameters remain or the parameter's code does not
11448 match the specified code, return false. Otherwise continue
11449 checking any remaining arguments. */
11450 arg = next_const_call_expr_arg (&iter);
11451 if (!validate_arg (arg, code))
11452 goto end;
11453 break;
11454 }
11455 }
11456 while (1);
11457
11458 /* We need gotos here since we can only have one VA_CLOSE in a
11459 function. */
11460 end: ;
11461 va_end (ap);
11462
11463 return res;
11464 }
11465
/* Default target-specific builtin expander that does nothing.  All
   parameters are intentionally unused.  */

rtx
default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
			rtx target ATTRIBUTE_UNUSED,
			rtx subtarget ATTRIBUTE_UNUSED,
			enum machine_mode mode ATTRIBUTE_UNUSED,
			int ignore ATTRIBUTE_UNUSED)
{
  /* NULL_RTX signals the caller that no target-specific expansion was
     performed -- NOTE(review): presumably the caller then falls back to
     a generic strategy; confirm against the targetm.expand_builtin
     hook's contract.  */
  return NULL_RTX;
}
11477
11478 /* Returns true is EXP represents data that would potentially reside
11479 in a readonly section. */
11480
11481 static bool
11482 readonly_data_expr (tree exp)
11483 {
11484 STRIP_NOPS (exp);
11485
11486 if (TREE_CODE (exp) != ADDR_EXPR)
11487 return false;
11488
11489 exp = get_base_address (TREE_OPERAND (exp, 0));
11490 if (!exp)
11491 return false;
11492
11493 /* Make sure we call decl_readonly_section only for trees it
11494 can handle (since it returns true for everything it doesn't
11495 understand). */
11496 if (TREE_CODE (exp) == STRING_CST
11497 || TREE_CODE (exp) == CONSTRUCTOR
11498 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
11499 return decl_readonly_section (exp, 0);
11500 else
11501 return false;
11502 }
11503
11504 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
11505 to the call, and TYPE is its return type.
11506
11507 Return NULL_TREE if no simplification was possible, otherwise return the
11508 simplified form of the call as a tree.
11509
11510 The simplified form may be a constant or other expression which
11511 computes the same value, but in a more efficient manner (including
11512 calls to other builtin functions).
11513
11514 The call may contain arguments which need to be evaluated, but
11515 which are not useful to determine the result of the call. In
11516 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11517 COMPOUND_EXPR will be an argument which must be evaluated.
11518 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11519 COMPOUND_EXPR in the chain will contain the tree for the simplified
11520 form of the builtin function call. */
11521
11522 static tree
11523 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
11524 {
11525 if (!validate_arg (s1, POINTER_TYPE)
11526 || !validate_arg (s2, POINTER_TYPE))
11527 return NULL_TREE;
11528 else
11529 {
11530 tree fn;
11531 const char *p1, *p2;
11532
11533 p2 = c_getstr (s2);
11534 if (p2 == NULL)
11535 return NULL_TREE;
11536
11537 p1 = c_getstr (s1);
11538 if (p1 != NULL)
11539 {
11540 const char *r = strstr (p1, p2);
11541 tree tem;
11542
11543 if (r == NULL)
11544 return build_int_cst (TREE_TYPE (s1), 0);
11545
11546 /* Return an offset into the constant string argument. */
11547 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11548 return fold_convert_loc (loc, type, tem);
11549 }
11550
11551 /* The argument is const char *, and the result is char *, so we need
11552 a type conversion here to avoid a warning. */
11553 if (p2[0] == '\0')
11554 return fold_convert_loc (loc, type, s1);
11555
11556 if (p2[1] != '\0')
11557 return NULL_TREE;
11558
11559 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11560 if (!fn)
11561 return NULL_TREE;
11562
11563 /* New argument list transforming strstr(s1, s2) to
11564 strchr(s1, s2[0]). */
11565 return build_call_expr_loc (loc, fn, 2, s1,
11566 build_int_cst (integer_type_node, p2[0]));
11567 }
11568 }
11569
11570 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
11571 the call, and TYPE is its return type.
11572
11573 Return NULL_TREE if no simplification was possible, otherwise return the
11574 simplified form of the call as a tree.
11575
11576 The simplified form may be a constant or other expression which
11577 computes the same value, but in a more efficient manner (including
11578 calls to other builtin functions).
11579
11580 The call may contain arguments which need to be evaluated, but
11581 which are not useful to determine the result of the call. In
11582 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11583 COMPOUND_EXPR will be an argument which must be evaluated.
11584 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11585 COMPOUND_EXPR in the chain will contain the tree for the simplified
11586 form of the builtin function call. */
11587
11588 static tree
11589 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
11590 {
11591 if (!validate_arg (s1, POINTER_TYPE)
11592 || !validate_arg (s2, INTEGER_TYPE))
11593 return NULL_TREE;
11594 else
11595 {
11596 const char *p1;
11597
11598 if (TREE_CODE (s2) != INTEGER_CST)
11599 return NULL_TREE;
11600
11601 p1 = c_getstr (s1);
11602 if (p1 != NULL)
11603 {
11604 char c;
11605 const char *r;
11606 tree tem;
11607
11608 if (target_char_cast (s2, &c))
11609 return NULL_TREE;
11610
11611 r = strchr (p1, c);
11612
11613 if (r == NULL)
11614 return build_int_cst (TREE_TYPE (s1), 0);
11615
11616 /* Return an offset into the constant string argument. */
11617 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11618 return fold_convert_loc (loc, type, tem);
11619 }
11620 return NULL_TREE;
11621 }
11622 }
11623
11624 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
11625 the call, and TYPE is its return type.
11626
11627 Return NULL_TREE if no simplification was possible, otherwise return the
11628 simplified form of the call as a tree.
11629
11630 The simplified form may be a constant or other expression which
11631 computes the same value, but in a more efficient manner (including
11632 calls to other builtin functions).
11633
11634 The call may contain arguments which need to be evaluated, but
11635 which are not useful to determine the result of the call. In
11636 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11637 COMPOUND_EXPR will be an argument which must be evaluated.
11638 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11639 COMPOUND_EXPR in the chain will contain the tree for the simplified
11640 form of the builtin function call. */
11641
11642 static tree
11643 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
11644 {
11645 if (!validate_arg (s1, POINTER_TYPE)
11646 || !validate_arg (s2, INTEGER_TYPE))
11647 return NULL_TREE;
11648 else
11649 {
11650 tree fn;
11651 const char *p1;
11652
11653 if (TREE_CODE (s2) != INTEGER_CST)
11654 return NULL_TREE;
11655
11656 p1 = c_getstr (s1);
11657 if (p1 != NULL)
11658 {
11659 char c;
11660 const char *r;
11661 tree tem;
11662
11663 if (target_char_cast (s2, &c))
11664 return NULL_TREE;
11665
11666 r = strrchr (p1, c);
11667
11668 if (r == NULL)
11669 return build_int_cst (TREE_TYPE (s1), 0);
11670
11671 /* Return an offset into the constant string argument. */
11672 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11673 return fold_convert_loc (loc, type, tem);
11674 }
11675
11676 if (! integer_zerop (s2))
11677 return NULL_TREE;
11678
11679 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11680 if (!fn)
11681 return NULL_TREE;
11682
11683 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11684 return build_call_expr_loc (loc, fn, 2, s1, s2);
11685 }
11686 }
11687
11688 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11689 to the call, and TYPE is its return type.
11690
11691 Return NULL_TREE if no simplification was possible, otherwise return the
11692 simplified form of the call as a tree.
11693
11694 The simplified form may be a constant or other expression which
11695 computes the same value, but in a more efficient manner (including
11696 calls to other builtin functions).
11697
11698 The call may contain arguments which need to be evaluated, but
11699 which are not useful to determine the result of the call. In
11700 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11701 COMPOUND_EXPR will be an argument which must be evaluated.
11702 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11703 COMPOUND_EXPR in the chain will contain the tree for the simplified
11704 form of the builtin function call. */
11705
11706 static tree
11707 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
11708 {
11709 if (!validate_arg (s1, POINTER_TYPE)
11710 || !validate_arg (s2, POINTER_TYPE))
11711 return NULL_TREE;
11712 else
11713 {
11714 tree fn;
11715 const char *p1, *p2;
11716
11717 p2 = c_getstr (s2);
11718 if (p2 == NULL)
11719 return NULL_TREE;
11720
11721 p1 = c_getstr (s1);
11722 if (p1 != NULL)
11723 {
11724 const char *r = strpbrk (p1, p2);
11725 tree tem;
11726
11727 if (r == NULL)
11728 return build_int_cst (TREE_TYPE (s1), 0);
11729
11730 /* Return an offset into the constant string argument. */
11731 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11732 return fold_convert_loc (loc, type, tem);
11733 }
11734
11735 if (p2[0] == '\0')
11736 /* strpbrk(x, "") == NULL.
11737 Evaluate and ignore s1 in case it had side-effects. */
11738 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
11739
11740 if (p2[1] != '\0')
11741 return NULL_TREE; /* Really call strpbrk. */
11742
11743 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11744 if (!fn)
11745 return NULL_TREE;
11746
11747 /* New argument list transforming strpbrk(s1, s2) to
11748 strchr(s1, s2[0]). */
11749 return build_call_expr_loc (loc, fn, 2, s1,
11750 build_int_cst (integer_type_node, p2[0]));
11751 }
11752 }
11753
11754 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11755 to the call.
11756
11757 Return NULL_TREE if no simplification was possible, otherwise return the
11758 simplified form of the call as a tree.
11759
11760 The simplified form may be a constant or other expression which
11761 computes the same value, but in a more efficient manner (including
11762 calls to other builtin functions).
11763
11764 The call may contain arguments which need to be evaluated, but
11765 which are not useful to determine the result of the call. In
11766 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11767 COMPOUND_EXPR will be an argument which must be evaluated.
11768 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11769 COMPOUND_EXPR in the chain will contain the tree for the simplified
11770 form of the builtin function call. */
11771
11772 static tree
11773 fold_builtin_strcat (location_t loc ATTRIBUTE_UNUSED, tree dst, tree src)
11774 {
11775 if (!validate_arg (dst, POINTER_TYPE)
11776 || !validate_arg (src, POINTER_TYPE))
11777 return NULL_TREE;
11778 else
11779 {
11780 const char *p = c_getstr (src);
11781
11782 /* If the string length is zero, return the dst parameter. */
11783 if (p && *p == '\0')
11784 return dst;
11785
11786 if (optimize_insn_for_speed_p ())
11787 {
11788 /* See if we can store by pieces into (dst + strlen(dst)). */
11789 tree newdst, call;
11790 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
11791 tree strcpy_fn = builtin_decl_implicit (BUILT_IN_STRCPY);
11792
11793 if (!strlen_fn || !strcpy_fn)
11794 return NULL_TREE;
11795
11796 /* If we don't have a movstr we don't want to emit an strcpy
11797 call. We have to do that if the length of the source string
11798 isn't computable (in that case we can use memcpy probably
11799 later expanding to a sequence of mov instructions). If we
11800 have movstr instructions we can emit strcpy calls. */
11801 if (!HAVE_movstr)
11802 {
11803 tree len = c_strlen (src, 1);
11804 if (! len || TREE_SIDE_EFFECTS (len))
11805 return NULL_TREE;
11806 }
11807
11808 /* Stabilize the argument list. */
11809 dst = builtin_save_expr (dst);
11810
11811 /* Create strlen (dst). */
11812 newdst = build_call_expr_loc (loc, strlen_fn, 1, dst);
11813 /* Create (dst p+ strlen (dst)). */
11814
11815 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
11816 newdst = builtin_save_expr (newdst);
11817
11818 call = build_call_expr_loc (loc, strcpy_fn, 2, newdst, src);
11819 return build2 (COMPOUND_EXPR, TREE_TYPE (dst), call, dst);
11820 }
11821 return NULL_TREE;
11822 }
11823 }
11824
11825 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11826 arguments to the call.
11827
11828 Return NULL_TREE if no simplification was possible, otherwise return the
11829 simplified form of the call as a tree.
11830
11831 The simplified form may be a constant or other expression which
11832 computes the same value, but in a more efficient manner (including
11833 calls to other builtin functions).
11834
11835 The call may contain arguments which need to be evaluated, but
11836 which are not useful to determine the result of the call. In
11837 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11838 COMPOUND_EXPR will be an argument which must be evaluated.
11839 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11840 COMPOUND_EXPR in the chain will contain the tree for the simplified
11841 form of the builtin function call. */
11842
11843 static tree
11844 fold_builtin_strncat (location_t loc, tree dst, tree src, tree len)
11845 {
11846 if (!validate_arg (dst, POINTER_TYPE)
11847 || !validate_arg (src, POINTER_TYPE)
11848 || !validate_arg (len, INTEGER_TYPE))
11849 return NULL_TREE;
11850 else
11851 {
11852 const char *p = c_getstr (src);
11853
11854 /* If the requested length is zero, or the src parameter string
11855 length is zero, return the dst parameter. */
11856 if (integer_zerop (len) || (p && *p == '\0'))
11857 return omit_two_operands_loc (loc, TREE_TYPE (dst), dst, src, len);
11858
11859 /* If the requested len is greater than or equal to the string
11860 length, call strcat. */
11861 if (TREE_CODE (len) == INTEGER_CST && p
11862 && compare_tree_int (len, strlen (p)) >= 0)
11863 {
11864 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
11865
11866 /* If the replacement _DECL isn't initialized, don't do the
11867 transformation. */
11868 if (!fn)
11869 return NULL_TREE;
11870
11871 return build_call_expr_loc (loc, fn, 2, dst, src);
11872 }
11873 return NULL_TREE;
11874 }
11875 }
11876
11877 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11878 to the call.
11879
11880 Return NULL_TREE if no simplification was possible, otherwise return the
11881 simplified form of the call as a tree.
11882
11883 The simplified form may be a constant or other expression which
11884 computes the same value, but in a more efficient manner (including
11885 calls to other builtin functions).
11886
11887 The call may contain arguments which need to be evaluated, but
11888 which are not useful to determine the result of the call. In
11889 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11890 COMPOUND_EXPR will be an argument which must be evaluated.
11891 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11892 COMPOUND_EXPR in the chain will contain the tree for the simplified
11893 form of the builtin function call. */
11894
11895 static tree
11896 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11897 {
11898 if (!validate_arg (s1, POINTER_TYPE)
11899 || !validate_arg (s2, POINTER_TYPE))
11900 return NULL_TREE;
11901 else
11902 {
11903 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11904
11905 /* If both arguments are constants, evaluate at compile-time. */
11906 if (p1 && p2)
11907 {
11908 const size_t r = strspn (p1, p2);
11909 return size_int (r);
11910 }
11911
11912 /* If either argument is "", return NULL_TREE. */
11913 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11914 /* Evaluate and ignore both arguments in case either one has
11915 side-effects. */
11916 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11917 s1, s2);
11918 return NULL_TREE;
11919 }
11920 }
11921
11922 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11923 to the call.
11924
11925 Return NULL_TREE if no simplification was possible, otherwise return the
11926 simplified form of the call as a tree.
11927
11928 The simplified form may be a constant or other expression which
11929 computes the same value, but in a more efficient manner (including
11930 calls to other builtin functions).
11931
11932 The call may contain arguments which need to be evaluated, but
11933 which are not useful to determine the result of the call. In
11934 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11935 COMPOUND_EXPR will be an argument which must be evaluated.
11936 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11937 COMPOUND_EXPR in the chain will contain the tree for the simplified
11938 form of the builtin function call. */
11939
11940 static tree
11941 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
11942 {
11943 if (!validate_arg (s1, POINTER_TYPE)
11944 || !validate_arg (s2, POINTER_TYPE))
11945 return NULL_TREE;
11946 else
11947 {
11948 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11949
11950 /* If both arguments are constants, evaluate at compile-time. */
11951 if (p1 && p2)
11952 {
11953 const size_t r = strcspn (p1, p2);
11954 return size_int (r);
11955 }
11956
11957 /* If the first argument is "", return NULL_TREE. */
11958 if (p1 && *p1 == '\0')
11959 {
11960 /* Evaluate and ignore argument s2 in case it has
11961 side-effects. */
11962 return omit_one_operand_loc (loc, size_type_node,
11963 size_zero_node, s2);
11964 }
11965
11966 /* If the second argument is "", return __builtin_strlen(s1). */
11967 if (p2 && *p2 == '\0')
11968 {
11969 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
11970
11971 /* If the replacement _DECL isn't initialized, don't do the
11972 transformation. */
11973 if (!fn)
11974 return NULL_TREE;
11975
11976 return build_call_expr_loc (loc, fn, 1, s1);
11977 }
11978 return NULL_TREE;
11979 }
11980 }
11981
11982 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11983 to the call. IGNORE is true if the value returned
11984 by the builtin will be ignored. UNLOCKED is true is true if this
11985 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
11986 the known length of the string. Return NULL_TREE if no simplification
11987 was possible. */
11988
11989 tree
11990 fold_builtin_fputs (location_t loc, tree arg0, tree arg1,
11991 bool ignore, bool unlocked, tree len)
11992 {
11993 /* If we're using an unlocked function, assume the other unlocked
11994 functions exist explicitly. */
11995 tree const fn_fputc = (unlocked
11996 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
11997 : builtin_decl_implicit (BUILT_IN_FPUTC));
11998 tree const fn_fwrite = (unlocked
11999 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
12000 : builtin_decl_implicit (BUILT_IN_FWRITE));
12001
12002 /* If the return value is used, don't do the transformation. */
12003 if (!ignore)
12004 return NULL_TREE;
12005
12006 /* Verify the arguments in the original call. */
12007 if (!validate_arg (arg0, POINTER_TYPE)
12008 || !validate_arg (arg1, POINTER_TYPE))
12009 return NULL_TREE;
12010
12011 if (! len)
12012 len = c_strlen (arg0, 0);
12013
12014 /* Get the length of the string passed to fputs. If the length
12015 can't be determined, punt. */
12016 if (!len
12017 || TREE_CODE (len) != INTEGER_CST)
12018 return NULL_TREE;
12019
12020 switch (compare_tree_int (len, 1))
12021 {
12022 case -1: /* length is 0, delete the call entirely . */
12023 return omit_one_operand_loc (loc, integer_type_node,
12024 integer_zero_node, arg1);;
12025
12026 case 0: /* length is 1, call fputc. */
12027 {
12028 const char *p = c_getstr (arg0);
12029
12030 if (p != NULL)
12031 {
12032 if (fn_fputc)
12033 return build_call_expr_loc (loc, fn_fputc, 2,
12034 build_int_cst
12035 (integer_type_node, p[0]), arg1);
12036 else
12037 return NULL_TREE;
12038 }
12039 }
12040 /* FALLTHROUGH */
12041 case 1: /* length is greater than 1, call fwrite. */
12042 {
12043 /* If optimizing for size keep fputs. */
12044 if (optimize_function_for_size_p (cfun))
12045 return NULL_TREE;
12046 /* New argument list transforming fputs(string, stream) to
12047 fwrite(string, 1, len, stream). */
12048 if (fn_fwrite)
12049 return build_call_expr_loc (loc, fn_fwrite, 4, arg0,
12050 size_one_node, len, arg1);
12051 else
12052 return NULL_TREE;
12053 }
12054 default:
12055 gcc_unreachable ();
12056 }
12057 return NULL_TREE;
12058 }
12059
12060 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
12061 produced. False otherwise. This is done so that we don't output the error
12062 or warning twice or three times. */
12063
12064 bool
12065 fold_builtin_next_arg (tree exp, bool va_start_p)
12066 {
12067 tree fntype = TREE_TYPE (current_function_decl);
12068 int nargs = call_expr_nargs (exp);
12069 tree arg;
12070 /* There is good chance the current input_location points inside the
12071 definition of the va_start macro (perhaps on the token for
12072 builtin) in a system header, so warnings will not be emitted.
12073 Use the location in real source code. */
12074 source_location current_location =
12075 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
12076 NULL);
12077
12078 if (!stdarg_p (fntype))
12079 {
12080 error ("%<va_start%> used in function with fixed args");
12081 return true;
12082 }
12083
12084 if (va_start_p)
12085 {
12086 if (va_start_p && (nargs != 2))
12087 {
12088 error ("wrong number of arguments to function %<va_start%>");
12089 return true;
12090 }
12091 arg = CALL_EXPR_ARG (exp, 1);
12092 }
12093 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
12094 when we checked the arguments and if needed issued a warning. */
12095 else
12096 {
12097 if (nargs == 0)
12098 {
12099 /* Evidently an out of date version of <stdarg.h>; can't validate
12100 va_start's second argument, but can still work as intended. */
12101 warning_at (current_location,
12102 OPT_Wvarargs,
12103 "%<__builtin_next_arg%> called without an argument");
12104 return true;
12105 }
12106 else if (nargs > 1)
12107 {
12108 error ("wrong number of arguments to function %<__builtin_next_arg%>");
12109 return true;
12110 }
12111 arg = CALL_EXPR_ARG (exp, 0);
12112 }
12113
12114 if (TREE_CODE (arg) == SSA_NAME)
12115 arg = SSA_NAME_VAR (arg);
12116
12117 /* We destructively modify the call to be __builtin_va_start (ap, 0)
12118 or __builtin_next_arg (0) the first time we see it, after checking
12119 the arguments and if needed issuing a warning. */
12120 if (!integer_zerop (arg))
12121 {
12122 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
12123
12124 /* Strip off all nops for the sake of the comparison. This
12125 is not quite the same as STRIP_NOPS. It does more.
12126 We must also strip off INDIRECT_EXPR for C++ reference
12127 parameters. */
12128 while (CONVERT_EXPR_P (arg)
12129 || TREE_CODE (arg) == INDIRECT_REF)
12130 arg = TREE_OPERAND (arg, 0);
12131 if (arg != last_parm)
12132 {
12133 /* FIXME: Sometimes with the tree optimizers we can get the
12134 not the last argument even though the user used the last
12135 argument. We just warn and set the arg to be the last
12136 argument so that we will get wrong-code because of
12137 it. */
12138 warning_at (current_location,
12139 OPT_Wvarargs,
12140 "second parameter of %<va_start%> not last named argument");
12141 }
12142
12143 /* Undefined by C99 7.15.1.4p4 (va_start):
12144 "If the parameter parmN is declared with the register storage
12145 class, with a function or array type, or with a type that is
12146 not compatible with the type that results after application of
12147 the default argument promotions, the behavior is undefined."
12148 */
12149 else if (DECL_REGISTER (arg))
12150 {
12151 warning_at (current_location,
12152 OPT_Wvarargs,
12153 "undefined behaviour when second parameter of "
12154 "%<va_start%> is declared with %<register%> storage");
12155 }
12156
12157 /* We want to verify the second parameter just once before the tree
12158 optimizers are run and then avoid keeping it in the tree,
12159 as otherwise we could warn even for correct code like:
12160 void foo (int i, ...)
12161 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
12162 if (va_start_p)
12163 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
12164 else
12165 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
12166 }
12167 return false;
12168 }
12169
12170
12171 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
12172 ORIG may be null if this is a 2-argument call. We don't attempt to
12173 simplify calls with more than 3 arguments.
12174
12175 Return NULL_TREE if no simplification was possible, otherwise return the
12176 simplified form of the call as a tree. If IGNORED is true, it means that
12177 the caller does not use the returned value of the function. */
12178
static tree
fold_builtin_sprintf (location_t loc, tree dest, tree fmt,
		      tree orig, int ignored)
{
  tree call, retval;
  const char *fmt_str = NULL;

  /* Verify the required arguments in the original call.  We deal with two
     types of sprintf() calls: 'sprintf (str, fmt)' and
     'sprintf (dest, "%s", orig)'.  */
  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;
  if (orig && !validate_arg (orig, POINTER_TYPE))
    return NULL_TREE;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return NULL_TREE;

  /* CALL will hold the replacement call; RETVAL the value the original
     sprintf would have returned (its character count), when needed.  */
  call = NULL_TREE;
  retval = NULL_TREE;

  if (!init_target_chars ())
    return NULL_TREE;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);

      if (!fn)
	return NULL_TREE;

      /* Don't optimize sprintf (buf, "abc", ptr++).  */
      if (orig)
	return NULL_TREE;

      /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
	 'format' is known to contain no % formats.  */
      call = build_call_expr_loc (loc, fn, 2, dest, fmt);
      /* sprintf returns the number of characters written, which for a
	 %-free format is just its length.  */
      if (!ignored)
	retval = build_int_cst (integer_type_node, strlen (fmt_str));
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn;
      fn = builtin_decl_implicit (BUILT_IN_STRCPY);

      if (!fn)
	return NULL_TREE;

      /* Don't crash on sprintf (str1, "%s").  */
      if (!orig)
	return NULL_TREE;

      /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2).  */
      if (!ignored)
	{
	  /* The return value is strlen (orig); punt unless that is a
	     compile-time constant.  */
	  retval = c_strlen (orig, 1);
	  if (!retval || TREE_CODE (retval) != INTEGER_CST)
	    return NULL_TREE;
	}
      call = build_call_expr_loc (loc, fn, 2, dest, orig);
    }

  if (call && retval)
    {
      /* Chain the strcpy call with the constant return value so callers
	 that use sprintf's result still see the right value/type.  */
      retval = fold_convert_loc
	(loc, TREE_TYPE (TREE_TYPE (builtin_decl_implicit (BUILT_IN_SPRINTF))),
	 retval);
      return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
    }
  else
    return call;
}
12258
12259 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
12260 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
12261 attempt to simplify calls with more than 4 arguments.
12262
12263 Return NULL_TREE if no simplification was possible, otherwise return the
12264 simplified form of the call as a tree. If IGNORED is true, it means that
12265 the caller does not use the returned value of the function. */
12266
static tree
fold_builtin_snprintf (location_t loc, tree dest, tree destsize, tree fmt,
		       tree orig, int ignored)
{
  tree call, retval;
  const char *fmt_str = NULL;
  unsigned HOST_WIDE_INT destlen;

  /* Verify the required arguments in the original call.  We deal with two
     types of snprintf() calls: 'snprintf (str, cst, fmt)' and
     'snprintf (dest, cst, "%s", orig)'.  */
  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (destsize, INTEGER_TYPE)
      || !validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;
  if (orig && !validate_arg (orig, POINTER_TYPE))
    return NULL_TREE;

  /* The destination size must be a compile-time constant, otherwise we
     cannot prove the output fits.  */
  if (!host_integerp (destsize, 1))
    return NULL_TREE;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return NULL_TREE;

  /* CALL will hold the replacement call; RETVAL the value the original
     snprintf would have returned, when needed.  */
  call = NULL_TREE;
  retval = NULL_TREE;

  if (!init_target_chars ())
    return NULL_TREE;

  destlen = tree_low_cst (destsize, 1);

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      size_t len = strlen (fmt_str);

      /* Don't optimize snprintf (buf, 4, "abc", ptr++).  */
      if (orig)
	return NULL_TREE;

      /* We could expand this as
	 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
	 or to
	 memcpy (str, fmt_with_nul_at_cstm1, cst);
	 but in the former case that might increase code size
	 and in the latter case grow .rodata section too much.
	 So punt for now.  */
      if (len >= destlen)
	return NULL_TREE;

      if (!fn)
	return NULL_TREE;

      /* Convert snprintf (str, cst, fmt) into strcpy (str, fmt) when
	 'format' is known to contain no % formats and
	 strlen (fmt) < cst.  */
      call = build_call_expr_loc (loc, fn, 2, dest, fmt);

      /* snprintf's return value is the length it would have written.  */
      if (!ignored)
	retval = build_int_cst (integer_type_node, strlen (fmt_str));
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      unsigned HOST_WIDE_INT origlen;

      /* Don't crash on snprintf (str1, cst, "%s").  */
      if (!orig)
	return NULL_TREE;

      /* Need a constant source length both to bound-check against
	 DESTLEN and to serve as the return value.  */
      retval = c_strlen (orig, 1);
      if (!retval || !host_integerp (retval, 1))
	return NULL_TREE;

      origlen = tree_low_cst (retval, 1);
      /* We could expand this as
	 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
	 or to
	 memcpy (str1, str2_with_nul_at_cstm1, cst);
	 but in the former case that might increase code size
	 and in the latter case grow .rodata section too much.
	 So punt for now.  */
      if (origlen >= destlen)
	return NULL_TREE;

      /* Convert snprintf (str1, cst, "%s", str2) into
	 strcpy (str1, str2) if strlen (str2) < cst.  */
      if (!fn)
	return NULL_TREE;

      call = build_call_expr_loc (loc, fn, 2, dest, orig);

      if (ignored)
	retval = NULL_TREE;
    }

  if (call && retval)
    {
      /* Chain the strcpy call with the constant return value so callers
	 that use snprintf's result still see the right value/type.  */
      tree fn = builtin_decl_explicit (BUILT_IN_SNPRINTF);
      retval = fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fn)), retval);
      return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
    }
  else
    return call;
}
12378
12379 /* Expand a call EXP to __builtin_object_size. */
12380
12381 rtx
12382 expand_builtin_object_size (tree exp)
12383 {
12384 tree ost;
12385 int object_size_type;
12386 tree fndecl = get_callee_fndecl (exp);
12387
12388 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
12389 {
12390 error ("%Kfirst argument of %D must be a pointer, second integer constant",
12391 exp, fndecl);
12392 expand_builtin_trap ();
12393 return const0_rtx;
12394 }
12395
12396 ost = CALL_EXPR_ARG (exp, 1);
12397 STRIP_NOPS (ost);
12398
12399 if (TREE_CODE (ost) != INTEGER_CST
12400 || tree_int_cst_sgn (ost) < 0
12401 || compare_tree_int (ost, 3) > 0)
12402 {
12403 error ("%Klast argument of %D is not integer constant between 0 and 3",
12404 exp, fndecl);
12405 expand_builtin_trap ();
12406 return const0_rtx;
12407 }
12408
12409 object_size_type = tree_low_cst (ost, 0);
12410
12411 return object_size_type < 2 ? constm1_rtx : const0_rtx;
12412 }
12413
12414 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12415 FCODE is the BUILT_IN_* to use.
12416 Return NULL_RTX if we failed; the caller should emit a normal call,
12417 otherwise try to get the result in TARGET, if convenient (and in
12418 mode MODE if that's convenient). */
12419
static rtx
expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
			   enum built_in_function fcode)
{
  tree dest, src, len, size;

  /* For __memset_chk the second argument is the fill byte (integer),
     for the others it is a source pointer.  */
  if (!validate_arglist (exp,
			 POINTER_TYPE,
			 fcode == BUILT_IN_MEMSET_CHK
			 ? INTEGER_TYPE : POINTER_TYPE,
			 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dest = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);
  len = CALL_EXPR_ARG (exp, 2);
  size = CALL_EXPR_ARG (exp, 3);

  /* SIZE is the statically-known object size; without it we cannot
     decide whether the check can be dropped.  */
  if (! host_integerp (size, 1))
    return NULL_RTX;

  if (host_integerp (len, 1) || integer_all_onesp (size))
    {
      tree fn;

      /* Constant LEN larger than the known object size: certain
	 overflow, so warn and leave the checking call in place.  */
      if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
	{
	  warning_at (tree_nonartificial_location (exp),
		      0, "%Kcall to %D will always overflow destination buffer",
		      exp, get_callee_fndecl (exp));
	  return NULL_RTX;
	}

      fn = NULL_TREE;
      /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
	 mem{cpy,pcpy,move,set} is available.  */
      switch (fcode)
	{
	case BUILT_IN_MEMCPY_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
	  break;
	case BUILT_IN_MEMPCPY_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
	  break;
	case BUILT_IN_MEMMOVE_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
	  break;
	case BUILT_IN_MEMSET_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMSET);
	  break;
	default:
	  break;
	}

      if (! fn)
	return NULL_RTX;

      /* Replace the _chk call with the unchecked variant, preserving
	 the tail-call flag of the original call.  */
      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_expr (fn, target, mode, EXPAND_NORMAL);
    }
  else if (fcode == BUILT_IN_MEMSET_CHK)
    return NULL_RTX;
  else
    {
      unsigned int dest_align = get_pointer_alignment (dest);

      /* If DEST is not a pointer type, call the normal function.  */
      if (dest_align == 0)
	return NULL_RTX;

      /* If SRC and DEST are the same (and not volatile), do nothing.  */
      if (operand_equal_p (src, dest, 0))
	{
	  tree expr;

	  if (fcode != BUILT_IN_MEMPCPY_CHK)
	    {
	      /* Evaluate and ignore LEN in case it has side-effects.  */
	      expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
	      return expand_expr (dest, target, mode, EXPAND_NORMAL);
	    }

	  /* __mempcpy_chk returns DEST + LEN rather than DEST.  */
	  expr = fold_build_pointer_plus (dest, len);
	  return expand_expr (expr, target, mode, EXPAND_NORMAL);
	}

      /* __memmove_chk special case.  */
      if (fcode == BUILT_IN_MEMMOVE_CHK)
	{
	  unsigned int src_align = get_pointer_alignment (src);

	  if (src_align == 0)
	    return NULL_RTX;

	  /* If src is categorized for a readonly section we can use
	     normal __memcpy_chk.  */
	  if (readonly_data_expr (src))
	    {
	      tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	      if (!fn)
		return NULL_RTX;
	      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
					  dest, src, len, size);
	      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
	      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
	      return expand_expr (fn, target, mode, EXPAND_NORMAL);
	    }
	}
      return NULL_RTX;
    }
}
12533
12534 /* Emit warning if a buffer overflow is detected at compile time. */
12535
static void
maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
{
  int is_strlen = 0;
  tree len, size;
  location_t loc = tree_nonartificial_location (exp);

  /* Pick out, per builtin, which argument is the length written and
     which is the known destination object size.  */
  switch (fcode)
    {
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
    /* For __strcat_chk the warning will be emitted only if overflowing
       by at least strlen (dest) + 1 bytes.  */
    case BUILT_IN_STRCAT_CHK:
      len = CALL_EXPR_ARG (exp, 1);
      size = CALL_EXPR_ARG (exp, 2);
      is_strlen = 1;
      break;
    case BUILT_IN_STRNCAT_CHK:
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      len = CALL_EXPR_ARG (exp, 2);
      size = CALL_EXPR_ARG (exp, 3);
      break;
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      len = CALL_EXPR_ARG (exp, 1);
      size = CALL_EXPR_ARG (exp, 3);
      break;
    default:
      gcc_unreachable ();
    }

  if (!len || !size)
    return;

  /* SIZE == (size_t) -1 means the object size is unknown; nothing to
     check against.  */
  if (! host_integerp (size, 1) || integer_all_onesp (size))
    return;

  if (is_strlen)
    {
      /* LEN here is the source string; the amount written is its
	 length + 1, so overflow is certain only when strlen >= size.  */
      len = c_strlen (len, 1);
      if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
	return;
    }
  else if (fcode == BUILT_IN_STRNCAT_CHK)
    {
      tree src = CALL_EXPR_ARG (exp, 1);
      if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
	return;
      src = c_strlen (src, 1);
      if (! src || ! host_integerp (src, 1))
	{
	  /* Source length unknown but the bound alone reaches SIZE:
	     a potential (not certain) overflow.  */
	  warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
		      exp, get_callee_fndecl (exp));
	  return;
	}
      else if (tree_int_cst_lt (src, size))
	return;
    }
  else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
    return;

  warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
	      exp, get_callee_fndecl (exp));
}
12602
12603 /* Emit warning if a buffer overflow is detected at compile time
12604 in __sprintf_chk/__vsprintf_chk calls. */
12605
static void
maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
{
  tree size, len, fmt;
  const char *fmt_str;
  int nargs = call_expr_nargs (exp);

  /* Verify the required arguments in the original call.  */

  if (nargs < 4)
    return;
  size = CALL_EXPR_ARG (exp, 2);
  fmt = CALL_EXPR_ARG (exp, 3);

  /* SIZE == (size_t) -1 means the object size is unknown; nothing to
     check against.  */
  if (! host_integerp (size, 1) || integer_all_onesp (size))
    return;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return;

  if (!init_target_chars ())
    return;

  /* If the format doesn't contain % args or %%, we know its size.  */
  if (strchr (fmt_str, target_percent) == 0)
    len = build_int_cstu (size_type_node, strlen (fmt_str));
  /* If the format is "%s" and first ... argument is a string literal,
     we know it too.  */
  else if (fcode == BUILT_IN_SPRINTF_CHK
	   && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree arg;

      if (nargs < 5)
	return;
      arg = CALL_EXPR_ARG (exp, 4);
      if (! POINTER_TYPE_P (TREE_TYPE (arg)))
	return;

      len = c_strlen (arg, 1);
      if (!len || ! host_integerp (len, 1))
	return;
    }
  else
    return;

  /* Output needs LEN + 1 bytes (for the NUL), so LEN >= SIZE is a
     certain overflow.  */
  if (! tree_int_cst_lt (len, size))
    warning_at (tree_nonartificial_location (exp),
		0, "%Kcall to %D will always overflow destination buffer",
		exp, get_callee_fndecl (exp));
}
12659
12660 /* Emit warning if a free is called with address of a variable. */
12661
12662 static void
12663 maybe_emit_free_warning (tree exp)
12664 {
12665 tree arg = CALL_EXPR_ARG (exp, 0);
12666
12667 STRIP_NOPS (arg);
12668 if (TREE_CODE (arg) != ADDR_EXPR)
12669 return;
12670
12671 arg = get_base_address (TREE_OPERAND (arg, 0));
12672 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
12673 return;
12674
12675 if (SSA_VAR_P (arg))
12676 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
12677 "%Kattempt to free a non-heap object %qD", exp, arg);
12678 else
12679 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
12680 "%Kattempt to free a non-heap object", exp);
12681 }
12682
12683 /* Fold a call to __builtin_object_size with arguments PTR and OST,
12684 if possible. */
12685
tree
fold_builtin_object_size (tree ptr, tree ost)
{
  unsigned HOST_WIDE_INT bytes;
  int object_size_type;

  if (!validate_arg (ptr, POINTER_TYPE)
      || !validate_arg (ost, INTEGER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (ost);

  /* The object-size type must be a literal constant in [0, 3].  */
  if (TREE_CODE (ost) != INTEGER_CST
      || tree_int_cst_sgn (ost) < 0
      || compare_tree_int (ost, 3) > 0)
    return NULL_TREE;

  object_size_type = tree_low_cst (ost, 0);

  /* __builtin_object_size doesn't evaluate side-effects in its arguments;
     if there are any side-effects, it returns (size_t) -1 for types 0 and 1
     and (size_t) 0 for types 2 and 3.  */
  if (TREE_SIDE_EFFECTS (ptr))
    return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);

  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      bytes = compute_builtin_object_size (ptr, object_size_type);
      /* Only fold when the computed size is representable in size_t.  */
      if (double_int_fits_to_tree_p (size_type_node,
				     uhwi_to_double_int (bytes)))
	return build_int_cstu (size_type_node, bytes);
    }
  else if (TREE_CODE (ptr) == SSA_NAME)
    {
      /* If object size is not known yet, delay folding until
	 later.  Maybe subsequent passes will help determining
	 it.  */
      bytes = compute_builtin_object_size (ptr, object_size_type);
      if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
	  && double_int_fits_to_tree_p (size_type_node,
					uhwi_to_double_int (bytes)))
	return build_int_cstu (size_type_node, bytes);
    }

  return NULL_TREE;
}
12732
/* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   DEST, SRC, LEN, and SIZE are the arguments to the call.
   IGNORE is true, if return value can be ignored.  FCODE is the BUILT_IN_*
   code of the builtin.  If MAXLEN is not NULL, it is maximum length
   passed as third argument.  Return the folded expression, or
   NULL_TREE if a normal call should be emitted instead.  */

tree
fold_builtin_memory_chk (location_t loc, tree fndecl,
			 tree dest, tree src, tree len, tree size,
			 tree maxlen, bool ignore,
			 enum built_in_function fcode)
{
  tree fn;

  /* For memset the second argument is the fill value, an integer;
     for the copy/move variants it is a source pointer.  */
  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (src,
			(fcode == BUILT_IN_MEMSET_CHK
			 ? INTEGER_TYPE : POINTER_TYPE))
      || !validate_arg (len, INTEGER_TYPE)
      || !validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;

  /* If SRC and DEST are the same (and not volatile), return DEST
     (resp. DEST+LEN for __mempcpy_chk).  */
  if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
    {
      if (fcode != BUILT_IN_MEMPCPY_CHK)
	return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
				     dest, len);
      else
	{
	  tree temp = fold_build_pointer_plus_loc (loc, dest, len);
	  return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), temp);
	}
    }

  if (! host_integerp (size, 1))
    return NULL_TREE;

  /* An all-ones SIZE means the object size is unknown, so the check
     can never fail; fall straight through to the unchecked call.  */
  if (! integer_all_onesp (size))
    {
      if (! host_integerp (len, 1))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
	    {
	      if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
		{
		  /* (void) __mempcpy_chk () can be optimized into
		     (void) __memcpy_chk ().  */
		  fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
		  if (!fn)
		    return NULL_TREE;

		  return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
		}
	      return NULL_TREE;
	    }
	}
      else
	maxlen = len;

      /* Give up when the copy might overflow the destination.  */
      if (tree_int_cst_lt (size, maxlen))
	return NULL_TREE;
    }

  fn = NULL_TREE;
  /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
     mem{cpy,pcpy,move,set} is available.  */
  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
      break;
    case BUILT_IN_MEMPCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
      break;
    case BUILT_IN_MEMMOVE_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
      break;
    case BUILT_IN_MEMSET_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMSET);
      break;
    default:
      break;
    }

  if (!fn)
    return NULL_TREE;

  return build_call_expr_loc (loc, fn, 3, dest, src, len);
}
12827
/* Fold a call to the __st[rp]cpy_chk builtin.
   DEST, SRC, and SIZE are the arguments to the call.
   IGNORE is true if return value can be ignored.  FCODE is the BUILT_IN_*
   code of the builtin.  If MAXLEN is not NULL, it is maximum length of
   strings passed as second argument.  Return the folded expression, or
   NULL_TREE if a normal call should be emitted.  */

tree
fold_builtin_stxcpy_chk (location_t loc, tree fndecl, tree dest,
			 tree src, tree size,
			 tree maxlen, bool ignore,
			 enum built_in_function fcode)
{
  tree len, fn;

  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (src, POINTER_TYPE)
      || !validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
    return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);

  if (! host_integerp (size, 1))
    return NULL_TREE;

  /* All-ones SIZE means the object size is unknown; the check cannot
     fail, so skip straight to the unchecked function.  */
  if (! integer_all_onesp (size))
    {
      len = c_strlen (src, 1);
      if (! len || ! host_integerp (len, 1))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
	    {
	      if (fcode == BUILT_IN_STPCPY_CHK)
		{
		  if (! ignore)
		    return NULL_TREE;

		  /* If return value of __stpcpy_chk is ignored,
		     optimize into __strcpy_chk.  */
		  fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
		  if (!fn)
		    return NULL_TREE;

		  return build_call_expr_loc (loc, fn, 3, dest, src, size);
		}

	      if (! len || TREE_SIDE_EFFECTS (len))
		return NULL_TREE;

	      /* If c_strlen returned something, but not a constant,
		 transform __strcpy_chk into __memcpy_chk.  */
	      fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	      if (!fn)
		return NULL_TREE;

	      /* Copy LEN + 1 bytes so the terminating NUL comes along.  */
	      len = fold_convert_loc (loc, size_type_node, len);
	      len = size_binop_loc (loc, PLUS_EXPR, len,
				    build_int_cst (size_type_node, 1));
	      return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
				       build_call_expr_loc (loc, fn, 4,
							    dest, src, len, size));
	    }
	}
      else
	maxlen = len;

      /* Fold only when MAXLEN is strictly smaller than SIZE, i.e. the
	 string plus its NUL provably fits.  */
      if (! tree_int_cst_lt (maxlen, size))
	return NULL_TREE;
    }

  /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
			      ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
  if (!fn)
    return NULL_TREE;

  return build_call_expr_loc (loc, fn, 2, dest, src);
}
12910
/* Fold a call to the __st{r,p}ncpy_chk builtin.  DEST, SRC, LEN, and SIZE
   are the arguments to the call.  If MAXLEN is not NULL, it is maximum
   length passed as third argument.  IGNORE is true if return value can be
   ignored.  FCODE is the BUILT_IN_* code of the builtin.  Return the
   folded expression, or NULL_TREE if a normal call should be emitted.  */

tree
fold_builtin_stxncpy_chk (location_t loc, tree dest, tree src,
			  tree len, tree size, tree maxlen, bool ignore,
			  enum built_in_function fcode)
{
  tree fn;

  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (src, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE)
      || !validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;

  if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
    {
      /* If return value of __stpncpy_chk is ignored,
	 optimize into __strncpy_chk.  */
      fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
      if (fn)
	return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
    }

  if (! host_integerp (size, 1))
    return NULL_TREE;

  /* All-ones SIZE means the object size is unknown; the check cannot
     fail, so drop straight to the unchecked function.  */
  if (! integer_all_onesp (size))
    {
      if (! host_integerp (len, 1))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
	    return NULL_TREE;
	}
      else
	maxlen = len;

      /* Give up when the copy might overflow the destination.  */
      if (tree_int_cst_lt (size, maxlen))
	return NULL_TREE;
    }

  /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
			      ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
  if (!fn)
    return NULL_TREE;

  return build_call_expr_loc (loc, fn, 3, dest, src, len);
}
12966
12967 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12968 are the arguments to the call. */
12969
12970 static tree
12971 fold_builtin_strcat_chk (location_t loc, tree fndecl, tree dest,
12972 tree src, tree size)
12973 {
12974 tree fn;
12975 const char *p;
12976
12977 if (!validate_arg (dest, POINTER_TYPE)
12978 || !validate_arg (src, POINTER_TYPE)
12979 || !validate_arg (size, INTEGER_TYPE))
12980 return NULL_TREE;
12981
12982 p = c_getstr (src);
12983 /* If the SRC parameter is "", return DEST. */
12984 if (p && *p == '\0')
12985 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12986
12987 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
12988 return NULL_TREE;
12989
12990 /* If __builtin_strcat_chk is used, assume strcat is available. */
12991 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
12992 if (!fn)
12993 return NULL_TREE;
12994
12995 return build_call_expr_loc (loc, fn, 2, dest, src);
12996 }
12997
12998 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12999 LEN, and SIZE. */
13000
13001 static tree
13002 fold_builtin_strncat_chk (location_t loc, tree fndecl,
13003 tree dest, tree src, tree len, tree size)
13004 {
13005 tree fn;
13006 const char *p;
13007
13008 if (!validate_arg (dest, POINTER_TYPE)
13009 || !validate_arg (src, POINTER_TYPE)
13010 || !validate_arg (size, INTEGER_TYPE)
13011 || !validate_arg (size, INTEGER_TYPE))
13012 return NULL_TREE;
13013
13014 p = c_getstr (src);
13015 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
13016 if (p && *p == '\0')
13017 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
13018 else if (integer_zerop (len))
13019 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
13020
13021 if (! host_integerp (size, 1))
13022 return NULL_TREE;
13023
13024 if (! integer_all_onesp (size))
13025 {
13026 tree src_len = c_strlen (src, 1);
13027 if (src_len
13028 && host_integerp (src_len, 1)
13029 && host_integerp (len, 1)
13030 && ! tree_int_cst_lt (len, src_len))
13031 {
13032 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
13033 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
13034 if (!fn)
13035 return NULL_TREE;
13036
13037 return build_call_expr_loc (loc, fn, 3, dest, src, size);
13038 }
13039 return NULL_TREE;
13040 }
13041
13042 /* If __builtin_strncat_chk is used, assume strncat is available. */
13043 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
13044 if (!fn)
13045 return NULL_TREE;
13046
13047 return build_call_expr_loc (loc, fn, 3, dest, src, len);
13048 }
13049
/* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
   Return NULL_TREE if a normal call should be emitted rather than
   expanding the function inline.  FCODE is either BUILT_IN_SPRINTF_CHK
   or BUILT_IN_VSPRINTF_CHK.  The fixed arguments are
   ARGS[0] = dest, ARGS[1] = flag, ARGS[2] = size, ARGS[3] = fmt;
   ARGS[4], when present, is the first variadic argument.  */

static tree
fold_builtin_sprintf_chk_1 (location_t loc, int nargs, tree *args,
			    enum built_in_function fcode)
{
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;

  /* Verify the required arguments in the original call.  */
  if (nargs < 4)
    return NULL_TREE;
  dest = args[0];
  if (!validate_arg (dest, POINTER_TYPE))
    return NULL_TREE;
  flag = args[1];
  if (!validate_arg (flag, INTEGER_TYPE))
    return NULL_TREE;
  size = args[2];
  if (!validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;
  fmt = args[3];
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  if (! host_integerp (size, 1))
    return NULL_TREE;

  /* LEN, when determinable, is the number of bytes the call would
     produce, excluding the terminating NUL.  */
  len = NULL_TREE;

  if (!init_target_chars ())
    return NULL_TREE;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str != NULL)
    {
      /* If the format doesn't contain % args or %%, we know the size.  */
      if (strchr (fmt_str, target_percent) == 0)
	{
	  if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
	    len = build_int_cstu (size_type_node, strlen (fmt_str));
	}
      /* If the format is "%s" and first ... argument is a string literal,
	 we know the size too.  */
      else if (fcode == BUILT_IN_SPRINTF_CHK
	       && strcmp (fmt_str, target_percent_s) == 0)
	{
	  tree arg;

	  if (nargs == 5)
	    {
	      arg = args[4];
	      if (validate_arg (arg, POINTER_TYPE))
		{
		  len = c_strlen (arg, 1);
		  if (! len || ! host_integerp (len, 1))
		    len = NULL_TREE;
		}
	    }
	}
    }

  /* Unless SIZE is all-ones (object size unknown), only fold when the
     output provably fits, i.e. LEN is known and LEN < SIZE.  */
  if (! integer_all_onesp (size))
    {
      if (! len || ! tree_int_cst_lt (len, size))
	return NULL_TREE;
    }

  /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      if (fmt_str == NULL)
	return NULL_TREE;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return NULL_TREE;
    }

  /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
			      ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
  if (!fn)
    return NULL_TREE;

  /* Drop the flag and size arguments; keep dest, fmt and the varargs.  */
  return rewrite_call_expr_array (loc, nargs, args, 4, fn, 2, dest, fmt);
}
13141
13142 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
13143 a normal call should be emitted rather than expanding the function
13144 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
13145
13146 static tree
13147 fold_builtin_sprintf_chk (location_t loc, tree exp,
13148 enum built_in_function fcode)
13149 {
13150 return fold_builtin_sprintf_chk_1 (loc, call_expr_nargs (exp),
13151 CALL_EXPR_ARGP (exp), fcode);
13152 }
13153
/* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS.  Return
   NULL_TREE if a normal call should be emitted rather than expanding
   the function inline.  FCODE is either BUILT_IN_SNPRINTF_CHK or
   BUILT_IN_VSNPRINTF_CHK.  If MAXLEN is not NULL, it is maximum length
   passed as second argument.  The fixed arguments are ARGS[0] = dest,
   ARGS[1] = len, ARGS[2] = flag, ARGS[3] = size, ARGS[4] = fmt.  */

static tree
fold_builtin_snprintf_chk_1 (location_t loc, int nargs, tree *args,
			     tree maxlen, enum built_in_function fcode)
{
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;

  /* Verify the required arguments in the original call.  */
  if (nargs < 5)
    return NULL_TREE;
  dest = args[0];
  if (!validate_arg (dest, POINTER_TYPE))
    return NULL_TREE;
  len = args[1];
  if (!validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;
  flag = args[2];
  if (!validate_arg (flag, INTEGER_TYPE))
    return NULL_TREE;
  size = args[3];
  if (!validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;
  fmt = args[4];
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  if (! host_integerp (size, 1))
    return NULL_TREE;

  /* All-ones SIZE means the object size is unknown, so the runtime
     check cannot fail and the length comparison is unnecessary.  */
  if (! integer_all_onesp (size))
    {
      if (! host_integerp (len, 1))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
	    return NULL_TREE;
	}
      else
	maxlen = len;

      /* Give up when the output buffer bound may exceed the object.  */
      if (tree_int_cst_lt (size, maxlen))
	return NULL_TREE;
    }

  if (!init_target_chars ())
    return NULL_TREE;

  /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      fmt_str = c_getstr (fmt);
      if (fmt_str == NULL)
	return NULL_TREE;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return NULL_TREE;
    }

  /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
     available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
			      ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
  if (!fn)
    return NULL_TREE;

  /* Drop the flag and size arguments; keep dest, len, fmt and varargs.  */
  return rewrite_call_expr_array (loc, nargs, args, 5, fn, 3, dest, len, fmt);
}
13230
13231 /* Fold a call EXP to {,v}snprintf. Return NULL_TREE if
13232 a normal call should be emitted rather than expanding the function
13233 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13234 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13235 passed as second argument. */
13236
13237 tree
13238 fold_builtin_snprintf_chk (location_t loc, tree exp, tree maxlen,
13239 enum built_in_function fcode)
13240 {
13241 return fold_builtin_snprintf_chk_1 (loc, call_expr_nargs (exp),
13242 CALL_EXPR_ARGP (exp), maxlen, fcode);
13243 }
13244
/* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
   FMT and ARG are the arguments to the call; we don't fold cases with
   more than 2 arguments, and ARG may be null if this is a 1-argument case.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.  FCODE is the BUILT_IN_*
   code of the function to be simplified.  */

static tree
fold_builtin_printf (location_t loc, tree fndecl, tree fmt,
		     tree arg, bool ignore,
		     enum built_in_function fcode)
{
  tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (! ignore)
    return NULL_TREE;

  /* Verify the required arguments in the original call.  */
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return NULL_TREE;

  if (fcode == BUILT_IN_PRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
      fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
    }
  else
    {
      fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
      fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
    }

  if (!init_target_chars ())
    return NULL_TREE;

  /* Handle a format of exactly "%s", or one with no % directives
     at all; in both cases the output string is known.  */
  if (strcmp (fmt_str, target_percent_s) == 0
      || strchr (fmt_str, target_percent) == NULL)
    {
      const char *str;

      if (strcmp (fmt_str, target_percent_s) == 0)
	{
	  if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
	    return NULL_TREE;

	  if (!arg || !validate_arg (arg, POINTER_TYPE))
	    return NULL_TREE;

	  str = c_getstr (arg);
	  if (str == NULL)
	    return NULL_TREE;
	}
      else
	{
	  /* The format specifier doesn't contain any '%' characters.  */
	  if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
	      && arg)
	    return NULL_TREE;
	  str = fmt_str;
	}

      /* If the string was "", printf does nothing.  */
      if (str[0] == '\0')
	return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);

      /* If the string has length of 1, call putchar.  */
      if (str[1] == '\0')
	{
	  /* Given printf("c"), (where c is any one character,)
	     convert "c"[0] to an int and pass that to the replacement
	     function.  */
	  newarg = build_int_cst (integer_type_node, str[0]);
	  if (fn_putchar)
	    call = build_call_expr_loc (loc, fn_putchar, 1, newarg);
	}
      else
	{
	  /* If the string was "string\n", call puts("string").  */
	  size_t len = strlen (str);
	  /* The (size_t)(int) round-trip guards against lengths that
	     don't fit in int (STRING_CST lengths are ints).  */
	  if ((unsigned char)str[len - 1] == target_newline
	      && (size_t) (int) len == len
	      && (int) len > 0)
	    {
	      char *newstr;
	      tree offset_node, string_cst;

	      /* Create a NUL-terminated string that's one char shorter
		 than the original, stripping off the trailing '\n'.  */
	      newarg = build_string_literal (len, str);
	      string_cst = string_constant (newarg, &offset_node);
	      gcc_checking_assert (string_cst
				   && (TREE_STRING_LENGTH (string_cst)
				       == (int) len)
				   && integer_zerop (offset_node)
				   && (unsigned char)
				      TREE_STRING_POINTER (string_cst)[len - 1]
				      == target_newline);
	      /* build_string_literal creates a new STRING_CST,
		 modify it in place to avoid double copying.  */
	      newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
	      newstr[len - 1] = '\0';
	      if (fn_puts)
		call = build_call_expr_loc (loc, fn_puts, 1, newarg);
	    }
	  else
	    /* We'd like to arrange to call fputs(string,stdout) here,
	       but we need stdout and don't have a way to get it yet.  */
	    return NULL_TREE;
	}
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
    return NULL_TREE;

  /* If the format specifier was "%s\n", call __builtin_puts(arg).  */
  else if (strcmp (fmt_str, target_percent_s_newline) == 0)
    {
      if (!arg || !validate_arg (arg, POINTER_TYPE))
	return NULL_TREE;
      if (fn_puts)
	call = build_call_expr_loc (loc, fn_puts, 1, arg);
    }

  /* If the format specifier was "%c", call __builtin_putchar(arg).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg || !validate_arg (arg, INTEGER_TYPE))
	return NULL_TREE;
      if (fn_putchar)
	call = build_call_expr_loc (loc, fn_putchar, 1, arg);
    }

  if (!call)
    return NULL_TREE;

  return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
}
13393
/* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
   FP, FMT, and ARG are the arguments to the call.  We don't fold calls with
   more than 3 arguments, and ARG may be null in the 2-argument case.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.  FCODE is the BUILT_IN_*
   code of the function to be simplified.  */

static tree
fold_builtin_fprintf (location_t loc, tree fndecl, tree fp,
		      tree fmt, tree arg, bool ignore,
		      enum built_in_function fcode)
{
  tree fn_fputc, fn_fputs, call = NULL_TREE;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (! ignore)
    return NULL_TREE;

  /* Verify the required arguments in the original call.  */
  if (!validate_arg (fp, POINTER_TYPE))
    return NULL_TREE;
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return NULL_TREE;

  if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
      fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
    }
  else
    {
      fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
      fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
    }

  if (!init_target_chars ())
    return NULL_TREE;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      /* A trailing argument with a %-free format is only valid for the
	 va_list variants (where it is the va_list itself).  */
      if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
	  && arg)
	return NULL_TREE;

      /* If the format specifier was "", fprintf does nothing.  */
      if (fmt_str[0] == '\0')
	{
	  /* If FP has side-effects, just wait until gimplification is
	     done.  */
	  if (TREE_SIDE_EFFECTS (fp))
	    return NULL_TREE;

	  return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
	}

      /* When "string" doesn't contain %, replace all cases of
	 fprintf (fp, string) with fputs (string, fp).  The fputs
	 builtin will take care of special cases like length == 1.  */
      if (fn_fputs)
	call = build_call_expr_loc (loc, fn_fputs, 2, fmt, fp);
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
    return NULL_TREE;

  /* If the format specifier was "%s", call __builtin_fputs (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_s) == 0)
    {
      if (!arg || !validate_arg (arg, POINTER_TYPE))
	return NULL_TREE;
      if (fn_fputs)
	call = build_call_expr_loc (loc, fn_fputs, 2, arg, fp);
    }

  /* If the format specifier was "%c", call __builtin_fputc (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg || !validate_arg (arg, INTEGER_TYPE))
	return NULL_TREE;
      if (fn_fputc)
	call = build_call_expr_loc (loc, fn_fputc, 2, arg, fp);
    }

  if (!call)
    return NULL_TREE;
  return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
}
13492
13493 /* Initialize format string characters in the target charset. */
13494
13495 static bool
13496 init_target_chars (void)
13497 {
13498 static bool init;
13499 if (!init)
13500 {
13501 target_newline = lang_hooks.to_target_charset ('\n');
13502 target_percent = lang_hooks.to_target_charset ('%');
13503 target_c = lang_hooks.to_target_charset ('c');
13504 target_s = lang_hooks.to_target_charset ('s');
13505 if (target_newline == 0 || target_percent == 0 || target_c == 0
13506 || target_s == 0)
13507 return false;
13508
13509 target_percent_c[0] = target_percent;
13510 target_percent_c[1] = target_c;
13511 target_percent_c[2] = '\0';
13512
13513 target_percent_s[0] = target_percent;
13514 target_percent_s[1] = target_s;
13515 target_percent_s[2] = '\0';
13516
13517 target_percent_s_newline[0] = target_percent;
13518 target_percent_s_newline[1] = target_s;
13519 target_percent_s_newline[2] = target_newline;
13520 target_percent_s_newline[3] = '\0';
13521
13522 init = true;
13523 }
13524 return true;
13525 }
13526
/* Helper function for do_mpfr_arg*().  Ensure M is a normal number
   and no overflow/underflow occurred.  INEXACT is true if M was not
   exactly calculated.  TYPE is the tree type for the result.  This
   function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail,
   otherwise a REAL_CST of TYPE holding M's value.  */

static tree
do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
      && (!flag_rounding_math || !inexact))
    {
      REAL_VALUE_TYPE rr;

      real_from_mpfr (&rr, m, type, GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
	 check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
	 but the mpfr_t is not, then we underflowed in the
	 conversion.  */
      if (real_isfinite (&rr)
	  && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
        {
	  REAL_VALUE_TYPE rmode;

	  real_convert (&rmode, TYPE_MODE (type), &rr);
	  /* Proceed iff the specified mode can hold the value.  */
	  if (real_identical (&rmode, &rr))
	    return build_real (type, rmode);
	}
    }
  return NULL_TREE;
}
13563
/* Helper function for do_mpc_arg*().  Ensure M is a normal complex
   number and no overflow/underflow occurred.  INEXACT is true if M
   was not exactly calculated.  TYPE is the tree type for the result.
   This function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail; if
   FORCE_CONVERT is true, then bypass the checks and always build the
   COMPLEX_CST.  */

static tree
do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (force_convert
      || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
	  && !mpfr_overflow_p () && !mpfr_underflow_p ()
	  && (!flag_rounding_math || !inexact)))
    {
      REAL_VALUE_TYPE re, im;

      /* TYPE is a complex type; its TREE_TYPE is the component type.  */
      real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
      real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
	 check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
	 but the mpfr_t is not, then we underflowed in the
	 conversion.  */
      if (force_convert
	  || (real_isfinite (&re) && real_isfinite (&im)
	      && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
	      && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
        {
	  REAL_VALUE_TYPE re_mode, im_mode;

	  real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
	  real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
	  /* Proceed iff the specified mode can hold the value.  */
	  if (force_convert
	      || (real_identical (&re_mode, &re)
		  && real_identical (&im_mode, &im)))
	    return build_complex (type, build_real (TREE_TYPE (type), re_mode),
				  build_real (TREE_TYPE (type), im_mode));
	}
    }
  return NULL_TREE;
}
13610
/* If argument ARG is a REAL_CST, call the one-argument mpfr function
   FUNC on it and return the resulting value as a tree with type TYPE.
   If MIN and/or MAX are not NULL, then the supplied ARG must be
   within those bounds.  If INCLUSIVE is true, then MIN/MAX are
   acceptable values, otherwise they are not.  The mpfr precision is
   set to the precision of TYPE.  We assume that function FUNC returns
   zero if the result could be calculated exactly within the requested
   precision.  Return NULL_TREE when folding is not possible.  */

static tree
do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
	      const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
	      bool inclusive)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);

      /* Check finiteness and the optional domain bounds before
	 evaluating FUNC.  */
      if (real_isfinite (ra)
	  && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
	  && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
        {
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  int inexact;
	  mpfr_t m;

	  mpfr_init2 (m, prec);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  /* Clear the flags so do_mpfr_ckconv can detect any
	     overflow/underflow raised by FUNC itself.  */
	  mpfr_clear_flags ();
	  inexact = func (m, m, rnd);
	  result = do_mpfr_ckconv (m, type, inexact);
	  mpfr_clear (m);
	}
    }

  return result;
}
13657
/* If argument ARG is a REAL_CST, call the two-argument mpfr function
   FUNC on it and return the resulting value as a tree with type TYPE.
   The mpfr precision is set to the precision of TYPE.  We assume that
   function FUNC returns zero if the result could be calculated
   exactly within the requested precision.  Return NULL_TREE when
   folding is not possible.  */

static tree
do_mpfr_arg2 (tree arg1, tree arg2, tree type,
	      int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
    {
      const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
      const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);

      if (real_isfinite (ra1) && real_isfinite (ra2))
        {
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  int inexact;
	  mpfr_t m1, m2;

	  mpfr_inits2 (prec, m1, m2, NULL);
	  mpfr_from_real (m1, ra1, GMP_RNDN);
	  mpfr_from_real (m2, ra2, GMP_RNDN);
	  /* Clear the flags so do_mpfr_ckconv can detect any
	     overflow/underflow raised by FUNC itself.  */
	  mpfr_clear_flags ();
	  inexact = func (m1, m1, m2, rnd);
	  result = do_mpfr_ckconv (m1, type, inexact);
	  mpfr_clears (m1, m2, NULL);
	}
    }

  return result;
}
13702
13703 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
13704 FUNC on it and return the resulting value as a tree with type TYPE.
13705 The mpfr precision is set to the precision of TYPE. We assume that
13706 function FUNC returns zero if the result could be calculated
13707 exactly within the requested precision. */
13708
13709 static tree
13710 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
13711 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13712 {
13713 tree result = NULL_TREE;
13714
13715 STRIP_NOPS (arg1);
13716 STRIP_NOPS (arg2);
13717 STRIP_NOPS (arg3);
13718
13719 /* To proceed, MPFR must exactly represent the target floating point
13720 format, which only happens when the target base equals two. */
13721 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13722 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13723 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
13724 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
13725 {
13726 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13727 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13728 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
13729
13730 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
13731 {
13732 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13733 const int prec = fmt->p;
13734 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13735 int inexact;
13736 mpfr_t m1, m2, m3;
13737
13738 mpfr_inits2 (prec, m1, m2, m3, NULL);
13739 mpfr_from_real (m1, ra1, GMP_RNDN);
13740 mpfr_from_real (m2, ra2, GMP_RNDN);
13741 mpfr_from_real (m3, ra3, GMP_RNDN);
13742 mpfr_clear_flags ();
13743 inexact = func (m1, m1, m2, m3, rnd);
13744 result = do_mpfr_ckconv (m1, type, inexact);
13745 mpfr_clears (m1, m2, m3, NULL);
13746 }
13747 }
13748
13749 return result;
13750 }
13751
/* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
   the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
   If ARG_SINP and ARG_COSP are NULL then the result is returned
   as a complex value.
   The type is taken from the type of ARG and is used for setting the
   precision of the calculation and results.
   Returns NULL_TREE if folding is not possible.  */

static tree
do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
{
  tree const type = TREE_TYPE (arg);
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST
      && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);

      if (real_isfinite (ra))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  tree result_s, result_c;
	  int inexact;
	  mpfr_t m, ms, mc;

	  /* Compute both sin and cos with a single MPFR call.  */
	  mpfr_inits2 (prec, m, ms, mc, NULL);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  mpfr_clear_flags ();
	  inexact = mpfr_sin_cos (ms, mc, m, rnd);
	  /* Either conversion may yield NULL_TREE; fold only when both
	     results convert back to trees.  */
	  result_s = do_mpfr_ckconv (ms, type, inexact);
	  result_c = do_mpfr_ckconv (mc, type, inexact);
	  mpfr_clears (m, ms, mc, NULL);
	  if (result_s && result_c)
	    {
	      /* If we are to return in a complex value do so.  Note the
		 order: the real part receives the cosine, the imaginary
		 part the sine.  */
	      if (!arg_sinp && !arg_cosp)
		return build_complex (build_complex_type (type),
				      result_c, result_s);

	      /* Dereference the sin/cos pointer arguments.  */
	      arg_sinp = build_fold_indirect_ref (arg_sinp);
	      arg_cosp = build_fold_indirect_ref (arg_cosp);
	      /* Proceed if valid pointer type were passed in.  Both
		 pointees must have the same main variant as TYPE.  */
	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
		  && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
		{
		  /* Set the values.  The stores must be marked as having
		     side effects so they are not optimized away.  */
		  result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
					  result_s);
		  TREE_SIDE_EFFECTS (result_s) = 1;
		  result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
					  result_c);
		  TREE_SIDE_EFFECTS (result_c) = 1;
		  /* Combine the assignments into a compound expr.  */
		  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						    result_s, result_c));
		}
	    }
	}
    }
  return result;
}
13821
13822 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
13823 two-argument mpfr order N Bessel function FUNC on them and return
13824 the resulting value as a tree with type TYPE. The mpfr precision
13825 is set to the precision of TYPE. We assume that function FUNC
13826 returns zero if the result could be calculated exactly within the
13827 requested precision. */
13828 static tree
13829 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
13830 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
13831 const REAL_VALUE_TYPE *min, bool inclusive)
13832 {
13833 tree result = NULL_TREE;
13834
13835 STRIP_NOPS (arg1);
13836 STRIP_NOPS (arg2);
13837
13838 /* To proceed, MPFR must exactly represent the target floating point
13839 format, which only happens when the target base equals two. */
13840 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13841 && host_integerp (arg1, 0)
13842 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13843 {
13844 const HOST_WIDE_INT n = tree_low_cst(arg1, 0);
13845 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
13846
13847 if (n == (long)n
13848 && real_isfinite (ra)
13849 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
13850 {
13851 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13852 const int prec = fmt->p;
13853 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13854 int inexact;
13855 mpfr_t m;
13856
13857 mpfr_init2 (m, prec);
13858 mpfr_from_real (m, ra, GMP_RNDN);
13859 mpfr_clear_flags ();
13860 inexact = func (m, n, m, rnd);
13861 result = do_mpfr_ckconv (m, type, inexact);
13862 mpfr_clear (m);
13863 }
13864 }
13865
13866 return result;
13867 }
13868
/* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
   the pointer *(ARG_QUO) and return the result.  The type is taken
   from the type of ARG0 and is used for setting the precision of the
   calculation and results.  Returns NULL_TREE if folding is not
   possible.  */

static tree
do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
{
  tree const type = TREE_TYPE (arg0);
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
    {
      const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
      const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);

      /* Both operands must be finite for the fold to be valid.  */
      if (real_isfinite (ra0) && real_isfinite (ra1))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  tree result_rem;
	  long integer_quo;
	  mpfr_t m0, m1;

	  mpfr_inits2 (prec, m0, m1, NULL);
	  mpfr_from_real (m0, ra0, GMP_RNDN);
	  mpfr_from_real (m1, ra1, GMP_RNDN);
	  mpfr_clear_flags ();
	  mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
	  /* Remquo is independent of the rounding mode, so pass
	     inexact=0 to do_mpfr_ckconv().  */
	  result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
	  mpfr_clears (m0, m1, NULL);
	  if (result_rem)
	    {
	      /* MPFR calculates quo in the host's long so it may
		 return more bits in quo than the target int can hold
		 if sizeof(host long) > sizeof(target int).  This can
		 happen even for native compilers in LP64 mode.  In
		 these cases, modulo the quo value with the largest
		 number that the target int can hold while leaving one
		 bit for the sign.  */
	      if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
		integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));

	      /* Dereference the quo pointer argument.  */
	      arg_quo = build_fold_indirect_ref (arg_quo);
	      /* Proceed iff a valid pointer type was passed in.  Only
		 a plain int pointee is accepted here.  */
	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
		{
		  /* Set the value.  Mark the store as having side
		     effects so it is not optimized away.  */
		  tree result_quo
		    = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
				   build_int_cst (TREE_TYPE (arg_quo),
						  integer_quo));
		  TREE_SIDE_EFFECTS (result_quo) = 1;
		  /* Combine the quo assignment with the rem.  */
		  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						    result_quo, result_rem));
		}
	    }
	}
    }
  return result;
}
13942
/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
   resulting value as a tree with type TYPE.  The mpfr precision is
   set to the precision of TYPE.  We assume that this mpfr function
   returns zero if the result could be calculated exactly within the
   requested precision.  In addition, the integer pointer represented
   by ARG_SG will be dereferenced and set to the appropriate signgam
   (-1,1) value.  Returns NULL_TREE if folding is not possible.  */

static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
	 negative integer.  */
      if (real_isfinite (ra)
	  && ra->cl != rvc_zero
	  && !(real_isneg(ra) && real_isinteger(ra, TYPE_MODE (type))))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  int inexact, sg;
	  mpfr_t m;
	  tree result_lg;

	  mpfr_init2 (m, prec);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  mpfr_clear_flags ();
	  /* mpfr_lgamma stores the sign of gamma(x) in SG.  */
	  inexact = mpfr_lgamma (m, &sg, m, rnd);
	  result_lg = do_mpfr_ckconv (m, type, inexact);
	  mpfr_clear (m);
	  if (result_lg)
	    {
	      tree result_sg;

	      /* Dereference the arg_sg pointer argument.  */
	      arg_sg = build_fold_indirect_ref (arg_sg);
	      /* Assign the signgam value into *arg_sg.  The store must
		 be marked as having side effects so it is not
		 optimized away.  */
	      result_sg = fold_build2 (MODIFY_EXPR,
				       TREE_TYPE (arg_sg), arg_sg,
				       build_int_cst (TREE_TYPE (arg_sg), sg));
	      TREE_SIDE_EFFECTS (result_sg) = 1;
	      /* Combine the signgam assignment with the lgamma result.  */
	      result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						result_sg, result_lg));
	    }
	}
    }

  return result;
}
14007
14008 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
14009 function FUNC on it and return the resulting value as a tree with
14010 type TYPE. The mpfr precision is set to the precision of TYPE. We
14011 assume that function FUNC returns zero if the result could be
14012 calculated exactly within the requested precision. */
14013
14014 static tree
14015 do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
14016 {
14017 tree result = NULL_TREE;
14018
14019 STRIP_NOPS (arg);
14020
14021 /* To proceed, MPFR must exactly represent the target floating point
14022 format, which only happens when the target base equals two. */
14023 if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
14024 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
14025 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
14026 {
14027 const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
14028 const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
14029
14030 if (real_isfinite (re) && real_isfinite (im))
14031 {
14032 const struct real_format *const fmt =
14033 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
14034 const int prec = fmt->p;
14035 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
14036 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
14037 int inexact;
14038 mpc_t m;
14039
14040 mpc_init2 (m, prec);
14041 mpfr_from_real (mpc_realref(m), re, rnd);
14042 mpfr_from_real (mpc_imagref(m), im, rnd);
14043 mpfr_clear_flags ();
14044 inexact = func (m, m, crnd);
14045 result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
14046 mpc_clear (m);
14047 }
14048 }
14049
14050 return result;
14051 }
14052
14053 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
14054 mpc function FUNC on it and return the resulting value as a tree
14055 with type TYPE. The mpfr precision is set to the precision of
14056 TYPE. We assume that function FUNC returns zero if the result
14057 could be calculated exactly within the requested precision. If
14058 DO_NONFINITE is true, then fold expressions containing Inf or NaN
14059 in the arguments and/or results. */
14060
14061 tree
14062 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
14063 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
14064 {
14065 tree result = NULL_TREE;
14066
14067 STRIP_NOPS (arg0);
14068 STRIP_NOPS (arg1);
14069
14070 /* To proceed, MPFR must exactly represent the target floating point
14071 format, which only happens when the target base equals two. */
14072 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
14073 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
14074 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
14075 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
14076 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
14077 {
14078 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
14079 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
14080 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
14081 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
14082
14083 if (do_nonfinite
14084 || (real_isfinite (re0) && real_isfinite (im0)
14085 && real_isfinite (re1) && real_isfinite (im1)))
14086 {
14087 const struct real_format *const fmt =
14088 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
14089 const int prec = fmt->p;
14090 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
14091 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
14092 int inexact;
14093 mpc_t m0, m1;
14094
14095 mpc_init2 (m0, prec);
14096 mpc_init2 (m1, prec);
14097 mpfr_from_real (mpc_realref(m0), re0, rnd);
14098 mpfr_from_real (mpc_imagref(m0), im0, rnd);
14099 mpfr_from_real (mpc_realref(m1), re1, rnd);
14100 mpfr_from_real (mpc_imagref(m1), im1, rnd);
14101 mpfr_clear_flags ();
14102 inexact = func (m0, m0, m1, crnd);
14103 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
14104 mpc_clear (m0);
14105 mpc_clear (m1);
14106 }
14107 }
14108
14109 return result;
14110 }
14111
14112 /* Fold a call STMT to __{,v}sprintf_chk. Return NULL_TREE if
14113 a normal call should be emitted rather than expanding the function
14114 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
14115
14116 static tree
14117 gimple_fold_builtin_sprintf_chk (gimple stmt, enum built_in_function fcode)
14118 {
14119 int nargs = gimple_call_num_args (stmt);
14120
14121 return fold_builtin_sprintf_chk_1 (gimple_location (stmt), nargs,
14122 (nargs > 0
14123 ? gimple_call_arg_ptr (stmt, 0)
14124 : &error_mark_node), fcode);
14125 }
14126
14127 /* Fold a call STMT to {,v}snprintf. Return NULL_TREE if
14128 a normal call should be emitted rather than expanding the function
14129 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
14130 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
14131 passed as second argument. */
14132
14133 tree
14134 gimple_fold_builtin_snprintf_chk (gimple stmt, tree maxlen,
14135 enum built_in_function fcode)
14136 {
14137 int nargs = gimple_call_num_args (stmt);
14138
14139 return fold_builtin_snprintf_chk_1 (gimple_location (stmt), nargs,
14140 (nargs > 0
14141 ? gimple_call_arg_ptr (stmt, 0)
14142 : &error_mark_node), maxlen, fcode);
14143 }
14144
14145 /* Builtins with folding operations that operate on "..." arguments
14146 need special handling; we need to store the arguments in a convenient
14147 data structure before attempting any folding. Fortunately there are
14148 only a few builtins that fall into this category. FNDECL is the
14149 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
14150 result of the function call is ignored. */
14151
14152 static tree
14153 gimple_fold_builtin_varargs (tree fndecl, gimple stmt,
14154 bool ignore ATTRIBUTE_UNUSED)
14155 {
14156 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
14157 tree ret = NULL_TREE;
14158
14159 switch (fcode)
14160 {
14161 case BUILT_IN_SPRINTF_CHK:
14162 case BUILT_IN_VSPRINTF_CHK:
14163 ret = gimple_fold_builtin_sprintf_chk (stmt, fcode);
14164 break;
14165
14166 case BUILT_IN_SNPRINTF_CHK:
14167 case BUILT_IN_VSNPRINTF_CHK:
14168 ret = gimple_fold_builtin_snprintf_chk (stmt, NULL_TREE, fcode);
14169
14170 default:
14171 break;
14172 }
14173 if (ret)
14174 {
14175 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
14176 TREE_NO_WARNING (ret) = 1;
14177 return ret;
14178 }
14179 return NULL_TREE;
14180 }
14181
/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  Returns the
   folded expression, or NULL_TREE if the call should be left as is.  */

tree
fold_call_stmt (gimple stmt, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = gimple_call_fndecl (stmt);
  location_t loc = gimple_location (stmt);
  /* Only fold direct calls to builtin decls; calls using
     __builtin_va_arg_pack are skipped (presumably so they survive
     until inlining — confirm against the inliner).  */
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      && !gimple_call_va_arg_pack_p (stmt))
    {
      int nargs = gimple_call_num_args (stmt);
      /* For a zero-argument call pass a dummy pointer so callees can
	 index the argument array uniformly.  */
      tree *args = (nargs > 0
		    ? gimple_call_arg_ptr (stmt, 0)
		    : &error_mark_node);

      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	{
	  /* Machine-specific builtins are folded via the target hook.  */
          return targetm.fold_builtin (fndecl, nargs, args, ignore);
        }
      else
	{
	  /* Try the generic n-argument folder first, then the varargs
	     builtin handling.  */
	  if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
	    ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
	  if (!ret)
	    ret = gimple_fold_builtin_varargs (fndecl, stmt, ignore);
	  if (ret)
	    {
	      /* Propagate location information from original call to
		 expansion of builtin.  Otherwise things like
		 maybe_emit_chk_warning, that operate on the expansion
		 of a builtin, will use the wrong location information.  */
	      if (gimple_has_location (stmt))
                {
		  tree realret = ret;
		  /* Look through the NOP_EXPR wrapper that
		     gimple_fold_builtin_varargs may have added.  */
		  if (TREE_CODE (ret) == NOP_EXPR)
		    realret = TREE_OPERAND (ret, 0);
		  if (CAN_HAVE_LOCATION_P (realret)
		      && !EXPR_HAS_LOCATION (realret))
		    SET_EXPR_LOCATION (realret, loc);
                  return realret;
                }
	      return ret;
	    }
        }
    }
  return NULL_TREE;
}
14236
/* Look up the function in builtin_decl that corresponds to DECL
   and set ASMSPEC as its user assembler name.  DECL must be a
   function decl that declares a builtin.  */

void
set_builtin_user_assembler_name (tree decl, const char *asmspec)
{
  tree builtin;
  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
	      && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
	      && asmspec != 0);

  /* Rename the explicit builtin declaration itself.  */
  builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
  set_user_assembler_name (builtin, asmspec);
  /* For builtins with a corresponding libfunc, also point the libfunc
     at the user-supplied name so RTL expansion emits calls to it.  */
  switch (DECL_FUNCTION_CODE (decl))
    {
    case BUILT_IN_MEMCPY:
      init_block_move_fn (asmspec);
      memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
      break;
    case BUILT_IN_MEMSET:
      init_block_clear_fn (asmspec);
      memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
      break;
    case BUILT_IN_MEMMOVE:
      memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
      break;
    case BUILT_IN_MEMCMP:
      memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
      break;
    case BUILT_IN_ABORT:
      abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
      break;
    case BUILT_IN_FFS:
      /* The ffs libfunc is only adjusted when int is narrower than a
	 word; the optab entry for the int-sized mode is renamed too.  */
      if (INT_TYPE_SIZE < BITS_PER_WORD)
	{
	  set_user_assembler_libfunc ("ffs", asmspec);
	  set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
						       MODE_INT, 0), "ffs");
	}
      break;
    default:
      break;
    }
}
14282
14283 /* Return true if DECL is a builtin that expands to a constant or similarly
14284 simple code. */
14285 bool
14286 is_simple_builtin (tree decl)
14287 {
14288 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
14289 switch (DECL_FUNCTION_CODE (decl))
14290 {
14291 /* Builtins that expand to constants. */
14292 case BUILT_IN_CONSTANT_P:
14293 case BUILT_IN_EXPECT:
14294 case BUILT_IN_OBJECT_SIZE:
14295 case BUILT_IN_UNREACHABLE:
14296 /* Simple register moves or loads from stack. */
14297 case BUILT_IN_ASSUME_ALIGNED:
14298 case BUILT_IN_RETURN_ADDRESS:
14299 case BUILT_IN_EXTRACT_RETURN_ADDR:
14300 case BUILT_IN_FROB_RETURN_ADDR:
14301 case BUILT_IN_RETURN:
14302 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
14303 case BUILT_IN_FRAME_ADDRESS:
14304 case BUILT_IN_VA_END:
14305 case BUILT_IN_STACK_SAVE:
14306 case BUILT_IN_STACK_RESTORE:
14307 /* Exception state returns or moves registers around. */
14308 case BUILT_IN_EH_FILTER:
14309 case BUILT_IN_EH_POINTER:
14310 case BUILT_IN_EH_COPY_VALUES:
14311 return true;
14312
14313 default:
14314 return false;
14315 }
14316
14317 return false;
14318 }
14319
14320 /* Return true if DECL is a builtin that is not expensive, i.e., they are
14321 most probably expanded inline into reasonably simple code. This is a
14322 superset of is_simple_builtin. */
14323 bool
14324 is_inexpensive_builtin (tree decl)
14325 {
14326 if (!decl)
14327 return false;
14328 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
14329 return true;
14330 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
14331 switch (DECL_FUNCTION_CODE (decl))
14332 {
14333 case BUILT_IN_ABS:
14334 case BUILT_IN_ALLOCA:
14335 case BUILT_IN_ALLOCA_WITH_ALIGN:
14336 case BUILT_IN_BSWAP16:
14337 case BUILT_IN_BSWAP32:
14338 case BUILT_IN_BSWAP64:
14339 case BUILT_IN_CLZ:
14340 case BUILT_IN_CLZIMAX:
14341 case BUILT_IN_CLZL:
14342 case BUILT_IN_CLZLL:
14343 case BUILT_IN_CTZ:
14344 case BUILT_IN_CTZIMAX:
14345 case BUILT_IN_CTZL:
14346 case BUILT_IN_CTZLL:
14347 case BUILT_IN_FFS:
14348 case BUILT_IN_FFSIMAX:
14349 case BUILT_IN_FFSL:
14350 case BUILT_IN_FFSLL:
14351 case BUILT_IN_IMAXABS:
14352 case BUILT_IN_FINITE:
14353 case BUILT_IN_FINITEF:
14354 case BUILT_IN_FINITEL:
14355 case BUILT_IN_FINITED32:
14356 case BUILT_IN_FINITED64:
14357 case BUILT_IN_FINITED128:
14358 case BUILT_IN_FPCLASSIFY:
14359 case BUILT_IN_ISFINITE:
14360 case BUILT_IN_ISINF_SIGN:
14361 case BUILT_IN_ISINF:
14362 case BUILT_IN_ISINFF:
14363 case BUILT_IN_ISINFL:
14364 case BUILT_IN_ISINFD32:
14365 case BUILT_IN_ISINFD64:
14366 case BUILT_IN_ISINFD128:
14367 case BUILT_IN_ISNAN:
14368 case BUILT_IN_ISNANF:
14369 case BUILT_IN_ISNANL:
14370 case BUILT_IN_ISNAND32:
14371 case BUILT_IN_ISNAND64:
14372 case BUILT_IN_ISNAND128:
14373 case BUILT_IN_ISNORMAL:
14374 case BUILT_IN_ISGREATER:
14375 case BUILT_IN_ISGREATEREQUAL:
14376 case BUILT_IN_ISLESS:
14377 case BUILT_IN_ISLESSEQUAL:
14378 case BUILT_IN_ISLESSGREATER:
14379 case BUILT_IN_ISUNORDERED:
14380 case BUILT_IN_VA_ARG_PACK:
14381 case BUILT_IN_VA_ARG_PACK_LEN:
14382 case BUILT_IN_VA_COPY:
14383 case BUILT_IN_TRAP:
14384 case BUILT_IN_SAVEREGS:
14385 case BUILT_IN_POPCOUNTL:
14386 case BUILT_IN_POPCOUNTLL:
14387 case BUILT_IN_POPCOUNTIMAX:
14388 case BUILT_IN_POPCOUNT:
14389 case BUILT_IN_PARITYL:
14390 case BUILT_IN_PARITYLL:
14391 case BUILT_IN_PARITYIMAX:
14392 case BUILT_IN_PARITY:
14393 case BUILT_IN_LABS:
14394 case BUILT_IN_LLABS:
14395 case BUILT_IN_PREFETCH:
14396 return true;
14397
14398 default:
14399 return is_simple_builtin (decl);
14400 }
14401
14402 return false;
14403 }