/* gcc/builtins.c — expansion and folding of builtin functions.  */
1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
4 Free Software Foundation, Inc.
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "real.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "gimple.h"
31 #include "flags.h"
32 #include "regs.h"
33 #include "hard-reg-set.h"
34 #include "except.h"
35 #include "function.h"
36 #include "insn-config.h"
37 #include "expr.h"
38 #include "optabs.h"
39 #include "libfuncs.h"
40 #include "recog.h"
41 #include "output.h"
42 #include "typeclass.h"
43 #include "toplev.h"
44 #include "predict.h"
45 #include "tm_p.h"
46 #include "target.h"
47 #include "langhooks.h"
48 #include "basic-block.h"
49 #include "tree-mudflap.h"
50 #include "tree-flow.h"
51 #include "value-prof.h"
52 #include "diagnostic.h"
53
/* If the target does not say how varargs slots are padded, default to
   padding downward exactly when the target is big-endian.  */
#ifndef PAD_VARARGS_DOWN
#define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
#endif
57
/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[4]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

/* Stringized name of every builtin enumeration value, generated by
   expanding each DEF_BUILTIN in builtins.def to the stringized code.  */
#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN
68
/* Setup an array of _DECL trees, make sure each element is
   initialized to NULL_TREE.  */
tree built_in_decls[(int) END_BUILTINS];
/* Declarations used when constructing the builtin implicitly in the compiler.
   It may be NULL_TREE when this is invalid (for instance runtime is not
   required to implement the function call in all cases).  */
tree implicit_built_in_decls[(int) END_BUILTINS];
76
/* Forward declarations of the file-local helpers defined below, grouped
   roughly by purpose.  */

/* String-constant access helpers.  */
static const char *c_getstr (tree);
static rtx c_readstr (const char *, enum machine_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);

/* __builtin_apply / __builtin_return machinery.  */
static int apply_args_size (void);
static int apply_result_size (void);
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
static rtx result_vector (int, rtx);
#endif
static void expand_builtin_update_setjmp_buf (rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);

/* RTL expansion of math builtins.  */
static void expand_errno_check (tree, rtx);
static rtx expand_builtin_mathfn (tree, rtx, rtx);
static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);

/* Varargs builtins.  */
static rtx expand_builtin_args_info (tree);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);

/* RTL expansion of string/memory builtins.  */
static rtx expand_builtin_memchr (tree, rtx, enum machine_mode);
static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
static rtx expand_builtin_strcmp (tree, rtx, enum machine_mode);
static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
static rtx expand_builtin_strcat (tree, tree, rtx, enum machine_mode);
static rtx expand_builtin_strncat (tree, rtx, enum machine_mode);
static rtx expand_builtin_strspn (tree, rtx, enum machine_mode);
static rtx expand_builtin_strcspn (tree, rtx, enum machine_mode);
static rtx expand_builtin_memcpy (tree, rtx, enum machine_mode);
static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, tree, rtx,
					enum machine_mode, int);
static rtx expand_builtin_memmove (tree, rtx, enum machine_mode, int);
static rtx expand_builtin_memmove_args (tree, tree, tree, tree, rtx,
					enum machine_mode, int);
static rtx expand_builtin_bcopy (tree, int);
static rtx expand_builtin_strcpy (tree, tree, rtx, enum machine_mode);
static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx, enum machine_mode);
static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
static rtx expand_builtin_strncpy (tree, rtx, enum machine_mode);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
static rtx expand_builtin_strstr (tree, rtx, enum machine_mode);
static rtx expand_builtin_strpbrk (tree, rtx, enum machine_mode);
static rtx expand_builtin_strchr (tree, rtx, enum machine_mode);
static rtx expand_builtin_strrchr (tree, rtx, enum machine_mode);

/* Miscellaneous RTL expanders.  */
static rtx expand_builtin_alloca (tree, rtx);
static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static rtx expand_builtin_fputs (tree, rtx, bool);
static rtx expand_builtin_printf (tree, rtx, enum machine_mode, bool);
static rtx expand_builtin_fprintf (tree, rtx, enum machine_mode, bool);
static rtx expand_builtin_sprintf (tree, rtx, enum machine_mode);
static tree stabilize_va_list (tree, int);
static rtx expand_builtin_expect (tree, rtx);

/* Tree-level folders for individual builtins.  */
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_expect (tree, tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (tree);
static tree fold_builtin_inf (tree, int);
static tree fold_builtin_nan (tree, tree, int);
static tree rewrite_call_expr (tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static bool integer_valued_real_p (tree);
static tree fold_trunc_transparent_mathfn (tree, tree);
static bool readonly_data_expr (tree);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_sqrt (tree, tree);
static tree fold_builtin_cbrt (tree, tree);
static tree fold_builtin_pow (tree, tree, tree, tree);
static tree fold_builtin_powi (tree, tree, tree, tree);
static tree fold_builtin_cos (tree, tree, tree);
static tree fold_builtin_cosh (tree, tree, tree);
static tree fold_builtin_tan (tree, tree);
static tree fold_builtin_trunc (tree, tree);
static tree fold_builtin_floor (tree, tree);
static tree fold_builtin_ceil (tree, tree);
static tree fold_builtin_round (tree, tree);
static tree fold_builtin_int_roundingfn (tree, tree);
static tree fold_builtin_bitop (tree, tree);
static tree fold_builtin_memory_op (tree, tree, tree, tree, bool, int);
static tree fold_builtin_strchr (tree, tree, tree);
static tree fold_builtin_memchr (tree, tree, tree, tree);
static tree fold_builtin_memcmp (tree, tree, tree);
static tree fold_builtin_strcmp (tree, tree);
static tree fold_builtin_strncmp (tree, tree, tree);
static tree fold_builtin_signbit (tree, tree);
static tree fold_builtin_copysign (tree, tree, tree, tree);
static tree fold_builtin_isascii (tree);
static tree fold_builtin_toascii (tree);
static tree fold_builtin_isdigit (tree);
static tree fold_builtin_fabs (tree, tree);
static tree fold_builtin_abs (tree, tree);
static tree fold_builtin_unordered_cmp (tree, tree, tree, enum tree_code,
					enum tree_code);

/* Arity-dispatched folding entry points.  */
static tree fold_builtin_n (tree, tree *, int, bool);
static tree fold_builtin_0 (tree, bool);
static tree fold_builtin_1 (tree, tree, bool);
static tree fold_builtin_2 (tree, tree, tree, bool);
static tree fold_builtin_3 (tree, tree, tree, tree, bool);
static tree fold_builtin_4 (tree, tree, tree, tree, tree, bool);
static tree fold_builtin_varargs (tree, tree, bool);

static tree fold_builtin_strpbrk (tree, tree, tree);
static tree fold_builtin_strstr (tree, tree, tree);
static tree fold_builtin_strrchr (tree, tree, tree);
static tree fold_builtin_strcat (tree, tree);
static tree fold_builtin_strncat (tree, tree, tree);
static tree fold_builtin_strspn (tree, tree);
static tree fold_builtin_strcspn (tree, tree);
static tree fold_builtin_sprintf (tree, tree, tree, int);

/* Object-size checking (__*_chk fortified builtins).  */
static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
				      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);
static tree fold_builtin_strcat_chk (tree, tree, tree, tree);
static tree fold_builtin_strncat_chk (tree, tree, tree, tree, tree);
static tree fold_builtin_sprintf_chk (tree, enum built_in_function);
static tree fold_builtin_printf (tree, tree, tree, bool, enum built_in_function);
static tree fold_builtin_fprintf (tree, tree, tree, tree, bool,
				  enum built_in_function);
static bool init_target_chars (void);

/* Target-charset representations of the characters and short format
   strings used by the printf-family folders; filled in lazily by
   init_target_chars.  */
static unsigned HOST_WIDE_INT target_newline;
static unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
static char target_percent_c[3];
static char target_percent_s[3];
static char target_percent_s_newline[4];

/* MPFR-backed compile-time evaluation of math builtins applied to
   constant arguments.  */
static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
			  const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_arg2 (tree, tree, tree,
			  int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_arg3 (tree, tree, tree, tree,
			  int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_sincos (tree, tree, tree);
static tree do_mpfr_bessel_n (tree, tree, tree,
			      int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
			      const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
239
240 /* Return true if NODE should be considered for inline expansion regardless
241 of the optimization level. This means whenever a function is invoked with
242 its "internal" name, which normally contains the prefix "__builtin". */
243
244 static bool called_as_built_in (tree node)
245 {
246 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
247 if (strncmp (name, "__builtin_", 10) == 0)
248 return true;
249 if (strncmp (name, "__sync_", 7) == 0)
250 return true;
251 return false;
252 }
253
/* Return the alignment in bits of EXP, an object.
   Don't return more than MAX_ALIGN no matter what; ALIGN is the initial
   guessed alignment, e.g. from type alignment.  */

int
get_object_alignment (tree exp, unsigned int align, unsigned int max_align)
{
  /* INNER is the alignment bound implied by the access path (constant
     bit position and any variable offsets); it only ever decreases
     from MAX_ALIGN.  */
  unsigned int inner;

  inner = max_align;
  if (handled_component_p (exp))
    {
      HOST_WIDE_INT bitsize, bitpos;
      tree offset;
      enum machine_mode mode;
      int unsignedp, volatilep;

      exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
				 &mode, &unsignedp, &volatilep, true);
      /* A nonzero constant bit position limits alignment to its lowest
	 set bit (BITPOS & -BITPOS isolates that bit).  */
      if (bitpos)
	inner = MIN (inner, (unsigned) (bitpos & -bitpos));
      /* Walk the chain of PLUS_EXPR offset components, reducing INNER
	 for each component whose value can be bounded.  */
      while (offset)
	{
	  tree next_offset;

	  if (TREE_CODE (offset) == PLUS_EXPR)
	    {
	      next_offset = TREE_OPERAND (offset, 0);
	      offset = TREE_OPERAND (offset, 1);
	    }
	  else
	    next_offset = NULL;
	  if (host_integerp (offset, 1))
	    {
	      /* Any overflow in calculating offset_bits won't change
		 the alignment.  */
	      unsigned offset_bits
		= ((unsigned) tree_low_cst (offset, 1) * BITS_PER_UNIT);

	      if (offset_bits)
		inner = MIN (inner, (offset_bits & -offset_bits));
	    }
	  else if (TREE_CODE (offset) == MULT_EXPR
		   && host_integerp (TREE_OPERAND (offset, 1), 1))
	    {
	      /* A variable times a constant: the constant factor bounds
		 the alignment.  Any overflow in calculating offset_factor
		 won't change the alignment.  */
	      unsigned offset_factor
		= ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
		   * BITS_PER_UNIT);

	      if (offset_factor)
		inner = MIN (inner, (offset_factor & -offset_factor));
	    }
	  else
	    {
	      /* Unanalyzable offset component: assume byte alignment
		 only and stop looking.  */
	      inner = MIN (inner, BITS_PER_UNIT);
	      break;
	    }
	  offset = next_offset;
	}
    }
  /* Combine the path-derived bound with what the base object itself
     guarantees.  */
  if (DECL_P (exp))
    align = MIN (inner, DECL_ALIGN (exp));
#ifdef CONSTANT_ALIGNMENT
  else if (CONSTANT_CLASS_P (exp))
    align = MIN (inner, (unsigned)CONSTANT_ALIGNMENT (exp, align));
#endif
  else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR
	   || TREE_CODE (exp) == INDIRECT_REF)
    align = MIN (TYPE_ALIGN (TREE_TYPE (exp)), inner);
  else
    align = MIN (align, inner);
  return MIN (align, max_align);
}
329
/* Return the alignment in bits of EXP, a pointer valued expression.
   But don't return more than MAX_ALIGN no matter what.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

int
get_pointer_alignment (tree exp, unsigned int max_align)
{
  unsigned int align, inner;

  /* We rely on TER to compute accurate alignment information.  */
  if (!(optimize && flag_tree_ter))
    return 0;

  if (!POINTER_TYPE_P (TREE_TYPE (exp)))
    return 0;

  /* Start from the alignment of the pointed-to type, clamped to
     MAX_ALIGN.  */
  align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
  align = MIN (align, max_align);

  /* Peel conversions and pointer arithmetic, refining the bound as we
     descend toward the underlying object.  */
  while (1)
    {
      switch (TREE_CODE (exp))
	{
	CASE_CONVERT:
	  exp = TREE_OPERAND (exp, 0);
	  if (! POINTER_TYPE_P (TREE_TYPE (exp)))
	    return align;

	  inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
	  align = MIN (inner, max_align);
	  break;

	case POINTER_PLUS_EXPR:
	  /* If sum of pointer + int, restrict our maximum alignment to that
	     imposed by the integer.  If not, we can't do any better than
	     ALIGN.  */
	  if (! host_integerp (TREE_OPERAND (exp, 1), 1))
	    return align;

	  /* Halve MAX_ALIGN until the constant addend is a multiple of
	     it — the addend limits what alignment we can prove.  */
	  while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
		  & (max_align / BITS_PER_UNIT - 1))
		 != 0)
	    max_align >>= 1;

	  exp = TREE_OPERAND (exp, 0);
	  break;

	case ADDR_EXPR:
	  /* See what we are pointing at and look at its alignment.  */
	  return get_object_alignment (TREE_OPERAND (exp, 0), align, max_align);

	default:
	  return align;
	}
    }
}
390
/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  tree offset_node;
  HOST_WIDE_INT offset;
  int max;
  const char *ptr;

  STRIP_NOPS (src);
  /* For a conditional, the length is known only when both arms have
     the same known length.  */
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  /* For (e1, e2) the length is that of e2, provided discarding e1's
     side effects is permitted.  */
  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  src = string_constant (src, &offset_node);
  if (src == 0)
    return NULL_TREE;

  max = TREE_STRING_LENGTH (src) - 1;
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      int i;

      for (i = 0; i < max; i++)
	if (ptr[i] == 0)
	  return NULL_TREE;

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  This would perhaps not be valid if we were dealing
	 with named arrays in addition to literal string constants.  */

      return size_diffop (size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (offset_node == 0)
    offset = 0;
  else if (! host_integerp (offset_node, 0))
    offset = -1;
  else
    offset = tree_low_cst (offset_node, 0);

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (! TREE_NO_WARNING (src))
	{
	  warning (0, "offset outside bounds of constant string");
	  TREE_NO_WARNING (src) = 1;
	}
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return ssize_int (strlen (ptr + offset));
}
489
490 /* Return a char pointer for a C string if it is a string constant
491 or sum of string constant and integer constant. */
492
493 static const char *
494 c_getstr (tree src)
495 {
496 tree offset_node;
497
498 src = string_constant (src, &offset_node);
499 if (src == 0)
500 return 0;
501
502 if (offset_node == 0)
503 return TREE_STRING_POINTER (src);
504 else if (!host_integerp (offset_node, 1)
505 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
506 return 0;
507
508 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
509 }
510
/* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, enum machine_mode mode)
{
  /* C holds the low and high halves of the (up to) double-wide value.  */
  HOST_WIDE_INT c[2];
  HOST_WIDE_INT ch;
  unsigned int i, j;

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);

  c[0] = 0;
  c[1] = 0;
  /* CH doubles as a "still inside the string" flag: once a NUL byte is
     read it stays zero, so the remaining target bytes are zero-filled.  */
  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      /* Map host byte index I to target bit position J, honoring the
	 target's word and byte endianness.  */
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;
      gcc_assert (j <= 2 * HOST_BITS_PER_WIDE_INT);

      if (ch)
	ch = (unsigned char) str[i];
      c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }
  return immed_double_const (c[0], c[1], mode);
}
543
/* Cast a target constant CST to target CHAR and if that value fits into
   host char type, return zero and put that value into variable pointed to by
   P.  Return nonzero on failure.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  /* Fail unless CST is a host-representable unsigned integer constant
     and the target char fits in a host wide int.  */
  if (!host_integerp (cst, 1)
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  val = tree_low_cst (cst, 1);
  /* Truncate to the target's char width.  */
  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;

  /* Truncate again to the host's char width; if the two results differ
     the value does not fit in a host char.  */
  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}
571
572 /* Similar to save_expr, but assumes that arbitrary code is not executed
573 in between the multiple evaluations. In particular, we assume that a
574 non-addressable local variable will not be modified. */
575
576 static tree
577 builtin_save_expr (tree exp)
578 {
579 if (TREE_ADDRESSABLE (exp) == 0
580 && (TREE_CODE (exp) == PARM_DECL
581 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp))))
582 return exp;
583
584 return save_expr (exp);
585 }
586
/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;

  /* The target may supply the initial frame address directly; otherwise
     choose between the soft and hard frame pointer below.  */
#ifdef INITIAL_FRAME_ADDRESS_RTX
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
#else
  rtx tem;

  /* For a zero count with __builtin_return_address, we don't care what
     frame address we return, because target-specific definitions will
     override us.  Therefore frame pointer elimination is OK, and using
     the soft frame pointer is OK.

     For a nonzero count, or a zero count with __builtin_frame_address,
     we require a stable offset from the current frame pointer to the
     previous one, so we must use the hard frame pointer, and
     we must disable frame pointer elimination.  */
  if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    tem = frame_pointer_rtx;
  else
    {
      tem = hard_frame_pointer_rtx;

      /* Tell reload not to eliminate the frame pointer.  */
      crtl->accesses_prior_frames = 1;
    }
#endif

  /* Some machines need special handling before we can access
     arbitrary frames.  For example, on the SPARC, we must first flush
     all register windows to the stack.  */
#ifdef SETUP_FRAME_ADDRESSES
  if (count > 0)
    SETUP_FRAME_ADDRESSES ();
#endif

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
  if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;
#endif

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
#ifdef DYNAMIC_CHAIN_ADDRESS
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
#ifdef FRAME_ADDR_RTX
    return FRAME_ADDR_RTX (tem);
#else
    return tem;
#endif

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
670
/* Alias set used for setjmp buffer.  Lazily allocated by the first
   setjmp/longjmp expansion; -1 means not yet created.  */
static alias_set_type setjmp_alias_set = -1;
673
674 /* Construct the leading half of a __builtin_setjmp call. Control will
675 return to RECEIVER_LABEL. This is also called directly by the SJLJ
676 exception handling code. */
677
678 void
679 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
680 {
681 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
682 rtx stack_save;
683 rtx mem;
684
685 if (setjmp_alias_set == -1)
686 setjmp_alias_set = new_alias_set ();
687
688 buf_addr = convert_memory_address (Pmode, buf_addr);
689
690 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
691
692 /* We store the frame pointer and the address of receiver_label in
693 the buffer and use the rest of it for the stack save area, which
694 is machine-dependent. */
695
696 mem = gen_rtx_MEM (Pmode, buf_addr);
697 set_mem_alias_set (mem, setjmp_alias_set);
698 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
699
700 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
701 set_mem_alias_set (mem, setjmp_alias_set);
702
703 emit_move_insn (validize_mem (mem),
704 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
705
706 stack_save = gen_rtx_MEM (sa_mode,
707 plus_constant (buf_addr,
708 2 * GET_MODE_SIZE (Pmode)));
709 set_mem_alias_set (stack_save, setjmp_alias_set);
710 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
711
712 /* If there is further processing to do, do it. */
713 #ifdef HAVE_builtin_setjmp_setup
714 if (HAVE_builtin_setjmp_setup)
715 emit_insn (gen_builtin_setjmp_setup (buf_addr));
716 #endif
717
718 /* Tell optimize_save_area_alloca that extra work is going to
719 need to go on during alloca. */
720 cfun->calls_setjmp = 1;
721
722 /* We have a nonlocal label. */
723 cfun->has_nonlocal_label = 1;
724 }
725
/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
{
  /* Clobber the FP when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  emit_clobber (static_chain_rtx);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
#ifdef HAVE_nonlocal_goto
  if (! HAVE_nonlocal_goto)
#endif
    {
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
      /* This might change the hard frame pointer in ways that aren't
	 apparent to early optimization passes, so force a clobber.  */
      emit_clobber (hard_frame_pointer_rtx);
    }

#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      /* If the target can eliminate the argument pointer in favor of
	 the hard frame pointer, no restore is needed; search the
	 elimination table for that pair.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }
#endif

  /* Prefer the target's setjmp receiver pattern, then its nonlocal
     goto receiver pattern; otherwise emit nothing extra.  Note the
     two #ifdef blocks chain together via the dangling `else`s.  */
#ifdef HAVE_builtin_setjmp_receiver
  if (HAVE_builtin_setjmp_receiver)
    emit_insn (gen_builtin_setjmp_receiver (receiver_label));
  else
#endif
#ifdef HAVE_nonlocal_goto_receiver
    if (HAVE_nonlocal_goto_receiver)
      emit_insn (gen_nonlocal_goto_receiver ());
    else
#endif
      { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}
792
/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack, insn, last;
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to current
     function  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We used to store value in static_chain_rtx, but that fails if pointers
     are smaller than integers.  We instead require that the user must pass
     a second argument of 1, because that is what builtin_setjmp will
     return.  This also makes EH slightly more efficient, since we are no
     longer copying around a value that we don't care about.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
#ifdef HAVE_builtin_longjmp
  if (HAVE_builtin_longjmp)
    emit_insn (gen_builtin_longjmp (buf_addr));
  else
#endif
    {
      /* Buffer layout matches expand_builtin_setjmp_setup: word 0 is
	 the frame pointer, word 1 the receiver label, words 2+ the
	 saved stack area.  */
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
#ifdef HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
      else
#endif
	{
	  lab = copy_to_reg (lab);

	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}
883
/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  Returns const0_rtx on success,
   NULL_RTX if the argument list does not match.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp, insn;

  /* The builtin takes exactly two pointer arguments: the label to
     jump to and the buffer holding the saved frame/stack pointers.  */
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was based
     on the frame pointer.   */
  r_save_area = copy_to_reg (r_save_area);
  /* The save area stores the frame pointer first, followed by the
     stack pointer (in the target's nonlocal save-area mode) one
     Pmode word later.  */
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

#ifdef HAVE_nonlocal_goto
  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (HAVE_nonlocal_goto)
    emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
#endif
    {
      r_label = copy_to_reg (r_label);

      /* Tell the optimizers that all memory and the frame are
	 clobbered across the jump.  */
      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.
	 This sets the actual hard register used for the frame pointer
	 to the location of the function's incoming static chain info.
	 The non-local goto handler will then adjust it to contain the
	 proper value and reload the argument pointer, if needed.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp, NULL_RTX);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn we just emitted and mark it as
     a non-local goto; stop early at any intervening call.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}
968
/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to correspond to the current
   stack pointer.  */

static void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  enum machine_mode sa_mode = Pmode;
  rtx stack_save;


  /* Determine the mode in which the stack pointer is saved.  Note
     that when STACK_SAVEAREA_MODE is defined it unconditionally
     overrides the mode taken from the save_stack_nonlocal pattern.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    sa_mode = insn_data[(int) CODE_FOR_save_stack_nonlocal].operand[0].mode;
#endif
#ifdef STACK_SAVEAREA_MODE
  sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
#endif

  /* The saved stack pointer lives two Pmode words into the buffer.  */
  stack_save
    = gen_rtx_MEM (sa_mode,
		   memory_address
		   (sa_mode,
		    plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));

  /* Let targets with a setjmp pattern emit any extra bookkeeping.  */
#ifdef HAVE_setjmp
  if (HAVE_setjmp)
    emit_insn (gen_setjmp ());
#endif

  emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
}
1002
/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  /* Only the first (address) argument is mandatory.  */
  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = build_int_cst (NULL_TREE, 3);

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.
     On bad input, diagnose and fall back to the default of zero.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

#ifdef HAVE_prefetch
  if (HAVE_prefetch)
    {
      /* If the address doesn't satisfy the prefetch pattern's operand
	 predicate, or isn't already in Pmode, force it into a Pmode
	 register before emitting the insn.  */
      if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
	     (op0,
	      insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
	  || (GET_MODE (op0) != Pmode))
	{
	  op0 = convert_memory_address (Pmode, op0);
	  op0 = force_reg (Pmode, op0);
	}
      emit_insn (gen_prefetch (op0, op1, op2));
    }
#endif

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
1084
/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  The returned MEM carries attributes derived from
   EXP, but with the alias set and size cleared (see below).  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  rtx addr = expand_expr (exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  rtx mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     If it is an ADDR_EXPR, use the operand.  Otherwise, dereference it if
     we can.  First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  if (TREE_CODE (exp) == ADDR_EXPR)
    exp = TREE_OPERAND (exp, 0);
  else if (POINTER_TYPE_P (TREE_TYPE (exp)))
    exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
  else
    exp = NULL;

  /* Honor attributes derived from exp, except for the alias set
     (as builtin stringops may alias with anything) and the size
     (as stringops may access multiple array elements).  */
  if (exp)
    {
      set_mem_attributes (mem, exp, 0);

      /* Allow the string and memory builtins to overflow from one
	 field into another, see http://gcc.gnu.org/PR23561.
	 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
	 memory accessed by the string or memory builtin will fit
	 within the field.  */
      if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
	{
	  tree mem_expr = MEM_EXPR (mem);
	  /* OFFSET/LENGTH track the accessed byte range; -1 means
	     unknown.  */
	  HOST_WIDE_INT offset = -1, length = -1;
	  tree inner = exp;

	  /* Strip array refs, conversions and SAVE_EXPRs to reach the
	     innermost COMPONENT_REF.  */
	  while (TREE_CODE (inner) == ARRAY_REF
		 || CONVERT_EXPR_P (inner)
		 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
		 || TREE_CODE (inner) == SAVE_EXPR)
	    inner = TREE_OPERAND (inner, 0);

	  gcc_assert (TREE_CODE (inner) == COMPONENT_REF);

	  if (MEM_OFFSET (mem)
	      && GET_CODE (MEM_OFFSET (mem)) == CONST_INT)
	    offset = INTVAL (MEM_OFFSET (mem));

	  if (offset >= 0 && len && host_integerp (len, 0))
	    length = tree_low_cst (len, 0);

	  /* Walk outward through nested COMPONENT_REFs, stopping at the
	     first field the whole access provably fits into.  */
	  while (TREE_CODE (inner) == COMPONENT_REF)
	    {
	      tree field = TREE_OPERAND (inner, 1);
	      gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
	      gcc_assert (field == TREE_OPERAND (mem_expr, 1));

	      /* Bitfields are generally not byte-addressable.  */
	      gcc_assert (!DECL_BIT_FIELD (field)
			  || ((tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
			       % BITS_PER_UNIT) == 0
			      && host_integerp (DECL_SIZE (field), 0)
			      && (TREE_INT_CST_LOW (DECL_SIZE (field))
				  % BITS_PER_UNIT) == 0));

	      /* If we can prove that the memory starting at XEXP (mem, 0) and
		 ending at XEXP (mem, 0) + LENGTH will fit into this field, we
		 can keep the COMPONENT_REF in MEM_EXPR.  But be careful with
		 fields without DECL_SIZE_UNIT like flexible array members.  */
	      if (length >= 0
		  && DECL_SIZE_UNIT (field)
		  && host_integerp (DECL_SIZE_UNIT (field), 0))
		{
		  HOST_WIDE_INT size
		    = TREE_INT_CST_LOW (DECL_SIZE_UNIT (field));
		  if (offset <= size
		      && length <= size
		      && offset + length <= size)
		    break;
		}

	      if (offset >= 0
		  && host_integerp (DECL_FIELD_OFFSET (field), 0))
		offset += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
			  + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
			    / BITS_PER_UNIT;
	      else
		{
		  /* Variable field offset: lose track of the range.  */
		  offset = -1;
		  length = -1;
		}

	      mem_expr = TREE_OPERAND (mem_expr, 0);
	      inner = TREE_OPERAND (inner, 0);
	    }

	  if (mem_expr == NULL)
	    offset = -1;
	  if (mem_expr != MEM_EXPR (mem))
	    {
	      set_mem_expr (mem, mem_expr);
	      set_mem_offset (mem, offset >= 0 ? GEN_INT (offset) : NULL_RTX);
	    }
	}
      set_mem_alias_set (mem, 0);
      set_mem_size (mem, NULL_RTX);
    }

  return mem;
}
1202 \f
/* Built-in functions to perform an untyped call and return.  */

/* For each register that may be used for calling a function, this
   gives a mode used to copy the register's value.  VOIDmode indicates
   the register is not used for calling a function.  If the machine
   has register windows, this gives only the outbound registers.
   INCOMING_REGNO gives the corresponding inbound register.
   Filled in lazily by apply_args_size.  */
static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];

/* For each register that may be used for returning values, this gives
   a mode used to copy the register's value.  VOIDmode indicates the
   register is not used for returning values.  If the machine has
   register windows, this gives only the outbound registers.
   INCOMING_REGNO gives the corresponding inbound register.
   Filled in lazily by apply_result_size.  */
static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];

/* For each register that may be used for calling a function, this
   gives the offset of that register into the block returned by
   __builtin_apply_args.  0 indicates that the register is not
   used for calling a function.  Filled in lazily by apply_args_size.  */
static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
1224
1225 /* Return the size required for the block returned by __builtin_apply_args,
1226 and initialize apply_args_mode. */
1227
1228 static int
1229 apply_args_size (void)
1230 {
1231 static int size = -1;
1232 int align;
1233 unsigned int regno;
1234 enum machine_mode mode;
1235
1236 /* The values computed by this function never change. */
1237 if (size < 0)
1238 {
1239 /* The first value is the incoming arg-pointer. */
1240 size = GET_MODE_SIZE (Pmode);
1241
1242 /* The second value is the structure value address unless this is
1243 passed as an "invisible" first argument. */
1244 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1245 size += GET_MODE_SIZE (Pmode);
1246
1247 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1248 if (FUNCTION_ARG_REGNO_P (regno))
1249 {
1250 mode = reg_raw_mode[regno];
1251
1252 gcc_assert (mode != VOIDmode);
1253
1254 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1255 if (size % align != 0)
1256 size = CEIL (size, align) * align;
1257 apply_args_reg_offset[regno] = size;
1258 size += GET_MODE_SIZE (mode);
1259 apply_args_mode[regno] = mode;
1260 }
1261 else
1262 {
1263 apply_args_mode[regno] = VOIDmode;
1264 apply_args_reg_offset[regno] = 0;
1265 }
1266 }
1267 return size;
1268 }
1269
1270 /* Return the size required for the block returned by __builtin_apply,
1271 and initialize apply_result_mode. */
1272
1273 static int
1274 apply_result_size (void)
1275 {
1276 static int size = -1;
1277 int align, regno;
1278 enum machine_mode mode;
1279
1280 /* The values computed by this function never change. */
1281 if (size < 0)
1282 {
1283 size = 0;
1284
1285 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1286 if (FUNCTION_VALUE_REGNO_P (regno))
1287 {
1288 mode = reg_raw_mode[regno];
1289
1290 gcc_assert (mode != VOIDmode);
1291
1292 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1293 if (size % align != 0)
1294 size = CEIL (size, align) * align;
1295 size += GET_MODE_SIZE (mode);
1296 apply_result_mode[regno] = mode;
1297 }
1298 else
1299 apply_result_mode[regno] = VOIDmode;
1300
1301 /* Allow targets that use untyped_call and untyped_return to override
1302 the size so that machine-specific information can be stored here. */
1303 #ifdef APPLY_RESULT_SIZE
1304 size = APPLY_RESULT_SIZE;
1305 #endif
1306 }
1307 return size;
1308 }
1309
1310 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1311 /* Create a vector describing the result block RESULT. If SAVEP is true,
1312 the result block is used to save the values; otherwise it is used to
1313 restore the values. */
1314
1315 static rtx
1316 result_vector (int savep, rtx result)
1317 {
1318 int regno, size, align, nelts;
1319 enum machine_mode mode;
1320 rtx reg, mem;
1321 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1322
1323 size = nelts = 0;
1324 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1325 if ((mode = apply_result_mode[regno]) != VOIDmode)
1326 {
1327 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1328 if (size % align != 0)
1329 size = CEIL (size, align) * align;
1330 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1331 mem = adjust_address (result, mode, size);
1332 savevec[nelts++] = (savep
1333 ? gen_rtx_SET (VOIDmode, mem, reg)
1334 : gen_rtx_SET (VOIDmode, reg, mem));
1335 size += GET_MODE_SIZE (mode);
1336 }
1337 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1338 }
1339 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1340
/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  Returns a pseudo
   holding the address of the stack block; the block layout matches the
   one computed by apply_args_size.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  enum machine_mode mode;
  rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block,
     keeping each slot naturally aligned.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
#ifdef STACK_GROWS_DOWNWARD
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  tem
    = force_operand (plus_constant (tem, crtl->args.pretend_args_size),
		     NULL_RTX);
#endif
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
		      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}
1401
1402 /* __builtin_apply_args returns block of memory allocated on
1403 the stack into which is stored the arg pointer, structure
1404 value address, static chain, and all the registers that might
1405 possibly be used in performing a function call. The code is
1406 moved to the start of the function so the incoming values are
1407 saved. */
1408
1409 static rtx
1410 expand_builtin_apply_args (void)
1411 {
1412 /* Don't do __builtin_apply_args more than once in a function.
1413 Save the result of the first call and reuse it. */
1414 if (apply_args_value != 0)
1415 return apply_args_value;
1416 {
1417 /* When this function is called, it means that registers must be
1418 saved on entry to this function. So we migrate the
1419 call to the first insn of this function. */
1420 rtx temp;
1421 rtx seq;
1422
1423 start_sequence ();
1424 temp = expand_builtin_apply_args_1 ();
1425 seq = get_insns ();
1426 end_sequence ();
1427
1428 apply_args_value = temp;
1429
1430 /* Put the insns after the NOTE that starts the function.
1431 If this is inside a start_sequence, make the outer-level insn
1432 chain current, so the code is placed at the start of the
1433 function. */
1434 push_topmost_sequence ();
1435 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1436 pop_topmost_sequence ();
1437 return temp;
1438 }
1439 }
1440
/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.
   FUNCTION is the callee address, ARGUMENTS the block built by
   __builtin_apply_args, and ARGSIZE the number of bytes of arguments
   to copy.  Returns the address (in ptr_mode) of the stack block
   holding the saved return registers.  */

static rtx
expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
{
  int size, align, regno;
  enum machine_mode mode;
  rtx incoming_args, result, reg, dest, src, call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;
  rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);

  arguments = convert_memory_address (Pmode, arguments);

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
#ifndef STACK_GROWS_DOWNWARD
  /* On a grows-upward stack the saved arg pointer is past the
     arguments, so step back over them.  */
  incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
				       incoming_args, 0, OPTAB_LIB_WIDEN);
#endif

  /* Push a new argument block and copy the arguments.  Do not allow
     the (potential) memcpy call below to interfere with our stack
     manipulations.  */
  do_pending_stack_adjust ();
  NO_DEFER_POP;

  /* Save the stack with nonlocal if available.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
  else
#endif
    emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);

  /* Allocate a block of memory onto the stack and copy the memory
     arguments to the outgoing arguments address.  */
  allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);

  /* Set DRAP flag to true, even though allocate_dynamic_stack_space
     may have already set current_function_calls_alloca to true.
     current_function_calls_alloca won't be set if argsize is zero,
     so we have to guarantee need_drap is true here.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  dest = virtual_outgoing_args_rtx;
#ifndef STACK_GROWS_DOWNWARD
  if (GET_CODE (argsize) == CONST_INT)
    dest = plus_constant (dest, -INTVAL (argsize));
  else
    dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
#endif
  dest = gen_rtx_MEM (BLKmode, dest);
  set_mem_align (dest, PARM_BOUNDARY);
  src = gen_rtx_MEM (BLKmode, incoming_args);
  set_mem_align (src, PARM_BOUNDARY);
  emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);

  /* Refer to the argument block.  The call to apply_args_size here is
     for its side effect of initializing apply_args_mode.  */
  apply_args_size ();
  arguments = gen_rtx_MEM (BLKmode, arguments);
  set_mem_align (arguments, PARM_BOUNDARY);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, regno);
	emit_move_insn (reg, adjust_address (arguments, mode, size));
	use_reg (&call_fusage, reg);
	size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value, adjust_address (arguments, Pmode, size));
      emit_move_insn (struct_value, value);
      if (REG_P (struct_value))
	use_reg (&call_fusage, struct_value);
      size += GET_MODE_SIZE (Pmode);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (function, NULL, &call_fusage, 0, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
#ifdef HAVE_untyped_call
  if (HAVE_untyped_call)
    emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
				      result, result_vector (1, result)));
  else
#endif
#ifdef HAVE_call_value
  if (HAVE_call_value)
    {
      rtx valreg = 0;

      /* Locate the unique return register.  It is not possible to
	 express a call that sets more than one return register using
	 call_value; use untyped_call for that.  In fact, untyped_call
	 only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if ((mode = apply_result_mode[regno]) != VOIDmode)
	  {
	    gcc_assert (!valreg); /* HAVE_untyped_call required.  */

	    valreg = gen_rtx_REG (mode, regno);
	  }

      emit_call_insn (GEN_CALL_VALUE (valreg,
				      gen_rtx_MEM (FUNCTION_MODE, function),
				      const0_rtx, NULL_RTX, const0_rtx));

      emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
    }
  else
#endif
    gcc_unreachable ();

  /* Find the CALL insn we just emitted, and attach the register usage
     information.  */
  call_insn = last_call_insn ();
  add_function_usage_to (call_insn, call_fusage);

  /* Restore the stack.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
  else
#endif
    emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);

  OK_DEFER_POP;

  /* Return the address of the result block.  */
  result = copy_addr_to_reg (XEXP (result, 0));
  return convert_memory_address (ptr_mode, result);
}
1604
/* Perform an untyped return.  RESULT is the address of the block of
   saved return registers built by __builtin_apply.  */

static void
expand_builtin_return (rtx result)
{
  int size, align, regno;
  enum machine_mode mode;
  rtx reg;
  rtx call_fusage = 0;

  result = convert_memory_address (Pmode, result);

  /* Call apply_result_size for its side effect of initializing
     apply_result_mode.  */
  apply_result_size ();
  result = gen_rtx_MEM (BLKmode, result);

#ifdef HAVE_untyped_return
  if (HAVE_untyped_return)
    {
      emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
      emit_barrier ();
      return;
    }
#endif

  /* Restore the return value and note that each value is used.
     The offsets must match the layout used by apply_result_size.  */
  size = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
	emit_move_insn (reg, adjust_address (result, mode, size));

	/* Accumulate a USE of each restored register in CALL_FUSAGE.  */
	push_to_sequence (call_fusage);
	emit_use (reg);
	call_fusage = get_insns ();
	end_sequence ();
	size += GET_MODE_SIZE (mode);
      }

  /* Put the USE insns before the return.  */
  emit_insn (call_fusage);

  /* Return whatever values was restored by jumping directly to the end
     of the function.  */
  expand_naked_return ();
}
1654
1655 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1656
1657 static enum type_class
1658 type_to_class (tree type)
1659 {
1660 switch (TREE_CODE (type))
1661 {
1662 case VOID_TYPE: return void_type_class;
1663 case INTEGER_TYPE: return integer_type_class;
1664 case ENUMERAL_TYPE: return enumeral_type_class;
1665 case BOOLEAN_TYPE: return boolean_type_class;
1666 case POINTER_TYPE: return pointer_type_class;
1667 case REFERENCE_TYPE: return reference_type_class;
1668 case OFFSET_TYPE: return offset_type_class;
1669 case REAL_TYPE: return real_type_class;
1670 case COMPLEX_TYPE: return complex_type_class;
1671 case FUNCTION_TYPE: return function_type_class;
1672 case METHOD_TYPE: return method_type_class;
1673 case RECORD_TYPE: return record_type_class;
1674 case UNION_TYPE:
1675 case QUAL_UNION_TYPE: return union_type_class;
1676 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1677 ? string_type_class : array_type_class);
1678 case LANG_TYPE: return lang_type_class;
1679 default: return no_type_class;
1680 }
1681 }
1682
1683 /* Expand a call EXP to __builtin_classify_type. */
1684
1685 static rtx
1686 expand_builtin_classify_type (tree exp)
1687 {
1688 if (call_expr_nargs (exp))
1689 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1690 return GEN_INT (no_type_class);
1691 }
1692
/* This helper macro, meant to be used in mathfn_built_in below,
   determines which among a set of three builtin math functions is
   appropriate for a given type mode.  The `F' and `L' cases are
   automatically generated from the `double' case.  It assigns the
   local variables fcode, fcodef and fcodel and then breaks out of
   the enclosing switch.  */
#define CASE_MATHFN(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
  fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
  fcodel = BUILT_IN_MATHFN##L ; break;
/* Similar to above, but appends _R after any F/L suffix; used for the
   reentrant variants such as lgamma_r/lgammaf_r/lgammal_r.  */
#define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
  fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
  fcodel = BUILT_IN_MATHFN##L_R ; break;
1706
/* Return mathematic function equivalent to FN but operating directly
   on TYPE, if available.  If IMPLICIT is true find the function in
   implicit_built_in_decls[], otherwise use built_in_decls[].  If we
   can't do the conversion, return zero.  TYPE must be one of the
   float/double/long double main variants for a non-NULL result.  */

static tree
mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit)
{
  tree const *const fn_arr
    = implicit ? implicit_built_in_decls : built_in_decls;
  enum built_in_function fcode, fcodef, fcodel;

  /* Each CASE_MATHFN sets fcode/fcodef/fcodel for the double/float/
     long double variants of the function; see the macro above.  */
  switch (fn)
    {
    CASE_MATHFN (BUILT_IN_ACOS)
    CASE_MATHFN (BUILT_IN_ACOSH)
    CASE_MATHFN (BUILT_IN_ASIN)
    CASE_MATHFN (BUILT_IN_ASINH)
    CASE_MATHFN (BUILT_IN_ATAN)
    CASE_MATHFN (BUILT_IN_ATAN2)
    CASE_MATHFN (BUILT_IN_ATANH)
    CASE_MATHFN (BUILT_IN_CBRT)
    CASE_MATHFN (BUILT_IN_CEIL)
    CASE_MATHFN (BUILT_IN_CEXPI)
    CASE_MATHFN (BUILT_IN_COPYSIGN)
    CASE_MATHFN (BUILT_IN_COS)
    CASE_MATHFN (BUILT_IN_COSH)
    CASE_MATHFN (BUILT_IN_DREM)
    CASE_MATHFN (BUILT_IN_ERF)
    CASE_MATHFN (BUILT_IN_ERFC)
    CASE_MATHFN (BUILT_IN_EXP)
    CASE_MATHFN (BUILT_IN_EXP10)
    CASE_MATHFN (BUILT_IN_EXP2)
    CASE_MATHFN (BUILT_IN_EXPM1)
    CASE_MATHFN (BUILT_IN_FABS)
    CASE_MATHFN (BUILT_IN_FDIM)
    CASE_MATHFN (BUILT_IN_FLOOR)
    CASE_MATHFN (BUILT_IN_FMA)
    CASE_MATHFN (BUILT_IN_FMAX)
    CASE_MATHFN (BUILT_IN_FMIN)
    CASE_MATHFN (BUILT_IN_FMOD)
    CASE_MATHFN (BUILT_IN_FREXP)
    CASE_MATHFN (BUILT_IN_GAMMA)
    CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
    CASE_MATHFN (BUILT_IN_HUGE_VAL)
    CASE_MATHFN (BUILT_IN_HYPOT)
    CASE_MATHFN (BUILT_IN_ILOGB)
    CASE_MATHFN (BUILT_IN_INF)
    CASE_MATHFN (BUILT_IN_ISINF)
    CASE_MATHFN (BUILT_IN_J0)
    CASE_MATHFN (BUILT_IN_J1)
    CASE_MATHFN (BUILT_IN_JN)
    CASE_MATHFN (BUILT_IN_LCEIL)
    CASE_MATHFN (BUILT_IN_LDEXP)
    CASE_MATHFN (BUILT_IN_LFLOOR)
    CASE_MATHFN (BUILT_IN_LGAMMA)
    CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
    CASE_MATHFN (BUILT_IN_LLCEIL)
    CASE_MATHFN (BUILT_IN_LLFLOOR)
    CASE_MATHFN (BUILT_IN_LLRINT)
    CASE_MATHFN (BUILT_IN_LLROUND)
    CASE_MATHFN (BUILT_IN_LOG)
    CASE_MATHFN (BUILT_IN_LOG10)
    CASE_MATHFN (BUILT_IN_LOG1P)
    CASE_MATHFN (BUILT_IN_LOG2)
    CASE_MATHFN (BUILT_IN_LOGB)
    CASE_MATHFN (BUILT_IN_LRINT)
    CASE_MATHFN (BUILT_IN_LROUND)
    CASE_MATHFN (BUILT_IN_MODF)
    CASE_MATHFN (BUILT_IN_NAN)
    CASE_MATHFN (BUILT_IN_NANS)
    CASE_MATHFN (BUILT_IN_NEARBYINT)
    CASE_MATHFN (BUILT_IN_NEXTAFTER)
    CASE_MATHFN (BUILT_IN_NEXTTOWARD)
    CASE_MATHFN (BUILT_IN_POW)
    CASE_MATHFN (BUILT_IN_POWI)
    CASE_MATHFN (BUILT_IN_POW10)
    CASE_MATHFN (BUILT_IN_REMAINDER)
    CASE_MATHFN (BUILT_IN_REMQUO)
    CASE_MATHFN (BUILT_IN_RINT)
    CASE_MATHFN (BUILT_IN_ROUND)
    CASE_MATHFN (BUILT_IN_SCALB)
    CASE_MATHFN (BUILT_IN_SCALBLN)
    CASE_MATHFN (BUILT_IN_SCALBN)
    CASE_MATHFN (BUILT_IN_SIGNBIT)
    CASE_MATHFN (BUILT_IN_SIGNIFICAND)
    CASE_MATHFN (BUILT_IN_SIN)
    CASE_MATHFN (BUILT_IN_SINCOS)
    CASE_MATHFN (BUILT_IN_SINH)
    CASE_MATHFN (BUILT_IN_SQRT)
    CASE_MATHFN (BUILT_IN_TAN)
    CASE_MATHFN (BUILT_IN_TANH)
    CASE_MATHFN (BUILT_IN_TGAMMA)
    CASE_MATHFN (BUILT_IN_TRUNC)
    CASE_MATHFN (BUILT_IN_Y0)
    CASE_MATHFN (BUILT_IN_Y1)
    CASE_MATHFN (BUILT_IN_YN)

    default:
      return NULL_TREE;
    }

  /* Select the variant matching TYPE's main variant.  */
  if (TYPE_MAIN_VARIANT (type) == double_type_node)
    return fn_arr[fcode];
  else if (TYPE_MAIN_VARIANT (type) == float_type_node)
    return fn_arr[fcodef];
  else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
    return fn_arr[fcodel];
  else
    return NULL_TREE;
}
1818
1819 /* Like mathfn_built_in_1(), but always use the implicit array. */
1820
1821 tree
1822 mathfn_built_in (tree type, enum built_in_function fn)
1823 {
1824 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1825 }
1826
/* If errno must be maintained, expand the RTL to check if the result,
   TARGET, of a built-in function call, EXP, is NaN, and if so set
   errno to EDOM.  */

static void
expand_errno_check (tree exp, rtx target)
{
  rtx lab = gen_label_rtx ();

  /* Test the result; if it is NaN, set errno=EDOM because
     the argument was not in the domain.  Comparing TARGET against
     itself and branching on equality skips the errno code for every
     non-NaN result, since only NaN compares unequal to itself.  */
  emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target),
			   0, lab);

#ifdef TARGET_EDOM
  /* If this built-in doesn't throw an exception, set errno directly.  */
  if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
    {
#ifdef GEN_ERRNO_RTX
      rtx errno_rtx = GEN_ERRNO_RTX;
#else
      /* Without a target-provided errno location, address the C library's
	 "errno" symbol directly.  */
      rtx errno_rtx
	  = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
#endif
      emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
      emit_label (lab);
      return;
    }
#endif

  /* Make sure the library call isn't expanded as a tail call.  */
  CALL_EXPR_TAILCALL (exp) = 0;

  /* We can't set errno=EDOM directly; let the library call do it.
     Pop the arguments right away in case the call gets deleted.  */
  NO_DEFER_POP;
  expand_call (exp, target, 0);
  OK_DEFER_POP;
  emit_label (lab);
}
1867
1868 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1869 Return NULL_RTX if a normal call should be emitted rather than expanding
1870 the function in-line. EXP is the expression that is a call to the builtin
1871 function; if convenient, the result should be placed in TARGET.
1872 SUBTARGET may be used as the target for computing one of EXP's operands. */
1873
1874 static rtx
1875 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1876 {
1877 optab builtin_optab;
1878 rtx op0, insns, before_call;
1879 tree fndecl = get_callee_fndecl (exp);
1880 enum machine_mode mode;
1881 bool errno_set = false;
1882 tree arg;
1883
1884 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
1885 return NULL_RTX;
1886
1887 arg = CALL_EXPR_ARG (exp, 0);
1888
1889 switch (DECL_FUNCTION_CODE (fndecl))
1890 {
1891 CASE_FLT_FN (BUILT_IN_SQRT):
1892 errno_set = ! tree_expr_nonnegative_p (arg);
1893 builtin_optab = sqrt_optab;
1894 break;
1895 CASE_FLT_FN (BUILT_IN_EXP):
1896 errno_set = true; builtin_optab = exp_optab; break;
1897 CASE_FLT_FN (BUILT_IN_EXP10):
1898 CASE_FLT_FN (BUILT_IN_POW10):
1899 errno_set = true; builtin_optab = exp10_optab; break;
1900 CASE_FLT_FN (BUILT_IN_EXP2):
1901 errno_set = true; builtin_optab = exp2_optab; break;
1902 CASE_FLT_FN (BUILT_IN_EXPM1):
1903 errno_set = true; builtin_optab = expm1_optab; break;
1904 CASE_FLT_FN (BUILT_IN_LOGB):
1905 errno_set = true; builtin_optab = logb_optab; break;
1906 CASE_FLT_FN (BUILT_IN_LOG):
1907 errno_set = true; builtin_optab = log_optab; break;
1908 CASE_FLT_FN (BUILT_IN_LOG10):
1909 errno_set = true; builtin_optab = log10_optab; break;
1910 CASE_FLT_FN (BUILT_IN_LOG2):
1911 errno_set = true; builtin_optab = log2_optab; break;
1912 CASE_FLT_FN (BUILT_IN_LOG1P):
1913 errno_set = true; builtin_optab = log1p_optab; break;
1914 CASE_FLT_FN (BUILT_IN_ASIN):
1915 builtin_optab = asin_optab; break;
1916 CASE_FLT_FN (BUILT_IN_ACOS):
1917 builtin_optab = acos_optab; break;
1918 CASE_FLT_FN (BUILT_IN_TAN):
1919 builtin_optab = tan_optab; break;
1920 CASE_FLT_FN (BUILT_IN_ATAN):
1921 builtin_optab = atan_optab; break;
1922 CASE_FLT_FN (BUILT_IN_FLOOR):
1923 builtin_optab = floor_optab; break;
1924 CASE_FLT_FN (BUILT_IN_CEIL):
1925 builtin_optab = ceil_optab; break;
1926 CASE_FLT_FN (BUILT_IN_TRUNC):
1927 builtin_optab = btrunc_optab; break;
1928 CASE_FLT_FN (BUILT_IN_ROUND):
1929 builtin_optab = round_optab; break;
1930 CASE_FLT_FN (BUILT_IN_NEARBYINT):
1931 builtin_optab = nearbyint_optab;
1932 if (flag_trapping_math)
1933 break;
1934 /* Else fallthrough and expand as rint. */
1935 CASE_FLT_FN (BUILT_IN_RINT):
1936 builtin_optab = rint_optab; break;
1937 default:
1938 gcc_unreachable ();
1939 }
1940
1941 /* Make a suitable register to place result in. */
1942 mode = TYPE_MODE (TREE_TYPE (exp));
1943
1944 if (! flag_errno_math || ! HONOR_NANS (mode))
1945 errno_set = false;
1946
1947 /* Before working hard, check whether the instruction is available. */
1948 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
1949 {
1950 target = gen_reg_rtx (mode);
1951
1952 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
1953 need to expand the argument again. This way, we will not perform
1954 side-effects more the once. */
1955 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
1956
1957 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
1958
1959 start_sequence ();
1960
1961 /* Compute into TARGET.
1962 Set TARGET to wherever the result comes back. */
1963 target = expand_unop (mode, builtin_optab, op0, target, 0);
1964
1965 if (target != 0)
1966 {
1967 if (errno_set)
1968 expand_errno_check (exp, target);
1969
1970 /* Output the entire sequence. */
1971 insns = get_insns ();
1972 end_sequence ();
1973 emit_insn (insns);
1974 return target;
1975 }
1976
1977 /* If we were unable to expand via the builtin, stop the sequence
1978 (without outputting the insns) and call to the library function
1979 with the stabilized argument list. */
1980 end_sequence ();
1981 }
1982
1983 before_call = get_last_insn ();
1984
1985 return expand_call (exp, target, target == const0_rtx);
1986 }
1987
/* Expand a call to the builtin binary math functions (pow and atan2).
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, op1, insns;
  int op1_type = REAL_TYPE;
  tree fndecl = get_callee_fndecl (exp);
  tree arg0, arg1;
  enum machine_mode mode;
  bool errno_set = true;

  /* The ldexp family takes an integer second argument; everything else
     handled here takes two floating point arguments.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
    CASE_FLT_FN (BUILT_IN_LDEXP):
      op1_type = INTEGER_TYPE;
      /* FALLTHRU */
    default:
      break;
    }

  if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);

  /* Select the optab implementing this built-in.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_POW):
      builtin_optab = pow_optab; break;
    CASE_FLT_FN (BUILT_IN_ATAN2):
      builtin_optab = atan2_optab; break;
    CASE_FLT_FN (BUILT_IN_SCALB):
      /* scalb only matches the optab semantics for radix-2 modes.  */
      if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
	return 0;
      builtin_optab = scalb_optab; break;
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
      if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
	return 0;
      /* Fall through... */
    CASE_FLT_FN (BUILT_IN_LDEXP):
      builtin_optab = ldexp_optab; break;
    CASE_FLT_FN (BUILT_IN_FMOD):
      builtin_optab = fmod_optab; break;
    CASE_FLT_FN (BUILT_IN_REMAINDER):
    CASE_FLT_FN (BUILT_IN_DREM):
      builtin_optab = remainder_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
    return NULL_RTX;

  target = gen_reg_rtx (mode);

  /* Errno handling is only needed when math errno is honored and the
     mode has NaNs with which a domain error can be signaled.  */
  if (! flag_errno_math || ! HONOR_NANS (mode))
    errno_set = false;

  /* Always stabilize the argument list, so that re-expanding the
     arguments for the fallback library call cannot repeat their
     side effects.  */
  CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
  CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);

  op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
  op1 = expand_normal (arg1);

  start_sequence ();

  /* Compute into TARGET.
     Set TARGET to wherever the result comes back.  */
  target = expand_binop (mode, builtin_optab, op0, op1,
			 target, 0, OPTAB_DIRECT);

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call to the library function
     with the stabilized argument list.  */
  if (target == 0)
    {
      end_sequence ();
      return expand_call (exp, target, target == const0_rtx);
    }

  if (errno_set)
    expand_errno_check (exp, target);

  /* Output the entire sequence.  */
  insns = get_insns ();
  end_sequence ();
  emit_insn (insns);

  return target;
}
2093
/* Expand a call to the builtin sin and cos math functions.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, insns;
  tree fndecl = get_callee_fndecl (exp);
  enum machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  /* Both sin and cos are first attempted via the combined sincos optab.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_COS):
      builtin_optab = sincos_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Check if sincos insn is available, otherwise fallback
     to sin or cos insn.  */
  if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_FLT_FN (BUILT_IN_SIN):
	builtin_optab = sin_optab; break;
      CASE_FLT_FN (BUILT_IN_COS):
	builtin_optab = cos_optab; break;
      default:
	gcc_unreachable ();
      }

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
    {
      target = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      /* Compute into TARGET.
	 Set TARGET to wherever the result comes back.  */
      if (builtin_optab == sincos_optab)
	{
	  int result;

	  /* The sincos insn produces two values; TARGET receives only
	     the one this built-in asks for, the other is discarded.  */
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    CASE_FLT_FN (BUILT_IN_SIN):
	      result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_COS):
	      result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
	      break;
	    default:
	      gcc_unreachable ();
	    }
	  gcc_assert (result);
	}
      else
	{
	  target = expand_unop (mode, builtin_optab, op0, target, 0);
	}

      if (target != 0)
	{
	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return target;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call to the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  target = expand_call (exp, target, target == const0_rtx);

  return target;
}
2197
/* Expand a call to one of the builtin math functions that operate on
   floating point argument and output an integer result (ilogb, isinf,
   isnan, etc).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_interclass_mathfn (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab = 0;
  enum insn_code icode = CODE_FOR_nothing;
  rtx op0;
  tree fndecl = get_callee_fndecl (exp);
  enum machine_mode mode;
  bool errno_set = false;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_ILOGB):
      errno_set = true; builtin_optab = ilogb_optab; break;
    CASE_FLT_FN (BUILT_IN_ISINF):
      builtin_optab = isinf_optab; break;
    case BUILT_IN_ISNORMAL:
    case BUILT_IN_ISFINITE:
    CASE_FLT_FN (BUILT_IN_FINITE):
      /* These builtins have no optabs (yet).  */
      break;
    default:
      gcc_unreachable ();
    }

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math && errno_set)
    return NULL_RTX;

  /* Optab mode depends on the mode of the input argument.  */
  mode = TYPE_MODE (TREE_TYPE (arg));

  if (builtin_optab)
    icode = optab_handler (builtin_optab, mode)->insn_code;

  /* Before working hard, check whether the instruction is available.  */
  if (icode != CODE_FOR_nothing)
    {
      /* Make a suitable register to place result in.  Note that the
	 result mode is that of the call, not of the argument.  */
      if (!target
	  || GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
	target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

      gcc_assert (insn_data[icode].operand[0].predicate
		  (target, GET_MODE (target)));

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      if (mode != GET_MODE (op0))
	op0 = convert_to_mode (mode, op0, 0);

      /* Compute into TARGET.
	 Set TARGET to wherever the result comes back.  */
      emit_unop_insn (icode, target, op0, UNKNOWN);
      return target;
    }

  /* If there is no optab, try generic code.  Each case below builds an
     equivalent tree expression and expands it instead.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
      /* Shared by the case blocks below.  */
      tree result;

    CASE_FLT_FN (BUILT_IN_ISINF):
      {
	/* isinf(x) -> isgreater(fabs(x),DBL_MAX).  */
	tree const isgr_fn = built_in_decls[BUILT_IN_ISGREATER];
	tree const type = TREE_TYPE (arg);
	REAL_VALUE_TYPE r;
	char buf[128];

	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
	real_from_string (&r, buf);
	result = build_call_expr (isgr_fn, 2,
				  fold_build1 (ABS_EXPR, type, arg),
				  build_real (type, r));
	return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
      }
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_ISFINITE:
      {
	/* isfinite(x) -> islessequal(fabs(x),DBL_MAX).  */
	tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
	tree const type = TREE_TYPE (arg);
	REAL_VALUE_TYPE r;
	char buf[128];

	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
	real_from_string (&r, buf);
	result = build_call_expr (isle_fn, 2,
				  fold_build1 (ABS_EXPR, type, arg),
				  build_real (type, r));
	return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
      }
    case BUILT_IN_ISNORMAL:
      {
	/* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
	   islessequal(fabs(x),DBL_MAX).  */
	tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
	tree const isge_fn = built_in_decls[BUILT_IN_ISGREATEREQUAL];
	tree const type = TREE_TYPE (arg);
	REAL_VALUE_TYPE rmax, rmin;
	char buf[128];

	/* The smallest normalized value is 0x1p(emin-1) for this mode.  */
	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
	real_from_string (&rmax, buf);
	sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
	real_from_string (&rmin, buf);
	/* Stabilize fabs(x), which is used twice below.  */
	arg = builtin_save_expr (fold_build1 (ABS_EXPR, type, arg));
	result = build_call_expr (isle_fn, 2, arg,
				  build_real (type, rmax));
	result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
			      build_call_expr (isge_fn, 2, arg,
					       build_real (type, rmin)));
	return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
      }
    default:
      break;
    }

  target = expand_call (exp, target, target == const0_rtx);

  return target;
}
2340
2341 /* Expand a call to the builtin sincos math function.
2342 Return NULL_RTX if a normal call should be emitted rather than expanding the
2343 function in-line. EXP is the expression that is a call to the builtin
2344 function. */
2345
2346 static rtx
2347 expand_builtin_sincos (tree exp)
2348 {
2349 rtx op0, op1, op2, target1, target2;
2350 enum machine_mode mode;
2351 tree arg, sinp, cosp;
2352 int result;
2353
2354 if (!validate_arglist (exp, REAL_TYPE,
2355 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2356 return NULL_RTX;
2357
2358 arg = CALL_EXPR_ARG (exp, 0);
2359 sinp = CALL_EXPR_ARG (exp, 1);
2360 cosp = CALL_EXPR_ARG (exp, 2);
2361
2362 /* Make a suitable register to place result in. */
2363 mode = TYPE_MODE (TREE_TYPE (arg));
2364
2365 /* Check if sincos insn is available, otherwise emit the call. */
2366 if (optab_handler (sincos_optab, mode)->insn_code == CODE_FOR_nothing)
2367 return NULL_RTX;
2368
2369 target1 = gen_reg_rtx (mode);
2370 target2 = gen_reg_rtx (mode);
2371
2372 op0 = expand_normal (arg);
2373 op1 = expand_normal (build_fold_indirect_ref (sinp));
2374 op2 = expand_normal (build_fold_indirect_ref (cosp));
2375
2376 /* Compute into target1 and target2.
2377 Set TARGET to wherever the result comes back. */
2378 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2379 gcc_assert (result);
2380
2381 /* Move target1 and target2 to the memory locations indicated
2382 by op1 and op2. */
2383 emit_move_insn (op1, target1);
2384 emit_move_insn (op2, target2);
2385
2386 return const0_rtx;
2387 }
2388
/* Expand a call to the internal cexpi builtin to the sincos math function.
   EXP is the expression that is a call to the builtin function; if convenient,
   the result should be placed in TARGET.  SUBTARGET may be used as the target
   for computing one of EXP's operands.  */

static rtx
expand_builtin_cexpi (tree exp, rtx target, rtx subtarget)
{
  tree fndecl = get_callee_fndecl (exp);
  tree arg, type;
  enum machine_mode mode;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Try expanding via a sincos optab, fall back to emitting a libcall
     to sincos or cexp.  We are sure we have sincos or cexp because cexpi
     is only generated from sincos, cexp or if we have either of them.  */
  if (optab_handler (sincos_optab, mode)->insn_code != CODE_FOR_nothing)
    {
      op1 = gen_reg_rtx (mode);
      op2 = gen_reg_rtx (mode);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      /* Compute into op1 and op2.  */
      expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
    }
  else if (TARGET_HAS_SINCOS)
    {
      tree call, fn = NULL_TREE;
      tree top1, top2;
      rtx op1a, op2a;

      /* Pick the sincos variant matching this cexpi's precision.  */
      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	fn = built_in_decls[BUILT_IN_SINCOSF];
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	fn = built_in_decls[BUILT_IN_SINCOS];
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	fn = built_in_decls[BUILT_IN_SINCOSL];
      else
	gcc_unreachable ();

      /* Stack temporaries to receive the two results; sincos takes
	 their addresses as its second and third arguments.  */
      op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
      op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
      op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
      op2a = copy_to_mode_reg (Pmode, XEXP (op2, 0));
      top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
      top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);

      /* Make sure not to fold the sincos call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
				      call, 3, arg, top1, top2));
    }
  else
    {
      tree call, fn = NULL_TREE, narg;
      tree ctype = build_complex_type (type);

      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	fn = built_in_decls[BUILT_IN_CEXPF];
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	fn = built_in_decls[BUILT_IN_CEXP];
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	fn = built_in_decls[BUILT_IN_CEXPL];
      else
	gcc_unreachable ();

      /* If we don't have a decl for cexp create one.  This is the
	 friendliest fallback if the user calls __builtin_cexpi
	 without full target C99 function support.  */
      if (fn == NULL_TREE)
	{
	  tree fntype;
	  const char *name = NULL;

	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	    name = "cexpf";
	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	    name = "cexp";
	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	    name = "cexpl";

	  fntype = build_function_type_list (ctype, ctype, NULL_TREE);
	  fn = build_fn_decl (name, fntype);
	}

      /* cexpi(x) == cexp(0 + x*i), so build the complex argument.  */
      narg = fold_build2 (COMPLEX_EXPR, ctype,
			  build_real (type, dconst0), arg);

      /* Make sure not to fold the cexp call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      return expand_expr (build_call_nary (ctype, call, 1, narg),
			  target, VOIDmode, EXPAND_NORMAL);
    }

  /* Now build the proper return type: op2 holds the cosine (real part)
     and op1 the sine (imaginary part).  */
  return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
			      make_tree (TREE_TYPE (arg), op2),
			      make_tree (TREE_TYPE (arg), op1)),
		      target, VOIDmode, EXPAND_NORMAL);
}
2497
/* Expand a call to one of the builtin rounding functions gcc defines
   as an extension (lfloor and lceil).  As these are gcc extensions we
   do not need to worry about setting errno to EDOM.
   If expanding via optab fails, lower expression to (int)(floor(x)).
   EXP is the expression that is a call to the builtin function;
   if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0, insns, tmp;
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fallback_fn;
  tree fallback_fndecl;
  enum machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    gcc_unreachable ();

  arg = CALL_EXPR_ARG (exp, 0);

  /* Select the conversion optab and the math function to round with
     if the optab cannot expand.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_LCEIL):
    CASE_FLT_FN (BUILT_IN_LLCEIL):
      builtin_optab = lceil_optab;
      fallback_fn = BUILT_IN_CEIL;
      break;

    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      builtin_optab = lfloor_optab;
      fallback_fn = BUILT_IN_FLOOR;
      break;

    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  target = gen_reg_rtx (mode);

  /* Wrap the computation of the argument in a SAVE_EXPR, as we may
     need to expand the argument again.  This way, we will not perform
     side-effects more than once.  */
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

  op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

  start_sequence ();

  /* Compute into TARGET.  */
  if (expand_sfix_optab (target, op0, builtin_optab))
    {
      /* Output the entire sequence.  */
      insns = get_insns ();
      end_sequence ();
      emit_insn (insns);
      return target;
    }

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns).  */
  end_sequence ();

  /* Fall back to floating point rounding optab.  */
  fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);

  /* For non-C99 targets we may end up without a fallback fndecl here
     if the user called __builtin_lfloor directly.  In this case emit
     a call to the floor/ceil variants nevertheless.  This should result
     in the best user experience for not full C99 targets.  */
  if (fallback_fndecl == NULL_TREE)
    {
      tree fntype;
      const char *name = NULL;

      /* Map the built-in code to the C library function name of the
	 matching precision.  */
      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_LCEIL:
	case BUILT_IN_LLCEIL:
	  name = "ceil";
	  break;
	case BUILT_IN_LCEILF:
	case BUILT_IN_LLCEILF:
	  name = "ceilf";
	  break;
	case BUILT_IN_LCEILL:
	case BUILT_IN_LLCEILL:
	  name = "ceill";
	  break;
	case BUILT_IN_LFLOOR:
	case BUILT_IN_LLFLOOR:
	  name = "floor";
	  break;
	case BUILT_IN_LFLOORF:
	case BUILT_IN_LLFLOORF:
	  name = "floorf";
	  break;
	case BUILT_IN_LFLOORL:
	case BUILT_IN_LLFLOORL:
	  name = "floorl";
	  break;
	default:
	  gcc_unreachable ();
	}

      fntype = build_function_type_list (TREE_TYPE (arg),
					 TREE_TYPE (arg), NULL_TREE);
      fallback_fndecl = build_fn_decl (name, fntype);
    }

  /* Expand a call to the fallback rounding function with the
     stabilized argument.  */
  exp = build_call_expr (fallback_fndecl, 1, arg);

  tmp = expand_normal (exp);

  /* Truncate the result of floating point optab to integer
     via expand_fix ().  */
  target = gen_reg_rtx (mode);
  expand_fix (target, tmp, 0);

  return target;
}
2625
2626 /* Expand a call to one of the builtin math functions doing integer
2627 conversion (lrint).
2628 Return 0 if a normal call should be emitted rather than expanding the
2629 function in-line. EXP is the expression that is a call to the builtin
2630 function; if convenient, the result should be placed in TARGET. */
2631
2632 static rtx
2633 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2634 {
2635 convert_optab builtin_optab;
2636 rtx op0, insns;
2637 tree fndecl = get_callee_fndecl (exp);
2638 tree arg;
2639 enum machine_mode mode;
2640
2641 /* There's no easy way to detect the case we need to set EDOM. */
2642 if (flag_errno_math)
2643 return NULL_RTX;
2644
2645 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2646 gcc_unreachable ();
2647
2648 arg = CALL_EXPR_ARG (exp, 0);
2649
2650 switch (DECL_FUNCTION_CODE (fndecl))
2651 {
2652 CASE_FLT_FN (BUILT_IN_LRINT):
2653 CASE_FLT_FN (BUILT_IN_LLRINT):
2654 builtin_optab = lrint_optab; break;
2655 CASE_FLT_FN (BUILT_IN_LROUND):
2656 CASE_FLT_FN (BUILT_IN_LLROUND):
2657 builtin_optab = lround_optab; break;
2658 default:
2659 gcc_unreachable ();
2660 }
2661
2662 /* Make a suitable register to place result in. */
2663 mode = TYPE_MODE (TREE_TYPE (exp));
2664
2665 target = gen_reg_rtx (mode);
2666
2667 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2668 need to expand the argument again. This way, we will not perform
2669 side-effects more the once. */
2670 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2671
2672 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2673
2674 start_sequence ();
2675
2676 if (expand_sfix_optab (target, op0, builtin_optab))
2677 {
2678 /* Output the entire sequence. */
2679 insns = get_insns ();
2680 end_sequence ();
2681 emit_insn (insns);
2682 return target;
2683 }
2684
2685 /* If we were unable to expand via the builtin, stop the sequence
2686 (without outputting the insns) and call to the library function
2687 with the stabilized argument list. */
2688 end_sequence ();
2689
2690 target = expand_call (exp, target, target == const0_rtx);
2691
2692 return target;
2693 }
2694
2695 /* To evaluate powi(x,n), the floating point value x raised to the
2696 constant integer exponent n, we use a hybrid algorithm that
2697 combines the "window method" with look-up tables. For an
2698 introduction to exponentiation algorithms and "addition chains",
2699 see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
2700 "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
2701 3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
2702 Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998. */
2703
2704 /* Provide a default value for POWI_MAX_MULTS, the maximum number of
2705 multiplications to inline before calling the system library's pow
2706 function. powi(x,n) requires at worst 2*bits(n)-2 multiplications,
2707 so this default never requires calling pow, powf or powl. */
2708
2709 #ifndef POWI_MAX_MULTS
2710 #define POWI_MAX_MULTS (2*HOST_BITS_PER_WIDE_INT-2)
2711 #endif
2712
2713 /* The size of the "optimal power tree" lookup table. All
2714 exponents less than this value are simply looked up in the
2715 powi_table below. This threshold is also used to size the
2716 cache of pseudo registers that hold intermediate results. */
2717 #define POWI_TABLE_SIZE 256
2718
2719 /* The size, in bits of the window, used in the "window method"
2720 exponentiation algorithm. This is equivalent to a radix of
2721 (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method". */
2722 #define POWI_WINDOW_SIZE 3
2723
/* The following table is an efficient representation of an
   "optimal power tree".  For each value, i, the corresponding
   value, j, in the table states that an optimal evaluation
   sequence for calculating pow(x,i) can be found by evaluating
   pow(x,j)*pow(x,i-j).  An optimal power tree for the first
   100 integers is given in Knuth's "Seminumerical algorithms".
   The table is consulted recursively by powi_lookup_cost below.  */

static const unsigned char powi_table[POWI_TABLE_SIZE] =
  {
      0,   1,   1,   2,   2,   3,   3,   4,  /* 0 - 7 */
      4,   6,   5,   6,   6,  10,   7,   9,  /* 8 - 15 */
      8,  16,   9,  16,  10,  12,  11,  13,  /* 16 - 23 */
     12,  17,  13,  18,  14,  24,  15,  26,  /* 24 - 31 */
     16,  17,  17,  19,  18,  33,  19,  26,  /* 32 - 39 */
     20,  25,  21,  40,  22,  27,  23,  44,  /* 40 - 47 */
     24,  32,  25,  34,  26,  29,  27,  44,  /* 48 - 55 */
     28,  31,  29,  34,  30,  60,  31,  36,  /* 56 - 63 */
     32,  64,  33,  34,  34,  46,  35,  37,  /* 64 - 71 */
     36,  65,  37,  50,  38,  48,  39,  69,  /* 72 - 79 */
     40,  49,  41,  43,  42,  51,  43,  58,  /* 80 - 87 */
     44,  64,  45,  47,  46,  59,  47,  76,  /* 88 - 95 */
     48,  65,  49,  66,  50,  67,  51,  66,  /* 96 - 103 */
     52,  70,  53,  74,  54, 104,  55,  74,  /* 104 - 111 */
     56,  64,  57,  69,  58,  78,  59,  68,  /* 112 - 119 */
     60,  61,  61,  80,  62,  75,  63,  68,  /* 120 - 127 */
     64,  65,  65, 128,  66, 129,  67,  90,  /* 128 - 135 */
     68,  73,  69, 131,  70,  94,  71,  88,  /* 136 - 143 */
     72, 128,  73,  98,  74, 132,  75, 121,  /* 144 - 151 */
     76, 102,  77, 124,  78, 132,  79, 106,  /* 152 - 159 */
     80,  97,  81, 160,  82,  99,  83, 134,  /* 160 - 167 */
     84,  86,  85,  95,  86, 160,  87, 100,  /* 168 - 175 */
     88, 113,  89,  98,  90, 107,  91, 122,  /* 176 - 183 */
     92, 111,  93, 102,  94, 126,  95, 150,  /* 184 - 191 */
     96, 128,  97, 130,  98, 133,  99, 195,  /* 192 - 199 */
    100, 128, 101, 123, 102, 164, 103, 138,  /* 200 - 207 */
    104, 145, 105, 146, 106, 109, 107, 149,  /* 208 - 215 */
    108, 200, 109, 146, 110, 170, 111, 157,  /* 216 - 223 */
    112, 128, 113, 130, 114, 182, 115, 132,  /* 224 - 231 */
    116, 200, 117, 132, 118, 158, 119, 206,  /* 232 - 239 */
    120, 240, 121, 162, 122, 147, 123, 152,  /* 240 - 247 */
    124, 166, 125, 214, 126, 138, 127, 153,  /* 248 - 255 */
  };
2766
2767
2768 /* Return the number of multiplications required to calculate
2769 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2770 subroutine of powi_cost. CACHE is an array indicating
2771 which exponents have already been calculated. */
2772
2773 static int
2774 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
2775 {
2776 /* If we've already calculated this exponent, then this evaluation
2777 doesn't require any additional multiplications. */
2778 if (cache[n])
2779 return 0;
2780
2781 cache[n] = true;
2782 return powi_lookup_cost (n - powi_table[n], cache)
2783 + powi_lookup_cost (powi_table[n], cache) + 1;
2784 }
2785
2786 /* Return the number of multiplications required to calculate
2787 powi(x,n) for an arbitrary x, given the exponent N. This
2788 function needs to be kept in sync with expand_powi below. */
2789
2790 static int
2791 powi_cost (HOST_WIDE_INT n)
2792 {
2793 bool cache[POWI_TABLE_SIZE];
2794 unsigned HOST_WIDE_INT digit;
2795 unsigned HOST_WIDE_INT val;
2796 int result;
2797
2798 if (n == 0)
2799 return 0;
2800
2801 /* Ignore the reciprocal when calculating the cost. */
2802 val = (n < 0) ? -n : n;
2803
2804 /* Initialize the exponent cache. */
2805 memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
2806 cache[1] = true;
2807
2808 result = 0;
2809
2810 while (val >= POWI_TABLE_SIZE)
2811 {
2812 if (val & 1)
2813 {
2814 digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2815 result += powi_lookup_cost (digit, cache)
2816 + POWI_WINDOW_SIZE + 1;
2817 val >>= POWI_WINDOW_SIZE;
2818 }
2819 else
2820 {
2821 val >>= 1;
2822 result++;
2823 }
2824 }
2825
2826 return result + powi_lookup_cost (val, cache);
2827 }
2828
2829 /* Recursive subroutine of expand_powi. This function takes the array,
2830 CACHE, of already calculated exponents and an exponent N and returns
2831 an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE. */
2832
static rtx
expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
{
  unsigned HOST_WIDE_INT digit;
  rtx target, result;
  rtx op0, op1;

  if (n < POWI_TABLE_SIZE)
    {
      /* Small exponent: reuse the cached pseudo if this power was
	 already emitted; otherwise split N per the optimal power
	 tree into powi_table[n] and n - powi_table[n].  The cache
	 entry is recorded before recursing so sibling recursions
	 can share it.  */
      if (cache[n])
	return cache[n];

      target = gen_reg_rtx (mode);
      cache[n] = target;

      op0 = expand_powi_1 (mode, n - powi_table[n], cache);
      op1 = expand_powi_1 (mode, powi_table[n], cache);
    }
  else if (n & 1)
    {
      /* Large odd exponent: strip off a POWI_WINDOW_SIZE-bit window
	 (window method) and combine with the remaining power.  */
      target = gen_reg_rtx (mode);
      digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
      op0 = expand_powi_1 (mode, n - digit, cache);
      op1 = expand_powi_1 (mode, digit, cache);
    }
  else
    {
      /* Large even exponent: square the half power.  */
      target = gen_reg_rtx (mode);
      op0 = expand_powi_1 (mode, n >> 1, cache);
      op1 = op0;
    }

  /* Emit the combining multiplication, forcing the value into
     TARGET so the cache entry stays valid.  */
  result = expand_mult (mode, op0, op1, target, 0);
  if (result != target)
    emit_move_insn (target, result);
  return target;
}
2870
2871 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
2872 floating point operand in mode MODE, and N is the exponent. This
2873 function needs to be kept in sync with powi_cost above. */
2874
2875 static rtx
2876 expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
2877 {
2878 unsigned HOST_WIDE_INT val;
2879 rtx cache[POWI_TABLE_SIZE];
2880 rtx result;
2881
2882 if (n == 0)
2883 return CONST1_RTX (mode);
2884
2885 val = (n < 0) ? -n : n;
2886
2887 memset (cache, 0, sizeof (cache));
2888 cache[1] = x;
2889
2890 result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
2891
2892 /* If the original exponent was negative, reciprocate the result. */
2893 if (n < 0)
2894 result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2895 result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2896
2897 return result;
2898 }
2899
2900 /* Expand a call to the pow built-in mathematical function. Return NULL_RTX if
2901 a normal call should be emitted rather than expanding the function
2902 in-line. EXP is the expression that is a call to the builtin
2903 function; if convenient, the result should be placed in TARGET. */
2904
static rtx
expand_builtin_pow (tree exp, rtx target, rtx subtarget)
{
  tree arg0, arg1;
  tree fn, narg0;
  tree type = TREE_TYPE (exp);
  REAL_VALUE_TYPE cint, c, c2;
  HOST_WIDE_INT n;
  rtx op, op2;
  enum machine_mode mode = TYPE_MODE (type);

  if (! validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);

  /* A non-constant (or overflowed) exponent gets the generic
     two-argument math-function expansion.  */
  if (TREE_CODE (arg1) != REAL_CST
      || TREE_OVERFLOW (arg1))
    return expand_builtin_mathfn_2 (exp, target, subtarget);

  /* Handle constant exponents.  */

  /* For integer valued exponents we can expand to an optimal multiplication
     sequence using expand_powi.  Exponents -1..2 are always exact; larger
     ones require -funsafe-math-optimizations, a speed-optimized context,
     and an affordable multiplication count.  */
  c = TREE_REAL_CST (arg1);
  n = real_to_integer (&c);
  real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
  if (real_identical (&c, &cint)
      && ((n >= -1 && n <= 2)
	  || (flag_unsafe_math_optimizations
	      && optimize_insn_for_speed_p ()
	      && powi_cost (n) <= POWI_MAX_MULTS)))
    {
      op = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
      if (n != 1)
	{
	  op = force_reg (mode, op);
	  op = expand_powi (op, mode, n);
	}
      return op;
    }

  /* ARG0 may be evaluated twice below (once for sqrt/cbrt, once for
     the powi part), so protect it against re-expansion.  */
  narg0 = builtin_save_expr (arg0);

  /* If the exponent is not integer valued, check if it is half of an integer.
     In this case we can expand to sqrt (x) * x**(n/2).  */
  fn = mathfn_built_in (type, BUILT_IN_SQRT);
  if (fn != NULL_TREE)
    {
      /* C2 = 2*C; if C2 is an exact integer N, then C == N/2.  */
      real_arithmetic (&c2, MULT_EXPR, &c, &dconst2);
      n = real_to_integer (&c2);
      real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
      if (real_identical (&c2, &cint)
	  && ((flag_unsafe_math_optimizations
	       && optimize_insn_for_speed_p ()
	       && powi_cost (n/2) <= POWI_MAX_MULTS)
	      || n == 1))
	{
	  tree call_expr = build_call_expr (fn, 1, narg0);
	  /* Use expand_expr in case the newly built call expression
	     was folded to a non-call.  */
	  op = expand_expr (call_expr, subtarget, mode, EXPAND_NORMAL);
	  if (n != 1)
	    {
	      op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
	      op2 = force_reg (mode, op2);
	      op2 = expand_powi (op2, mode, abs (n / 2));
	      op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
					0, OPTAB_LIB_WIDEN);
	      /* If the original exponent was negative, reciprocate the
		 result.  */
	      if (n < 0)
		op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
				   op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
	    }
	  return op;
	}
    }

  /* Try if the exponent is a third of an integer.  In this case
     we can expand to x**(n/3) * cbrt(x)**(n%3).  As cbrt (x) is
     different from pow (x, 1./3.) due to rounding and behavior
     with negative x we need to constrain this transformation to
     unsafe math and positive x or finite math.  */
  fn = mathfn_built_in (type, BUILT_IN_CBRT);
  if (fn != NULL_TREE
      && flag_unsafe_math_optimizations
      && (tree_expr_nonnegative_p (arg0)
	  || !HONOR_NANS (mode)))
    {
      REAL_VALUE_TYPE dconst3;
      real_from_integer (&dconst3, VOIDmode, 3, 0, 0);
      /* Round 3*C to an integer N, then verify N/3 converted back to
	 MODE really equals C (i.e. C is exactly a third).  */
      real_arithmetic (&c2, MULT_EXPR, &c, &dconst3);
      real_round (&c2, mode, &c2);
      n = real_to_integer (&c2);
      real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
      real_arithmetic (&c2, RDIV_EXPR, &cint, &dconst3);
      real_convert (&c2, mode, &c2);
      if (real_identical (&c2, &c)
	  && ((optimize_insn_for_speed_p ()
	       && powi_cost (n/3) <= POWI_MAX_MULTS)
	      || n == 1))
	{
	  tree call_expr = build_call_expr (fn, 1,narg0);
	  op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
	  /* cbrt(x)**2 accounts for n % 3 == 2 (the  n % 3 == 1 case
	     is the single cbrt already expanded above).  */
	  if (abs (n) % 3 == 2)
	    op = expand_simple_binop (mode, MULT, op, op, op,
				      0, OPTAB_LIB_WIDEN);
	  if (n != 1)
	    {
	      op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
	      op2 = force_reg (mode, op2);
	      op2 = expand_powi (op2, mode, abs (n / 3));
	      op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
					0, OPTAB_LIB_WIDEN);
	      /* If the original exponent was negative, reciprocate the
		 result.  */
	      if (n < 0)
		op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
				   op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
	    }
	  return op;
	}
    }

  /* Fall back to optab expansion.  */
  return expand_builtin_mathfn_2 (exp, target, subtarget);
}
3034
3035 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
3036 a normal call should be emitted rather than expanding the function
3037 in-line. EXP is the expression that is a call to the builtin
3038 function; if convenient, the result should be placed in TARGET. */
3039
static rtx
expand_builtin_powi (tree exp, rtx target, rtx subtarget)
{
  tree arg0, arg1;
  rtx op0, op1;
  enum machine_mode mode;
  enum machine_mode mode2;

  if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Handle constant power.  */

  if (TREE_CODE (arg1) == INTEGER_CST
      && !TREE_OVERFLOW (arg1))
    {
      HOST_WIDE_INT n = TREE_INT_CST_LOW (arg1);

      /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
	 Otherwise, check the number of multiplications required.
	 The HIGH-word test ensures the constant actually fits in a
	 HOST_WIDE_INT (0 for non-negative, -1 for sign-extended
	 negative values).  */
      if ((TREE_INT_CST_HIGH (arg1) == 0
	   || TREE_INT_CST_HIGH (arg1) == -1)
	  && ((n >= -1 && n <= 2)
	      || (optimize_insn_for_speed_p ()
		  && powi_cost (n) <= POWI_MAX_MULTS)))
	{
	  op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
	  op0 = force_reg (mode, op0);
	  return expand_powi (op0, mode, n);
	}
    }

  /* Emit a libcall to libgcc.  */

  /* Mode of the 2nd argument must match that of an int.  */
  mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);

  if (target == NULL_RTX)
    target = gen_reg_rtx (mode);

  op0 = expand_expr (arg0, subtarget, mode, EXPAND_NORMAL);
  if (GET_MODE (op0) != mode)
    op0 = convert_to_mode (mode, op0, 0);
  op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
  if (GET_MODE (op1) != mode2)
    op1 = convert_to_mode (mode2, op1, 0);

  target = emit_library_call_value (optab_libfunc (powi_optab, mode),
				    target, LCT_CONST, mode, 2,
				    op0, mode, op1, mode2);

  return target;
}
3097
3098 /* Expand expression EXP which is a call to the strlen builtin. Return
3099 NULL_RTX if we failed the caller should emit a normal call, otherwise
3100 try to get the result in TARGET, if convenient. */
3101
static rtx
expand_builtin_strlen (tree exp, rtx target,
		       enum machine_mode target_mode)
{
  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      rtx pat;
      tree len;
      tree src = CALL_EXPR_ARG (exp, 0);
      rtx result, src_reg, char_rtx, before_strlen;
      enum machine_mode insn_mode = target_mode, char_mode;
      enum insn_code icode = CODE_FOR_nothing;
      int align;

      /* If the length can be computed at compile-time, return it.  */
      len = c_strlen (src, 0);
      if (len)
	return expand_expr (len, target, target_mode, EXPAND_NORMAL);

      /* If the length can be computed at compile-time and is constant
	 integer, but there are side-effects in src, evaluate
	 src for side-effects, then return len.
	 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
	 can be optimized into: i++; x = 3;  */
      len = c_strlen (src, 1);
      if (len && TREE_CODE (len) == INTEGER_CST)
	{
	  expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
	  return expand_expr (len, target, target_mode, EXPAND_NORMAL);
	}

      align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;

      /* If SRC is not a pointer type, don't do this operation inline.  */
      if (align == 0)
	return NULL_RTX;

      /* Bail out if we can't compute strlen in the right mode: search
	 successively wider integer modes for one the target supports.  */
      while (insn_mode != VOIDmode)
	{
	  icode = optab_handler (strlen_optab, insn_mode)->insn_code;
	  if (icode != CODE_FOR_nothing)
	    break;

	  insn_mode = GET_MODE_WIDER_MODE (insn_mode);
	}
      if (insn_mode == VOIDmode)
	return NULL_RTX;

      /* Make a place to write the result of the instruction.  */
      result = target;
      if (! (result != 0
	     && REG_P (result)
	     && GET_MODE (result) == insn_mode
	     && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	result = gen_reg_rtx (insn_mode);

      /* Make a place to hold the source address.  We will not expand
	 the actual source until we are sure that the expansion will
	 not fail -- there are trees that cannot be expanded twice.  */
      src_reg = gen_reg_rtx (Pmode);

      /* Mark the beginning of the strlen sequence so we can emit the
	 source operand later.  */
      before_strlen = get_last_insn ();

      /* Operand 2 of the strlen pattern is the search character (NUL);
	 force it into a register if the predicate rejects const0_rtx.  */
      char_rtx = const0_rtx;
      char_mode = insn_data[(int) icode].operand[2].mode;
      if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
							    char_mode))
	char_rtx = copy_to_mode_reg (char_mode, char_rtx);

      pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
			     char_rtx, GEN_INT (align));
      if (! pat)
	return NULL_RTX;
      emit_insn (pat);

      /* Now that we are assured of success, expand the source.  */
      start_sequence ();
      pat = expand_expr (src, src_reg, ptr_mode, EXPAND_NORMAL);
      if (pat != src_reg)
	emit_move_insn (src_reg, pat);
      pat = get_insns ();
      end_sequence ();

      /* Splice the source-address computation in ahead of the strlen
	 insn emitted above.  */
      if (before_strlen)
	emit_insn_after (pat, before_strlen);
      else
	emit_insn_before (pat, get_insns ());

      /* Return the value in the proper mode for this function.  */
      if (GET_MODE (result) == target_mode)
	target = result;
      else if (target != 0)
	convert_move (target, result, 0);
      else
	target = convert_to_mode (target_mode, result, 0);

      return target;
    }
}
3206
3207 /* Expand a call to the strstr builtin. Return NULL_RTX if we failed the
3208 caller should emit a normal call, otherwise try to get the result
3209 in TARGET, if convenient (and in mode MODE if that's convenient). */
3210
3211 static rtx
3212 expand_builtin_strstr (tree exp, rtx target, enum machine_mode mode)
3213 {
3214 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3215 {
3216 tree type = TREE_TYPE (exp);
3217 tree result = fold_builtin_strstr (CALL_EXPR_ARG (exp, 0),
3218 CALL_EXPR_ARG (exp, 1), type);
3219 if (result)
3220 return expand_expr (result, target, mode, EXPAND_NORMAL);
3221 }
3222 return NULL_RTX;
3223 }
3224
3225 /* Expand a call to the strchr builtin. Return NULL_RTX if we failed the
3226 caller should emit a normal call, otherwise try to get the result
3227 in TARGET, if convenient (and in mode MODE if that's convenient). */
3228
3229 static rtx
3230 expand_builtin_strchr (tree exp, rtx target, enum machine_mode mode)
3231 {
3232 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3233 {
3234 tree type = TREE_TYPE (exp);
3235 tree result = fold_builtin_strchr (CALL_EXPR_ARG (exp, 0),
3236 CALL_EXPR_ARG (exp, 1), type);
3237 if (result)
3238 return expand_expr (result, target, mode, EXPAND_NORMAL);
3239
3240 /* FIXME: Should use strchrM optab so that ports can optimize this. */
3241 }
3242 return NULL_RTX;
3243 }
3244
3245 /* Expand a call to the strrchr builtin. Return NULL_RTX if we failed the
3246 caller should emit a normal call, otherwise try to get the result
3247 in TARGET, if convenient (and in mode MODE if that's convenient). */
3248
3249 static rtx
3250 expand_builtin_strrchr (tree exp, rtx target, enum machine_mode mode)
3251 {
3252 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3253 {
3254 tree type = TREE_TYPE (exp);
3255 tree result = fold_builtin_strrchr (CALL_EXPR_ARG (exp, 0),
3256 CALL_EXPR_ARG (exp, 1), type);
3257 if (result)
3258 return expand_expr (result, target, mode, EXPAND_NORMAL);
3259 }
3260 return NULL_RTX;
3261 }
3262
3263 /* Expand a call to the strpbrk builtin. Return NULL_RTX if we failed the
3264 caller should emit a normal call, otherwise try to get the result
3265 in TARGET, if convenient (and in mode MODE if that's convenient). */
3266
3267 static rtx
3268 expand_builtin_strpbrk (tree exp, rtx target, enum machine_mode mode)
3269 {
3270 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3271 {
3272 tree type = TREE_TYPE (exp);
3273 tree result = fold_builtin_strpbrk (CALL_EXPR_ARG (exp, 0),
3274 CALL_EXPR_ARG (exp, 1), type);
3275 if (result)
3276 return expand_expr (result, target, mode, EXPAND_NORMAL);
3277 }
3278 return NULL_RTX;
3279 }
3280
3281 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3282 bytes from constant string DATA + OFFSET and return it as target
3283 constant. */
3284
3285 static rtx
3286 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3287 enum machine_mode mode)
3288 {
3289 const char *str = (const char *) data;
3290
3291 gcc_assert (offset >= 0
3292 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3293 <= strlen (str) + 1));
3294
3295 return c_readstr (str + offset, mode);
3296 }
3297
3298 /* Expand a call EXP to the memcpy builtin.
3299 Return NULL_RTX if we failed, the caller should emit a normal call,
3300 otherwise try to get the result in TARGET, if convenient (and in
3301 mode MODE if that's convenient). */
3302
static rtx
expand_builtin_memcpy (tree exp, rtx target, enum machine_mode mode)
{
  tree fndecl = get_callee_fndecl (exp);

  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      tree len = CALL_EXPR_ARG (exp, 2);
      const char *src_str;
      unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
      unsigned int dest_align
	= get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
      rtx dest_mem, src_mem, dest_addr, len_rtx;
      tree result = fold_builtin_memory_op (dest, src, len,
					    TREE_TYPE (TREE_TYPE (fndecl)),
					    false, /*endp=*/0);
      HOST_WIDE_INT expected_size = -1;
      unsigned int expected_align = 0;
      tree_ann_common_t ann;

      if (result)
	{
	  /* The call folded to a simpler tree.  Expand COMPOUND_EXPR
	     wrappers for their side effects only, then the value.  */
	  while (TREE_CODE (result) == COMPOUND_EXPR)
	    {
	      expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
			   EXPAND_NORMAL);
	      result = TREE_OPERAND (result, 1);
	    }
	  return expand_expr (result, target, mode, EXPAND_NORMAL);
	}

      /* If DEST is not a pointer type, call the normal function.  */
      if (dest_align == 0)
	return NULL_RTX;

      /* If either SRC is not a pointer type, don't do this
	 operation in-line.  */
      if (src_align == 0)
	return NULL_RTX;

      /* Pull profile-derived expected alignment/size hints from the
	 statement annotation, when available.  */
      ann = tree_common_ann (exp);
      if (ann)
	stringop_block_profile (ann->stmt, &expected_align, &expected_size);

      if (expected_align < dest_align)
	expected_align = dest_align;
      dest_mem = get_memory_rtx (dest, len);
      set_mem_align (dest_mem, dest_align);
      len_rtx = expand_normal (len);
      src_str = c_getstr (src);

      /* If SRC is a string constant and block move would be done
	 by pieces, we can avoid loading the string from memory
	 and only store the computed constants.  */
      if (src_str
	  && GET_CODE (len_rtx) == CONST_INT
	  && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
	  && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
				  CONST_CAST (char *, src_str),
				  dest_align, false))
	{
	  dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
				      builtin_memcpy_read_str,
				      CONST_CAST (char *, src_str),
				      dest_align, false, 0);
	  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}

      src_mem = get_memory_rtx (src, len);
      set_mem_align (src_mem, src_align);

      /* Copy word part most expediently.  */
      dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
					 CALL_EXPR_TAILCALL (exp)
					 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
					 expected_align, expected_size);

      /* The return value is DEST; reconstruct it if the block move
	 didn't hand one back.  */
      if (dest_addr == 0)
	{
	  dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
	  dest_addr = convert_memory_address (ptr_mode, dest_addr);
	}
      return dest_addr;
    }
}
3395
3396 /* Expand a call EXP to the mempcpy builtin.
3397 Return NULL_RTX if we failed; the caller should emit a normal call,
3398 otherwise try to get the result in TARGET, if convenient (and in
3399 mode MODE if that's convenient). If ENDP is 0 return the
3400 destination pointer, if ENDP is 1 return the end pointer ala
3401 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3402 stpcpy. */
3403
3404 static rtx
3405 expand_builtin_mempcpy(tree exp, rtx target, enum machine_mode mode)
3406 {
3407 if (!validate_arglist (exp,
3408 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3409 return NULL_RTX;
3410 else
3411 {
3412 tree dest = CALL_EXPR_ARG (exp, 0);
3413 tree src = CALL_EXPR_ARG (exp, 1);
3414 tree len = CALL_EXPR_ARG (exp, 2);
3415 return expand_builtin_mempcpy_args (dest, src, len,
3416 TREE_TYPE (exp),
3417 target, mode, /*endp=*/ 1);
3418 }
3419 }
3420
3421 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3422 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3423 so that this can also be called without constructing an actual CALL_EXPR.
3424 TYPE is the return type of the call. The other arguments and return value
3425 are the same as for expand_builtin_mempcpy. */
3426
static rtx
expand_builtin_mempcpy_args (tree dest, tree src, tree len, tree type,
			     rtx target, enum machine_mode mode, int endp)
{
  /* If return value is ignored, transform mempcpy into memcpy.  */
  if (target == const0_rtx)
    {
      tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];

      if (!fn)
	return NULL_RTX;

      return expand_expr (build_call_expr (fn, 3, dest, src, len),
			  target, mode, EXPAND_NORMAL);
    }
  else
    {
      const char *src_str;
      unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
      unsigned int dest_align
	= get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
      rtx dest_mem, src_mem, len_rtx;
      tree result = fold_builtin_memory_op (dest, src, len, type, false, endp);

      if (result)
	{
	  /* The call folded to a simpler tree.  Expand COMPOUND_EXPR
	     wrappers for their side effects only, then the value.  */
	  while (TREE_CODE (result) == COMPOUND_EXPR)
	    {
	      expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
			   EXPAND_NORMAL);
	      result = TREE_OPERAND (result, 1);
	    }
	  return expand_expr (result, target, mode, EXPAND_NORMAL);
	}

      /* If either SRC or DEST is not a pointer type, don't do this
	 operation in-line.  */
      if (dest_align == 0 || src_align == 0)
	return NULL_RTX;

      /* If LEN is not constant, call the normal function.  */
      if (! host_integerp (len, 1))
	return NULL_RTX;

      len_rtx = expand_normal (len);
      src_str = c_getstr (src);

      /* If SRC is a string constant and block move would be done
	 by pieces, we can avoid loading the string from memory
	 and only store the computed constants.  */
      if (src_str
	  && GET_CODE (len_rtx) == CONST_INT
	  && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
	  && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
				  CONST_CAST (char *, src_str),
				  dest_align, false))
	{
	  dest_mem = get_memory_rtx (dest, len);
	  set_mem_align (dest_mem, dest_align);
	  /* ENDP selects whether store_by_pieces returns DEST, the end
	     pointer, or the end pointer minus one.  */
	  dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
				      builtin_memcpy_read_str,
				      CONST_CAST (char *, src_str),
				      dest_align, false, endp);
	  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}

      /* Otherwise, fall back to an inline piecewise move when the
	 constant length and alignment permit one.  */
      if (GET_CODE (len_rtx) == CONST_INT
	  && can_move_by_pieces (INTVAL (len_rtx),
				 MIN (dest_align, src_align)))
	{
	  dest_mem = get_memory_rtx (dest, len);
	  set_mem_align (dest_mem, dest_align);
	  src_mem = get_memory_rtx (src, len);
	  set_mem_align (src_mem, src_align);
	  dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
				     MIN (dest_align, src_align), endp);
	  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}

      return NULL_RTX;
    }
}
3513
3514 /* Expand expression EXP, which is a call to the memmove builtin. Return
3515 NULL_RTX if we failed; the caller should emit a normal call. */
3516
3517 static rtx
3518 expand_builtin_memmove (tree exp, rtx target, enum machine_mode mode, int ignore)
3519 {
3520 if (!validate_arglist (exp,
3521 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3522 return NULL_RTX;
3523 else
3524 {
3525 tree dest = CALL_EXPR_ARG (exp, 0);
3526 tree src = CALL_EXPR_ARG (exp, 1);
3527 tree len = CALL_EXPR_ARG (exp, 2);
3528 return expand_builtin_memmove_args (dest, src, len, TREE_TYPE (exp),
3529 target, mode, ignore);
3530 }
3531 }
3532
3533 /* Helper function to do the actual work for expand_builtin_memmove. The
3534 arguments to the builtin_memmove call DEST, SRC, and LEN are broken out
3535 so that this can also be called without constructing an actual CALL_EXPR.
3536 TYPE is the return type of the call. The other arguments and return value
3537 are the same as for expand_builtin_memmove. */
3538
3539 static rtx
3540 expand_builtin_memmove_args (tree dest, tree src, tree len,
3541 tree type, rtx target, enum machine_mode mode,
3542 int ignore)
3543 {
3544 tree result = fold_builtin_memory_op (dest, src, len, type, ignore, /*endp=*/3);
3545
3546 if (result)
3547 {
3548 STRIP_TYPE_NOPS (result);
3549 while (TREE_CODE (result) == COMPOUND_EXPR)
3550 {
3551 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3552 EXPAND_NORMAL);
3553 result = TREE_OPERAND (result, 1);
3554 }
3555 return expand_expr (result, target, mode, EXPAND_NORMAL);
3556 }
3557
3558 /* Otherwise, call the normal function. */
3559 return NULL_RTX;
3560 }
3561
3562 /* Expand expression EXP, which is a call to the bcopy builtin. Return
3563 NULL_RTX if we failed the caller should emit a normal call. */
3564
3565 static rtx
3566 expand_builtin_bcopy (tree exp, int ignore)
3567 {
3568 tree type = TREE_TYPE (exp);
3569 tree src, dest, size;
3570
3571 if (!validate_arglist (exp,
3572 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3573 return NULL_RTX;
3574
3575 src = CALL_EXPR_ARG (exp, 0);
3576 dest = CALL_EXPR_ARG (exp, 1);
3577 size = CALL_EXPR_ARG (exp, 2);
3578
3579 /* Transform bcopy(ptr x, ptr y, int z) to memmove(ptr y, ptr x, size_t z).
3580 This is done this way so that if it isn't expanded inline, we fall
3581 back to calling bcopy instead of memmove. */
3582 return expand_builtin_memmove_args (dest, src,
3583 fold_convert (sizetype, size),
3584 type, const0_rtx, VOIDmode,
3585 ignore);
3586 }
3587
3588 #ifndef HAVE_movstr
3589 # define HAVE_movstr 0
3590 # define CODE_FOR_movstr CODE_FOR_nothing
3591 #endif
3592
3593 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3594 we failed, the caller should emit a normal call, otherwise try to
3595 get the result in TARGET, if convenient. If ENDP is 0 return the
3596 destination pointer, if ENDP is 1 return the end pointer ala
3597 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3598 stpcpy. */
3599
static rtx
expand_movstr (tree dest, tree src, rtx target, int endp)
{
  rtx end;
  rtx dest_mem;
  rtx src_mem;
  rtx insn;
  const struct insn_data * data;

  /* No movstr pattern on this target.  */
  if (!HAVE_movstr)
    return NULL_RTX;

  dest_mem = get_memory_rtx (dest, NULL);
  src_mem = get_memory_rtx (src, NULL);
  if (!endp)
    {
      /* Caller wants DEST back: capture its address in TARGET before
	 the insn clobbers anything, and let END be a scratch reg.  */
      target = force_reg (Pmode, XEXP (dest_mem, 0));
      dest_mem = replace_equiv_address (dest_mem, target);
      end = gen_reg_rtx (Pmode);
    }
  else
    {
      /* Caller wants an end pointer: write it into TARGET directly
	 when one was supplied, otherwise into a fresh register.  */
      if (target == 0 || target == const0_rtx)
	{
	  end = gen_reg_rtx (Pmode);
	  if (target == 0)
	    target = end;
	}
      else
	end = target;
    }

  data = insn_data + CODE_FOR_movstr;

  /* Adapt END to the mode the movstr pattern declares for its
     first operand.  */
  if (data->operand[0].mode != VOIDmode)
    end = gen_lowpart (data->operand[0].mode, end);

  insn = data->genfun (end, dest_mem, src_mem);

  gcc_assert (insn);

  emit_insn (insn);

  /* movstr is supposed to set end to the address of the NUL
     terminator.  If the caller requested a mempcpy-like return value,
     adjust it.  */
  if (endp == 1 && target != const0_rtx)
    {
      rtx tem = plus_constant (gen_lowpart (GET_MODE (target), end), 1);
      emit_move_insn (target, force_operand (tem, NULL_RTX));
    }

  return target;
}
3654
3655 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3656 NULL_RTX if we failed the caller should emit a normal call, otherwise
3657 try to get the result in TARGET, if convenient (and in mode MODE if that's
3658 convenient). */
3659
3660 static rtx
3661 expand_builtin_strcpy (tree fndecl, tree exp, rtx target, enum machine_mode mode)
3662 {
3663 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3664 {
3665 tree dest = CALL_EXPR_ARG (exp, 0);
3666 tree src = CALL_EXPR_ARG (exp, 1);
3667 return expand_builtin_strcpy_args (fndecl, dest, src, target, mode);
3668 }
3669 return NULL_RTX;
3670 }
3671
3672 /* Helper function to do the actual work for expand_builtin_strcpy. The
3673 arguments to the builtin_strcpy call DEST and SRC are broken out
3674 so that this can also be called without constructing an actual CALL_EXPR.
3675 The other arguments and return value are the same as for
3676 expand_builtin_strcpy. */
3677
3678 static rtx
3679 expand_builtin_strcpy_args (tree fndecl, tree dest, tree src,
3680 rtx target, enum machine_mode mode)
3681 {
3682 tree result = fold_builtin_strcpy (fndecl, dest, src, 0);
3683 if (result)
3684 return expand_expr (result, target, mode, EXPAND_NORMAL);
3685 return expand_movstr (dest, src, target, /*endp=*/0);
3686
3687 }
3688
3689 /* Expand a call EXP to the stpcpy builtin.
3690 Return NULL_RTX if we failed the caller should emit a normal call,
3691 otherwise try to get the result in TARGET, if convenient (and in
3692 mode MODE if that's convenient). */
3693
static rtx
expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
{
  tree dst, src;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  /* If return value is ignored, transform stpcpy into strcpy.  */
  if (target == const0_rtx)
    {
      tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
      if (!fn)
	return NULL_RTX;

      return expand_expr (build_call_expr (fn, 2, dst, src),
			  target, mode, EXPAND_NORMAL);
    }
  else
    {
      tree len, lenp1;
      rtx ret;

      /* Ensure we get an actual string whose length can be evaluated at
	 compile-time, not an expression containing a string.  This is
	 because the latter will potentially produce pessimized code
	 when used to produce the return value.  */
      if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
	return expand_movstr (dst, src, target, /*endp=*/2);

      /* Known length: stpcpy (d, s) == mempcpy (d, s, strlen (s) + 1) - 1,
	 which endp == 2 encodes.  */
      lenp1 = size_binop (PLUS_EXPR, len, ssize_int (1));
      ret = expand_builtin_mempcpy_args (dst, src, lenp1, TREE_TYPE (exp),
					 target, mode, /*endp=*/2);

      if (ret)
	return ret;

      if (TREE_CODE (len) == INTEGER_CST)
	{
	  rtx len_rtx = expand_normal (len);

	  /* Fall back to a strcpy expansion and compute the end
	     pointer from DEST plus the known length.  */
	  if (GET_CODE (len_rtx) == CONST_INT)
	    {
	      ret = expand_builtin_strcpy_args (get_callee_fndecl (exp),
						dst, src, target, mode);

	      if (ret)
		{
		  if (! target)
		    {
		      if (mode != VOIDmode)
			target = gen_reg_rtx (mode);
		      else
			target = gen_reg_rtx (GET_MODE (ret));
		    }
		  if (GET_MODE (target) != GET_MODE (ret))
		    ret = gen_lowpart (GET_MODE (target), ret);

		  ret = plus_constant (ret, INTVAL (len_rtx));
		  ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
		  gcc_assert (ret);

		  return target;
		}
	    }
	}

      /* Last resort: the movstr pattern with an endp-minus-one result.  */
      return expand_movstr (dst, src, target, /*endp=*/2);
    }
}
3767
3768 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3769 bytes from constant string DATA + OFFSET and return it as target
3770 constant. */
3771
3772 rtx
3773 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3774 enum machine_mode mode)
3775 {
3776 const char *str = (const char *) data;
3777
3778 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3779 return const0_rtx;
3780
3781 return c_readstr (str + offset, mode);
3782 }
3783
3784 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3785 NULL_RTX if we failed the caller should emit a normal call. */
3786
static rtx
expand_builtin_strncpy (tree exp, rtx target, enum machine_mode mode)
{
  tree fndecl = get_callee_fndecl (exp);

  if (validate_arglist (exp,
			POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      tree len = CALL_EXPR_ARG (exp, 2);
      tree slen = c_strlen (src, 1);
      tree result = fold_builtin_strncpy (fndecl, dest, src, len, slen);

      if (result)
	{
	  /* The folder may hand back a COMPOUND_EXPR chain: expand
	     every left operand for its side effects only, then expand
	     the final operand for its value.  */
	  while (TREE_CODE (result) == COMPOUND_EXPR)
	    {
	      expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
			   EXPAND_NORMAL);
	      result = TREE_OPERAND (result, 1);
	    }
	  return expand_expr (result, target, mode, EXPAND_NORMAL);
	}

      /* We must be passed a constant len and src parameter.  */
      if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
	return NULL_RTX;

      /* From here on SLEN is strlen (SRC) + 1, i.e. the number of
	 bytes including the terminating NUL.  */
      slen = size_binop (PLUS_EXPR, slen, ssize_int (1));

      /* We're required to pad with trailing zeros if the requested
	 len is greater than strlen(s2)+1.  In that case try to
	 use store_by_pieces, if it fails, punt.  */
      if (tree_int_cst_lt (slen, len))
	{
	  unsigned int dest_align
	    = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
	  const char *p = c_getstr (src);
	  rtx dest_mem;

	  if (!p || dest_align == 0 || !host_integerp (len, 1)
	      || !can_store_by_pieces (tree_low_cst (len, 1),
				       builtin_strncpy_read_str,
				       CONST_CAST (char *, p),
				       dest_align, false))
	    return NULL_RTX;

	  dest_mem = get_memory_rtx (dest, len);
	  store_by_pieces (dest_mem, tree_low_cst (len, 1),
			   builtin_strncpy_read_str,
			   CONST_CAST (char *, p), dest_align, false, 0);
	  /* The call's value is DEST itself; rebuild its address in
	     ptr_mode as the result.  */
	  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}
    }
  return NULL_RTX;
}
3846
3847 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3848 bytes from constant string DATA + OFFSET and return it as target
3849 constant. */
3850
3851 rtx
3852 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3853 enum machine_mode mode)
3854 {
3855 const char *c = (const char *) data;
3856 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3857
3858 memset (p, *c, GET_MODE_SIZE (mode));
3859
3860 return c_readstr (p, mode);
3861 }
3862
3863 /* Callback routine for store_by_pieces. Return the RTL of a register
3864 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3865 char value given in the RTL register data. For example, if mode is
3866 4 bytes wide, return the RTL for 0x01010101*data. */
3867
3868 static rtx
3869 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3870 enum machine_mode mode)
3871 {
3872 rtx target, coeff;
3873 size_t size;
3874 char *p;
3875
3876 size = GET_MODE_SIZE (mode);
3877 if (size == 1)
3878 return (rtx) data;
3879
3880 p = XALLOCAVEC (char, size);
3881 memset (p, 1, size);
3882 coeff = c_readstr (p, mode);
3883
3884 target = convert_to_mode (mode, (rtx) data, 1);
3885 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3886 return force_reg (mode, target);
3887 }
3888
3889 /* Expand expression EXP, which is a call to the memset builtin. Return
3890 NULL_RTX if we failed the caller should emit a normal call, otherwise
3891 try to get the result in TARGET, if convenient (and in mode MODE if that's
3892 convenient). */
3893
3894 static rtx
3895 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3896 {
3897 if (!validate_arglist (exp,
3898 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3899 return NULL_RTX;
3900 else
3901 {
3902 tree dest = CALL_EXPR_ARG (exp, 0);
3903 tree val = CALL_EXPR_ARG (exp, 1);
3904 tree len = CALL_EXPR_ARG (exp, 2);
3905 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3906 }
3907 }
3908
3909 /* Helper function to do the actual work for expand_builtin_memset. The
3910 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3911 so that this can also be called without constructing an actual CALL_EXPR.
3912 The other arguments and return value are the same as for
3913 expand_builtin_memset. */
3914
static rtx
expand_builtin_memset_args (tree dest, tree val, tree len,
			    rtx target, enum machine_mode mode, tree orig_exp)
{
  tree fndecl, fn;
  enum built_in_function fcode;
  char c;
  unsigned int dest_align;
  rtx dest_mem, dest_addr, len_rtx;
  HOST_WIDE_INT expected_size = -1;
  unsigned int expected_align = 0;
  tree_ann_common_t ann;

  dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);

  /* If DEST is not a pointer type, don't do this operation in-line.  */
  if (dest_align == 0)
    return NULL_RTX;

  /* Pick up any value-profiling hints recorded for this call about
     the likely block size and alignment.  */
  ann = tree_common_ann (orig_exp);
  if (ann)
    stringop_block_profile (ann->stmt, &expected_align, &expected_size);

  if (expected_align < dest_align)
    expected_align = dest_align;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      /* Evaluate and ignore VAL in case it has side-effects.  */
      expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
      return expand_expr (dest, target, mode, EXPAND_NORMAL);
    }

  /* Stabilize the arguments in case we fail.  */
  dest = builtin_save_expr (dest);
  val = builtin_save_expr (val);
  len = builtin_save_expr (len);

  len_rtx = expand_normal (len);
  dest_mem = get_memory_rtx (dest, len);

  if (TREE_CODE (val) != INTEGER_CST)
    {
      rtx val_rtx;

      val_rtx = expand_normal (val);
      val_rtx = convert_to_mode (TYPE_MODE (unsigned_char_type_node),
				 val_rtx, 0);

      /* Assume that we can memset by pieces if we can store
       * the coefficients by pieces (in the required modes).
       * We can't pass builtin_memset_gen_str as that emits RTL.  */
      c = 1;
      if (host_integerp (len, 1)
	  && can_store_by_pieces (tree_low_cst (len, 1),
				  builtin_memset_read_str, &c, dest_align,
				  true))
	{
	  val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
			       val_rtx);
	  store_by_pieces (dest_mem, tree_low_cst (len, 1),
			   builtin_memset_gen_str, val_rtx, dest_align,
			   true, 0);
	}
      else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
					dest_align, expected_align,
					expected_size))
	goto do_libcall;

      /* memset returns DEST; rebuild its address in ptr_mode.  */
      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  /* VAL is a constant; narrow it to a target character.  */
  if (target_char_cast (val, &c))
    goto do_libcall;

  if (c)
    {
      /* Non-zero fill byte: store by pieces or via the setmem
	 pattern, whichever applies.  */
      if (host_integerp (len, 1)
	  && can_store_by_pieces (tree_low_cst (len, 1),
				  builtin_memset_read_str, &c, dest_align,
				  true))
	store_by_pieces (dest_mem, tree_low_cst (len, 1),
			 builtin_memset_read_str, &c, dest_align, true, 0);
      else if (!set_storage_via_setmem (dest_mem, len_rtx, GEN_INT (c),
					dest_align, expected_align,
					expected_size))
	goto do_libcall;

      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  /* The fill byte is zero: use the generic block-clear machinery.  */
  set_mem_align (dest_mem, dest_align);
  dest_addr = clear_storage_hints (dest_mem, len_rtx,
				   CALL_EXPR_TAILCALL (orig_exp)
				   ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
				   expected_align, expected_size);

  if (dest_addr == 0)
    {
      dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_addr = convert_memory_address (ptr_mode, dest_addr);
    }

  return dest_addr;

 do_libcall:
  /* In-line expansion failed: emit an explicit call to whichever
     function ORIG_EXP named (memset or bzero), preserving its
     tail-call flag.  */
  fndecl = get_callee_fndecl (orig_exp);
  fcode = DECL_FUNCTION_CODE (fndecl);
  if (fcode == BUILT_IN_MEMSET)
    fn = build_call_expr (fndecl, 3, dest, val, len);
  else if (fcode == BUILT_IN_BZERO)
    fn = build_call_expr (fndecl, 2, dest, len);
  else
    gcc_unreachable ();
  if (TREE_CODE (fn) == CALL_EXPR)
    CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
  return expand_call (fn, target, target == const0_rtx);
}
4038
4039 /* Expand expression EXP, which is a call to the bzero builtin. Return
4040 NULL_RTX if we failed the caller should emit a normal call. */
4041
4042 static rtx
4043 expand_builtin_bzero (tree exp)
4044 {
4045 tree dest, size;
4046
4047 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4048 return NULL_RTX;
4049
4050 dest = CALL_EXPR_ARG (exp, 0);
4051 size = CALL_EXPR_ARG (exp, 1);
4052
4053 /* New argument list transforming bzero(ptr x, int y) to
4054 memset(ptr x, int 0, size_t y). This is done this way
4055 so that if it isn't expanded inline, we fallback to
4056 calling bzero instead of memset. */
4057
4058 return expand_builtin_memset_args (dest, integer_zero_node,
4059 fold_convert (sizetype, size),
4060 const0_rtx, VOIDmode, exp);
4061 }
4062
4063 /* Expand a call to the memchr builtin. Return NULL_RTX if we failed the
4064 caller should emit a normal call, otherwise try to get the result
4065 in TARGET, if convenient (and in mode MODE if that's convenient). */
4066
4067 static rtx
4068 expand_builtin_memchr (tree exp, rtx target, enum machine_mode mode)
4069 {
4070 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE,
4071 INTEGER_TYPE, VOID_TYPE))
4072 {
4073 tree type = TREE_TYPE (exp);
4074 tree result = fold_builtin_memchr (CALL_EXPR_ARG (exp, 0),
4075 CALL_EXPR_ARG (exp, 1),
4076 CALL_EXPR_ARG (exp, 2), type);
4077 if (result)
4078 return expand_expr (result, target, mode, EXPAND_NORMAL);
4079 }
4080 return NULL_RTX;
4081 }
4082
4083 /* Expand expression EXP, which is a call to the memcmp built-in function.
4084 Return NULL_RTX if we failed and the
4085 caller should emit a normal call, otherwise try to get the result in
4086 TARGET, if convenient (and in mode MODE, if that's convenient). */
4087
static rtx
expand_builtin_memcmp (tree exp, rtx target, enum machine_mode mode)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      /* Try to fold the comparison away entirely first (e.g. both
	 arguments constant).  */
      tree result = fold_builtin_memcmp (CALL_EXPR_ARG (exp, 0),
					 CALL_EXPR_ARG (exp, 1),
					 CALL_EXPR_ARG (exp, 2));
      if (result)
	return expand_expr (result, target, mode, EXPAND_NORMAL);
    }

#if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
  {
    rtx arg1_rtx, arg2_rtx, arg3_rtx;
    rtx result;
    rtx insn;
    tree arg1 = CALL_EXPR_ARG (exp, 0);
    tree arg2 = CALL_EXPR_ARG (exp, 1);
    tree len = CALL_EXPR_ARG (exp, 2);

    int arg1_align
      = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
    int arg2_align
      = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
    enum machine_mode insn_mode;

    /* Prefer the cmpmem pattern; cmpstrn has a compatible interface
       and serves as the fallback.  If the target provides neither,
       punt to a library call.  */
#ifdef HAVE_cmpmemsi
    if (HAVE_cmpmemsi)
      insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
    else
#endif
#ifdef HAVE_cmpstrnsi
    if (HAVE_cmpstrnsi)
      insn_mode = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
    else
#endif
      return NULL_RTX;

    /* If we don't have POINTER_TYPE, call the function.  */
    if (arg1_align == 0 || arg2_align == 0)
      return NULL_RTX;

    /* Make a place to write the result of the instruction.  */
    result = target;
    if (! (result != 0
	   && REG_P (result) && GET_MODE (result) == insn_mode
	   && REGNO (result) >= FIRST_PSEUDO_REGISTER))
      result = gen_reg_rtx (insn_mode);

    arg1_rtx = get_memory_rtx (arg1, len);
    arg2_rtx = get_memory_rtx (arg2, len);
    arg3_rtx = expand_normal (len);

    /* Set MEM_SIZE as appropriate.  */
    if (GET_CODE (arg3_rtx) == CONST_INT)
      {
	set_mem_size (arg1_rtx, arg3_rtx);
	set_mem_size (arg2_rtx, arg3_rtx);
      }

#ifdef HAVE_cmpmemsi
    if (HAVE_cmpmemsi)
      insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
			   GEN_INT (MIN (arg1_align, arg2_align)));
    else
#endif
#ifdef HAVE_cmpstrnsi
    if (HAVE_cmpstrnsi)
      insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
			    GEN_INT (MIN (arg1_align, arg2_align)));
    else
#endif
      gcc_unreachable ();

    if (insn)
      emit_insn (insn);
    else
      /* The expander rejected the operands at expand time: fall back
	 to an explicit library call to memcmp.  */
      emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
			       TYPE_MODE (integer_type_node), 3,
			       XEXP (arg1_rtx, 0), Pmode,
			       XEXP (arg2_rtx, 0), Pmode,
			       convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
						TYPE_UNSIGNED (sizetype)),
			       TYPE_MODE (sizetype));

    /* Return the value in the proper mode for this function.  */
    mode = TYPE_MODE (TREE_TYPE (exp));
    if (GET_MODE (result) == mode)
      return result;
    else if (target != 0)
      {
	convert_move (target, result, 0);
	return target;
      }
    else
      return convert_to_mode (mode, result, 0);
  }
#endif

  return NULL_RTX;
}
4193
4194 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4195 if we failed the caller should emit a normal call, otherwise try to get
4196 the result in TARGET, if convenient. */
4197
static rtx
expand_builtin_strcmp (tree exp, rtx target, enum machine_mode mode)
{
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      /* Try constant folding first (e.g. both operands literal
	 strings).  */
      tree result = fold_builtin_strcmp (CALL_EXPR_ARG (exp, 0),
					 CALL_EXPR_ARG (exp, 1));
      if (result)
	return expand_expr (result, target, mode, EXPAND_NORMAL);
    }

#if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
  if (cmpstr_optab[SImode] != CODE_FOR_nothing
      || cmpstrn_optab[SImode] != CODE_FOR_nothing)
    {
      rtx arg1_rtx, arg2_rtx;
      rtx result, insn = NULL_RTX;
      tree fndecl, fn;
      tree arg1 = CALL_EXPR_ARG (exp, 0);
      tree arg2 = CALL_EXPR_ARG (exp, 1);

      int arg1_align
	= get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
      int arg2_align
	= get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;

      /* If we don't have POINTER_TYPE, call the function.  */
      if (arg1_align == 0 || arg2_align == 0)
	return NULL_RTX;

      /* Stabilize the arguments in case gen_cmpstr(n)si fail.  */
      arg1 = builtin_save_expr (arg1);
      arg2 = builtin_save_expr (arg2);

      arg1_rtx = get_memory_rtx (arg1, NULL);
      arg2_rtx = get_memory_rtx (arg2, NULL);

#ifdef HAVE_cmpstrsi
      /* Try to call cmpstrsi.  */
      if (HAVE_cmpstrsi)
	{
	  enum machine_mode insn_mode
	    = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;

	  /* Make a place to write the result of the instruction.  */
	  result = target;
	  if (! (result != 0
		 && REG_P (result) && GET_MODE (result) == insn_mode
		 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	    result = gen_reg_rtx (insn_mode);

	  insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
			       GEN_INT (MIN (arg1_align, arg2_align)));
	}
#endif
#ifdef HAVE_cmpstrnsi
      /* Try to determine at least one length and call cmpstrnsi.  */
      if (!insn && HAVE_cmpstrnsi)
	{
	  tree len;
	  rtx arg3_rtx;

	  enum machine_mode insn_mode
	    = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
	  /* LEN1/LEN2 are strlen+1 where the compile-time length is
	     known, NULL_TREE otherwise.  */
	  tree len1 = c_strlen (arg1, 1);
	  tree len2 = c_strlen (arg2, 1);

	  if (len1)
	    len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
	  if (len2)
	    len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);

	  /* If we don't have a constant length for the first, use the length
	     of the second, if we know it.  We don't require a constant for
	     this case; some cost analysis could be done if both are available
	     but neither is constant.  For now, assume they're equally cheap,
	     unless one has side effects.  If both strings have constant lengths,
	     use the smaller.  */

	  if (!len1)
	    len = len2;
	  else if (!len2)
	    len = len1;
	  else if (TREE_SIDE_EFFECTS (len1))
	    len = len2;
	  else if (TREE_SIDE_EFFECTS (len2))
	    len = len1;
	  else if (TREE_CODE (len1) != INTEGER_CST)
	    len = len2;
	  else if (TREE_CODE (len2) != INTEGER_CST)
	    len = len1;
	  else if (tree_int_cst_lt (len1, len2))
	    len = len1;
	  else
	    len = len2;

	  /* If both arguments have side effects, we cannot optimize.  */
	  if (!len || TREE_SIDE_EFFECTS (len))
	    goto do_libcall;

	  arg3_rtx = expand_normal (len);

	  /* Make a place to write the result of the instruction.  */
	  result = target;
	  if (! (result != 0
		 && REG_P (result) && GET_MODE (result) == insn_mode
		 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	    result = gen_reg_rtx (insn_mode);

	  insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
				GEN_INT (MIN (arg1_align, arg2_align)));
	}
#endif

      if (insn)
	{
	  emit_insn (insn);

	  /* Return the value in the proper mode for this function.  */
	  mode = TYPE_MODE (TREE_TYPE (exp));
	  if (GET_MODE (result) == mode)
	    return result;
	  if (target == 0)
	    return convert_to_mode (mode, result, 0);
	  convert_move (target, result, 0);
	  return target;
	}

      /* Expand the library call ourselves using a stabilized argument
	 list to avoid re-evaluating the function's arguments twice.  */
#ifdef HAVE_cmpstrnsi
    do_libcall:
#endif
      fndecl = get_callee_fndecl (exp);
      fn = build_call_expr (fndecl, 2, arg1, arg2);
      if (TREE_CODE (fn) == CALL_EXPR)
	CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_call (fn, target, target == const0_rtx);
    }
#endif
  return NULL_RTX;
}
4342
4343 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4344 NULL_RTX if we failed the caller should emit a normal call, otherwise try to get
4345 the result in TARGET, if convenient. */
4346
static rtx
expand_builtin_strncmp (tree exp, rtx target, enum machine_mode mode)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      /* Try constant folding first.  */
      tree result = fold_builtin_strncmp (CALL_EXPR_ARG (exp, 0),
					  CALL_EXPR_ARG (exp, 1),
					  CALL_EXPR_ARG (exp, 2));
      if (result)
	return expand_expr (result, target, mode, EXPAND_NORMAL);
    }

  /* If c_strlen can determine an expression for one of the string
     lengths, and it doesn't have side effects, then emit cmpstrnsi
     using length MIN(strlen(string)+1, arg3).  */
#ifdef HAVE_cmpstrnsi
  if (HAVE_cmpstrnsi)
    {
      tree len, len1, len2;
      rtx arg1_rtx, arg2_rtx, arg3_rtx;
      rtx result, insn;
      tree fndecl, fn;
      tree arg1 = CALL_EXPR_ARG (exp, 0);
      tree arg2 = CALL_EXPR_ARG (exp, 1);
      tree arg3 = CALL_EXPR_ARG (exp, 2);

      int arg1_align
	= get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
      int arg2_align
	= get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
      enum machine_mode insn_mode
	= insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;

      /* LEN1/LEN2 become strlen+1 where the compile-time length is
	 known, NULL_TREE otherwise.  */
      len1 = c_strlen (arg1, 1);
      len2 = c_strlen (arg2, 1);

      if (len1)
	len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
      if (len2)
	len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);

      /* If we don't have a constant length for the first, use the length
	 of the second, if we know it.  We don't require a constant for
	 this case; some cost analysis could be done if both are available
	 but neither is constant.  For now, assume they're equally cheap,
	 unless one has side effects.  If both strings have constant lengths,
	 use the smaller.  */

      if (!len1)
	len = len2;
      else if (!len2)
	len = len1;
      else if (TREE_SIDE_EFFECTS (len1))
	len = len2;
      else if (TREE_SIDE_EFFECTS (len2))
	len = len1;
      else if (TREE_CODE (len1) != INTEGER_CST)
	len = len2;
      else if (TREE_CODE (len2) != INTEGER_CST)
	len = len1;
      else if (tree_int_cst_lt (len1, len2))
	len = len1;
      else
	len = len2;

      /* If both arguments have side effects, we cannot optimize.  */
      if (!len || TREE_SIDE_EFFECTS (len))
	return NULL_RTX;

      /* The actual new length parameter is MIN(len,arg3).  */
      len = fold_build2 (MIN_EXPR, TREE_TYPE (len), len,
			 fold_convert (TREE_TYPE (len), arg3));

      /* If we don't have POINTER_TYPE, call the function.  */
      if (arg1_align == 0 || arg2_align == 0)
	return NULL_RTX;

      /* Make a place to write the result of the instruction.  */
      result = target;
      if (! (result != 0
	     && REG_P (result) && GET_MODE (result) == insn_mode
	     && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	result = gen_reg_rtx (insn_mode);

      /* Stabilize the arguments in case gen_cmpstrnsi fails.  */
      arg1 = builtin_save_expr (arg1);
      arg2 = builtin_save_expr (arg2);
      len = builtin_save_expr (len);

      arg1_rtx = get_memory_rtx (arg1, len);
      arg2_rtx = get_memory_rtx (arg2, len);
      arg3_rtx = expand_normal (len);
      insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
			    GEN_INT (MIN (arg1_align, arg2_align)));
      if (insn)
	{
	  emit_insn (insn);

	  /* Return the value in the proper mode for this function.  */
	  mode = TYPE_MODE (TREE_TYPE (exp));
	  if (GET_MODE (result) == mode)
	    return result;
	  if (target == 0)
	    return convert_to_mode (mode, result, 0);
	  convert_move (target, result, 0);
	  return target;
	}

      /* Expand the library call ourselves using a stabilized argument
	 list to avoid re-evaluating the function's arguments twice.  */
      fndecl = get_callee_fndecl (exp);
      fn = build_call_expr (fndecl, 3, arg1, arg2, len);
      if (TREE_CODE (fn) == CALL_EXPR)
	CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_call (fn, target, target == const0_rtx);
    }
#endif
  return NULL_RTX;
}
4469
4470 /* Expand expression EXP, which is a call to the strcat builtin.
4471 Return NULL_RTX if we failed the caller should emit a normal call,
4472 otherwise try to get the result in TARGET, if convenient. */
4473
4474 static rtx
4475 expand_builtin_strcat (tree fndecl, tree exp, rtx target, enum machine_mode mode)
4476 {
4477 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4478 return NULL_RTX;
4479 else
4480 {
4481 tree dst = CALL_EXPR_ARG (exp, 0);
4482 tree src = CALL_EXPR_ARG (exp, 1);
4483 const char *p = c_getstr (src);
4484
4485 /* If the string length is zero, return the dst parameter. */
4486 if (p && *p == '\0')
4487 return expand_expr (dst, target, mode, EXPAND_NORMAL);
4488
4489 if (optimize_insn_for_speed_p ())
4490 {
4491 /* See if we can store by pieces into (dst + strlen(dst)). */
4492 tree newsrc, newdst,
4493 strlen_fn = implicit_built_in_decls[BUILT_IN_STRLEN];
4494 rtx insns;
4495
4496 /* Stabilize the argument list. */
4497 newsrc = builtin_save_expr (src);
4498 dst = builtin_save_expr (dst);
4499
4500 start_sequence ();
4501
4502 /* Create strlen (dst). */
4503 newdst = build_call_expr (strlen_fn, 1, dst);
4504 /* Create (dst p+ strlen (dst)). */
4505
4506 newdst = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dst), dst, newdst);
4507 newdst = builtin_save_expr (newdst);
4508
4509 if (!expand_builtin_strcpy_args (fndecl, newdst, newsrc, target, mode))
4510 {
4511 end_sequence (); /* Stop sequence. */
4512 return NULL_RTX;
4513 }
4514
4515 /* Output the entire sequence. */
4516 insns = get_insns ();
4517 end_sequence ();
4518 emit_insn (insns);
4519
4520 return expand_expr (dst, target, mode, EXPAND_NORMAL);
4521 }
4522
4523 return NULL_RTX;
4524 }
4525 }
4526
4527 /* Expand expression EXP, which is a call to the strncat builtin.
4528 Return NULL_RTX if we failed the caller should emit a normal call,
4529 otherwise try to get the result in TARGET, if convenient. */
4530
4531 static rtx
4532 expand_builtin_strncat (tree exp, rtx target, enum machine_mode mode)
4533 {
4534 if (validate_arglist (exp,
4535 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4536 {
4537 tree result = fold_builtin_strncat (CALL_EXPR_ARG (exp, 0),
4538 CALL_EXPR_ARG (exp, 1),
4539 CALL_EXPR_ARG (exp, 2));
4540 if (result)
4541 return expand_expr (result, target, mode, EXPAND_NORMAL);
4542 }
4543 return NULL_RTX;
4544 }
4545
4546 /* Expand expression EXP, which is a call to the strspn builtin.
4547 Return NULL_RTX if we failed the caller should emit a normal call,
4548 otherwise try to get the result in TARGET, if convenient. */
4549
4550 static rtx
4551 expand_builtin_strspn (tree exp, rtx target, enum machine_mode mode)
4552 {
4553 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4554 {
4555 tree result = fold_builtin_strspn (CALL_EXPR_ARG (exp, 0),
4556 CALL_EXPR_ARG (exp, 1));
4557 if (result)
4558 return expand_expr (result, target, mode, EXPAND_NORMAL);
4559 }
4560 return NULL_RTX;
4561 }
4562
4563 /* Expand expression EXP, which is a call to the strcspn builtin.
4564 Return NULL_RTX if we failed the caller should emit a normal call,
4565 otherwise try to get the result in TARGET, if convenient. */
4566
4567 static rtx
4568 expand_builtin_strcspn (tree exp, rtx target, enum machine_mode mode)
4569 {
4570 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4571 {
4572 tree result = fold_builtin_strcspn (CALL_EXPR_ARG (exp, 0),
4573 CALL_EXPR_ARG (exp, 1));
4574 if (result)
4575 return expand_expr (result, target, mode, EXPAND_NORMAL);
4576 }
4577 return NULL_RTX;
4578 }
4579
4580 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4581 if that's convenient. */
4582
rtx
expand_builtin_saveregs (void)
{
  rtx val, seq;

  /* Don't do __builtin_saveregs more than once in a function.
     Save the result of the first call and reuse it.  */
  if (saveregs_value != 0)
    return saveregs_value;

  /* When this function is called, it means that registers must be
     saved on entry to this function.  So we migrate the call to the
     first insn of this function.  */

  start_sequence ();

  /* Do whatever the machine needs done in this case.  */
  val = targetm.calls.expand_builtin_saveregs ();

  seq = get_insns ();
  end_sequence ();

  /* Cache VAL so any later __builtin_saveregs in this function reuses
     it (see the check at the top).  */
  saveregs_value = val;

  /* Put the insns after the NOTE that starts the function.  If this
     is inside a start_sequence, make the outer-level insn chain current, so
     the code is placed at the start of the function.  */
  push_topmost_sequence ();
  emit_insn_after (seq, entry_of_function ());
  pop_topmost_sequence ();

  return val;
}
4616
4617 /* __builtin_args_info (N) returns word N of the arg space info
4618 for the current function. The number and meanings of words
4619 is controlled by the definition of CUMULATIVE_ARGS. */
4620
4621 static rtx
4622 expand_builtin_args_info (tree exp)
4623 {
4624 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
4625 int *word_ptr = (int *) &crtl->args.info;
4626
4627 gcc_assert (sizeof (CUMULATIVE_ARGS) % sizeof (int) == 0);
4628
4629 if (call_expr_nargs (exp) != 0)
4630 {
4631 if (!host_integerp (CALL_EXPR_ARG (exp, 0), 0))
4632 error ("argument of %<__builtin_args_info%> must be constant");
4633 else
4634 {
4635 HOST_WIDE_INT wordnum = tree_low_cst (CALL_EXPR_ARG (exp, 0), 0);
4636
4637 if (wordnum < 0 || wordnum >= nwords)
4638 error ("argument of %<__builtin_args_info%> out of range");
4639 else
4640 return GEN_INT (word_ptr[wordnum]);
4641 }
4642 }
4643 else
4644 error ("missing argument in %<__builtin_args_info%>");
4645
4646 return const0_rtx;
4647 }
4648
4649 /* Expand a call to __builtin_next_arg. */
4650
4651 static rtx
4652 expand_builtin_next_arg (void)
4653 {
4654 /* Checking arguments is already done in fold_builtin_next_arg
4655 that must be called before this function. */
4656 return expand_binop (ptr_mode, add_optab,
4657 crtl->args.internal_arg_pointer,
4658 crtl->args.arg_offset_rtx,
4659 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4660 }
4661
4662 /* Make it easier for the backends by protecting the valist argument
4663 from multiple evaluations. */
4664
static tree
stabilize_va_list (tree valist, int needs_lvalue)
{
  tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));

  gcc_assert (vatype != NULL_TREE);

  if (TREE_CODE (vatype) == ARRAY_TYPE)
    {
      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);

      /* For this case, the backends will be expecting a pointer to
	 vatype, but it's possible we've actually been given an array
	 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
	 So fix it.  */
      if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
	{
	  tree p1 = build_pointer_type (TREE_TYPE (vatype));
	  valist = build_fold_addr_expr_with_type (valist, p1);
	}
    }
  else
    {
      tree pt;

      if (! needs_lvalue)
	{
	  /* Side-effect-free rvalues can be used as-is.  */
	  if (! TREE_SIDE_EFFECTS (valist))
	    return valist;

	  pt = build_pointer_type (vatype);
	  valist = fold_build1 (ADDR_EXPR, pt, valist);
	  TREE_SIDE_EFFECTS (valist) = 1;
	}

      /* Take the address once, save it, and dereference, so the
	 va_list expression is evaluated only a single time even if
	 the caller uses it repeatedly.  */
      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);
      valist = build_fold_indirect_ref (valist);
    }

  return valist;
}
4708
4709 /* The "standard" definition of va_list is void*. */
4710
tree
std_build_builtin_va_list (void)
{
  /* Targets without a special va_list representation use a plain
     void pointer.  */
  return ptr_type_node;
}
4716
4717 /* The "standard" abi va_list is va_list_type_node. */
4718
tree
std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
{
  /* The default ABI uses the same va_list type for every function,
     regardless of FNDECL.  */
  return va_list_type_node;
}
4724
4725 /* The "standard" type of va_list is va_list_type_node. */
4726
tree
std_canonical_va_list_type (tree type)
{
  tree wtype, htype;

  /* Strip one level of indirection from the type actually handed to
     us; the builtin machinery may pass the address of the va_list.  */
  if (INDIRECT_REF_P (type))
    type = TREE_TYPE (type);
  else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE(type)))
    type = TREE_TYPE (type);
  wtype = va_list_type_node;
  htype = type;
  /* Treat structure va_list types.  */
  if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
    htype = TREE_TYPE (htype);
  else if (TREE_CODE (wtype) == ARRAY_TYPE)
    {
      /* If va_list is an array type, the argument may have decayed
	 to a pointer type, e.g. by being passed to another function.
	 In that case, unwrap both types so that we can compare the
	 underlying records.  */
      if (TREE_CODE (htype) == ARRAY_TYPE
	  || POINTER_TYPE_P (htype))
	{
	  wtype = TREE_TYPE (wtype);
	  htype = TREE_TYPE (htype);
	}
    }
  /* Only report a match when the main variants agree; otherwise TYPE
     is not a va_list at all.  */
  if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
    return va_list_type_node;

  return NULL_TREE;
}
4759
/* The "standard" implementation of va_start: just assign `nextarg' to
   the variable.  */

void
std_expand_builtin_va_start (tree valist, rtx nextarg)
{
  /* Expand VALIST as a store destination and move NEXTARG into it,
     converting modes if they differ (the 0 means treat the source
     as unsigned if extension is needed).  */
  rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
  convert_move (va_r, nextarg, 0);
}
4769
4770 /* Expand EXP, a call to __builtin_va_start. */
4771
4772 static rtx
4773 expand_builtin_va_start (tree exp)
4774 {
4775 rtx nextarg;
4776 tree valist;
4777
4778 if (call_expr_nargs (exp) < 2)
4779 {
4780 error ("too few arguments to function %<va_start%>");
4781 return const0_rtx;
4782 }
4783
4784 if (fold_builtin_next_arg (exp, true))
4785 return const0_rtx;
4786
4787 nextarg = expand_builtin_next_arg ();
4788 valist = stabilize_va_list (CALL_EXPR_ARG (exp, 0), 1);
4789
4790 if (targetm.expand_builtin_va_start)
4791 targetm.expand_builtin_va_start (valist, nextarg);
4792 else
4793 std_expand_builtin_va_start (valist, nextarg);
4794
4795 return const0_rtx;
4796 }
4797
/* The "standard" implementation of va_arg: read the value from the
   current (padded) address and increment by the (padded) size.
   VALIST is the va_list expression, TYPE the type being fetched;
   setup statements go to PRE_P and the pointer bump is shared with
   POST_P.  Returns the dereferenced argument value as a tree.  */

tree
std_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
			  gimple_seq *post_p)
{
  tree addr, t, type_size, rounded_size, valist_tmp;
  unsigned HOST_WIDE_INT align, boundary;
  bool indirect;

#ifdef ARGS_GROW_DOWNWARD
  /* All of the alignment and movement below is for args-grow-up machines.
     As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
     implement their own specialized gimplify_va_arg_expr routines.  */
  gcc_unreachable ();
#endif

  /* Arguments passed by reference live behind an implicit pointer in
     the argument area, so fetch a pointer and dereference it below.  */
  indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
  if (indirect)
    type = build_pointer_type (type);

  align = PARM_BOUNDARY / BITS_PER_UNIT;
  boundary = FUNCTION_ARG_BOUNDARY (TYPE_MODE (type), type);

  /* When we align parameter on stack for caller, if the parameter
     alignment is beyond MAX_SUPPORTED_STACK_ALIGNMENT, it will be
     aligned at MAX_SUPPORTED_STACK_ALIGNMENT.  We will match callee
     here with caller.  */
  if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
    boundary = MAX_SUPPORTED_STACK_ALIGNMENT;

  boundary /= BITS_PER_UNIT;

  /* Hoist the valist value into a temporary for the moment.  */
  valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);

  /* va_list pointer is aligned to PARM_BOUNDARY.  If argument actually
     requires greater alignment, we must perform dynamic alignment.  */
  if (boundary > align
      && !integer_zerop (TYPE_SIZE (type)))
    {
      /* valist_tmp = valist_tmp + (boundary - 1) ...  */
      t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
		  fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist),
			       valist_tmp, size_int (boundary - 1)));
      gimplify_and_add (t, pre_p);

      /* ... & -boundary, i.e. round up to a multiple of boundary.  */
      t = fold_convert (sizetype, valist_tmp);
      t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
		  fold_convert (TREE_TYPE (valist),
				fold_build2 (BIT_AND_EXPR, sizetype, t,
					     size_int (-boundary))));
      gimplify_and_add (t, pre_p);
    }
  else
    boundary = align;

  /* If the actual alignment is less than the alignment of the type,
     adjust the type accordingly so that we don't assume strict alignment
     when dereferencing the pointer.  */
  boundary *= BITS_PER_UNIT;
  if (boundary < TYPE_ALIGN (type))
    {
      type = build_variant_type_copy (type);
      TYPE_ALIGN (type) = boundary;
    }

  /* Compute the rounded size of the type.  */
  type_size = size_in_bytes (type);
  rounded_size = round_up (type_size, align);

  /* Reduce rounded_size so it's sharable with the postqueue.  */
  gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);

  /* Get AP.  */
  addr = valist_tmp;
  if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
    {
      /* Small args are padded downward.  */
      t = fold_build2 (GT_EXPR, sizetype, rounded_size, size_int (align));
      t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
		       size_binop (MINUS_EXPR, rounded_size, type_size));
      addr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (addr), addr, t);
    }

  /* Compute new value for AP.  */
  t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist), valist_tmp, rounded_size);
  t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
  gimplify_and_add (t, pre_p);

  addr = fold_convert (build_pointer_type (type), addr);

  /* For pass-by-reference, chase the implicit pointer we fetched.  */
  if (indirect)
    addr = build_va_arg_indirect_ref (addr);

  return build_va_arg_indirect_ref (addr);
}
4895
/* Build an indirect-ref expression over the given TREE, which represents a
   piece of a va_arg() expansion.  Returns the INDIRECT_REF tree, marked
   so mudflap will not instrument it.  */
tree
build_va_arg_indirect_ref (tree addr)
{
  addr = build_fold_indirect_ref (addr);

  if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF.  */
    mf_mark (addr);

  return addr;
}
4908
/* Return a dummy expression of type TYPE in order to keep going after an
   error.  */

static tree
dummy_object (tree type)
{
  /* Build *(TYPE *)0 -- a placeholder with the right type and mode so
     callers can continue without crashing; it is never executed.  */
  tree t = build_int_cst (build_pointer_type (type), 0);
  return build1 (INDIRECT_REF, type, t);
}
4918
/* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
   builtin function, but a very special sort of operator.  EXPR_P points
   at the VA_ARG_EXPR; setup/teardown statements are appended to PRE_P
   and POST_P.  Returns the usual gimplify status code.  */

enum gimplify_status
gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree promoted_type, have_va_type;
  tree valist = TREE_OPERAND (*expr_p, 0);
  tree type = TREE_TYPE (*expr_p);
  tree t;

  /* Verify that valist is of the proper type.  */
  have_va_type = TREE_TYPE (valist);
  if (have_va_type == error_mark_node)
    return GS_ERROR;
  have_va_type = targetm.canonical_va_list_type (have_va_type);

  if (have_va_type == NULL_TREE)
    {
      error ("first argument to %<va_arg%> not of type %<va_list%>");
      return GS_ERROR;
    }

  /* Generate a diagnostic for requesting data of a type that cannot
     be passed through `...' due to type promotion at the call site.  */
  if ((promoted_type = lang_hooks.types.type_promotes_to (type))
      != type)
    {
      static bool gave_help;
      bool warned;

      /* Unfortunately, this is merely undefined, rather than a constraint
	 violation, so we cannot make this an error.  If this call is never
	 executed, the program is still strictly conforming.  */
      warned = warning (0, "%qT is promoted to %qT when passed through %<...%>",
			type, promoted_type);
      /* The explanatory note is emitted only once per compilation.  */
      if (!gave_help && warned)
	{
	  gave_help = true;
	  inform (input_location, "(so you should pass %qT not %qT to %<va_arg%>)",
		  promoted_type, type);
	}

      /* We can, however, treat "undefined" any way we please.
	 Call abort to encourage the user to fix the program.  */
      if (warned)
	inform (input_location, "if this code is reached, the program will abort");
      t = build_call_expr (implicit_built_in_decls[BUILT_IN_TRAP], 0);
      gimplify_and_add (t, pre_p);

      /* This is dead code, but go ahead and finish so that the
	 mode of the result comes out right.  */
      *expr_p = dummy_object (type);
      return GS_ALL_DONE;
    }
  else
    {
      /* Make it easier for the backends by protecting the valist argument
	 from multiple evaluations.  */
      if (TREE_CODE (have_va_type) == ARRAY_TYPE)
	{
	  /* For this case, the backends will be expecting a pointer to
	     TREE_TYPE (abi), but it's possible we've
	     actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
	     So fix it.  */
	  if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
	    {
	      tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
	      valist = build_fold_addr_expr_with_type (valist, p1);
	    }

	  gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
	}
      else
	gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);

      if (!targetm.gimplify_va_arg_expr)
	/* FIXME: Once most targets are converted we should merely
	   assert this is non-null.  */
	return GS_ALL_DONE;

      *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
      return GS_OK;
    }
}
5004
/* Expand EXP, a call to __builtin_va_end.  */

static rtx
expand_builtin_va_end (tree exp)
{
  tree valist = CALL_EXPR_ARG (exp, 0);

  /* Evaluate for side effects, if needed.  I hate macros that don't
     do that.  */
  if (TREE_SIDE_EFFECTS (valist))
    expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* va_end itself generates no code; the call folds to zero.  */
  return const0_rtx;
}
5019
/* Expand EXP, a call to __builtin_va_copy.  We do this as a
   builtin rather than just as an assignment in stdarg.h because of the
   nastiness of array-type va_list types.  */

static rtx
expand_builtin_va_copy (tree exp)
{
  tree dst, src, t;

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  /* The destination must be usable as an lvalue; the source need not.  */
  dst = stabilize_va_list (dst, 1);
  src = stabilize_va_list (src, 0);

  gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);

  if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
    {
      /* Scalar or record va_list: a plain assignment copies it.  */
      t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }
  else
    {
      /* Array-type va_list: copy the underlying bytes with a block
	 move, since arrays cannot be assigned directly.  */
      rtx dstb, srcb, size;

      /* Evaluate to pointers.  */
      dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
      srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
      size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
			  NULL_RTX, VOIDmode, EXPAND_NORMAL);

      dstb = convert_memory_address (Pmode, dstb);
      srcb = convert_memory_address (Pmode, srcb);

      /* "Dereference" to BLKmode memories.  */
      dstb = gen_rtx_MEM (BLKmode, dstb);
      set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
      set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
      srcb = gen_rtx_MEM (BLKmode, srcb);
      set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
      set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));

      /* Copy.  */
      emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
    }

  return const0_rtx;
}
5070
5071 /* Expand a call to one of the builtin functions __builtin_frame_address or
5072 __builtin_return_address. */
5073
5074 static rtx
5075 expand_builtin_frame_address (tree fndecl, tree exp)
5076 {
5077 /* The argument must be a nonnegative integer constant.
5078 It counts the number of frames to scan up the stack.
5079 The value is the return address saved in that frame. */
5080 if (call_expr_nargs (exp) == 0)
5081 /* Warning about missing arg was already issued. */
5082 return const0_rtx;
5083 else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
5084 {
5085 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5086 error ("invalid argument to %<__builtin_frame_address%>");
5087 else
5088 error ("invalid argument to %<__builtin_return_address%>");
5089 return const0_rtx;
5090 }
5091 else
5092 {
5093 rtx tem
5094 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
5095 tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
5096
5097 /* Some ports cannot access arbitrary stack frames. */
5098 if (tem == NULL)
5099 {
5100 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5101 warning (0, "unsupported argument to %<__builtin_frame_address%>");
5102 else
5103 warning (0, "unsupported argument to %<__builtin_return_address%>");
5104 return const0_rtx;
5105 }
5106
5107 /* For __builtin_frame_address, return what we've got. */
5108 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5109 return tem;
5110
5111 if (!REG_P (tem)
5112 && ! CONSTANT_P (tem))
5113 tem = copy_to_mode_reg (Pmode, tem);
5114 return tem;
5115 }
5116 }
5117
5118 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if
5119 we failed and the caller should emit a normal call, otherwise try to get
5120 the result in TARGET, if convenient. */
5121
5122 static rtx
5123 expand_builtin_alloca (tree exp, rtx target)
5124 {
5125 rtx op0;
5126 rtx result;
5127
5128 /* In -fmudflap-instrumented code, alloca() and __builtin_alloca()
5129 should always expand to function calls. These can be intercepted
5130 in libmudflap. */
5131 if (flag_mudflap)
5132 return NULL_RTX;
5133
5134 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5135 return NULL_RTX;
5136
5137 /* Compute the argument. */
5138 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5139
5140 /* Allocate the desired space. */
5141 result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
5142 result = convert_memory_address (ptr_mode, result);
5143
5144 return result;
5145 }
5146
5147 /* Expand a call to a bswap builtin with argument ARG0. MODE
5148 is the mode to expand with. */
5149
5150 static rtx
5151 expand_builtin_bswap (tree exp, rtx target, rtx subtarget)
5152 {
5153 enum machine_mode mode;
5154 tree arg;
5155 rtx op0;
5156
5157 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5158 return NULL_RTX;
5159
5160 arg = CALL_EXPR_ARG (exp, 0);
5161 mode = TYPE_MODE (TREE_TYPE (arg));
5162 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5163
5164 target = expand_unop (mode, bswap_optab, op0, target, 1);
5165
5166 gcc_assert (target);
5167
5168 return convert_to_mode (mode, target, 0);
5169 }
5170
5171 /* Expand a call to a unary builtin in EXP.
5172 Return NULL_RTX if a normal call should be emitted rather than expanding the
5173 function in-line. If convenient, the result should be placed in TARGET.
5174 SUBTARGET may be used as the target for computing one of EXP's operands. */
5175
5176 static rtx
5177 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
5178 rtx subtarget, optab op_optab)
5179 {
5180 rtx op0;
5181
5182 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5183 return NULL_RTX;
5184
5185 /* Compute the argument. */
5186 op0 = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
5187 VOIDmode, EXPAND_NORMAL);
5188 /* Compute op, into TARGET if possible.
5189 Set TARGET to wherever the result comes back. */
5190 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5191 op_optab, op0, target, 1);
5192 gcc_assert (target);
5193
5194 return convert_to_mode (target_mode, target, 0);
5195 }
5196
5197 /* If the string passed to fputs is a constant and is one character
5198 long, we attempt to transform this call into __builtin_fputc(). */
5199
5200 static rtx
5201 expand_builtin_fputs (tree exp, rtx target, bool unlocked)
5202 {
5203 /* Verify the arguments in the original call. */
5204 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5205 {
5206 tree result = fold_builtin_fputs (CALL_EXPR_ARG (exp, 0),
5207 CALL_EXPR_ARG (exp, 1),
5208 (target == const0_rtx),
5209 unlocked, NULL_TREE);
5210 if (result)
5211 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
5212 }
5213 return NULL_RTX;
5214 }
5215
5216 /* Expand a call to __builtin_expect. We just return our argument
5217 as the builtin_expect semantic should've been already executed by
5218 tree branch prediction pass. */
5219
5220 static rtx
5221 expand_builtin_expect (tree exp, rtx target)
5222 {
5223 tree arg, c;
5224
5225 if (call_expr_nargs (exp) < 2)
5226 return const0_rtx;
5227 arg = CALL_EXPR_ARG (exp, 0);
5228 c = CALL_EXPR_ARG (exp, 1);
5229
5230 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5231 /* When guessing was done, the hints should be already stripped away. */
5232 gcc_assert (!flag_guess_branch_prob
5233 || optimize == 0 || errorcount || sorrycount);
5234 return target;
5235 }
5236
/* Emit code for __builtin_trap: use the target's trap instruction when
   one exists, otherwise call the library abort routine.  A barrier is
   emitted because control never continues past this point.  */
void
expand_builtin_trap (void)
{
#ifdef HAVE_trap
  if (HAVE_trap)
    emit_insn (gen_trap ());
  else
#endif
    emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
  emit_barrier ();
}
5248
/* Expand EXP, a call to fabs, fabsf or fabsl.
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function inline.  If convenient, the result should be placed
   in TARGET.  SUBTARGET may be used as the target for computing
   the operand.  */

static rtx
expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
{
  enum machine_mode mode;
  tree arg;
  rtx op0;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  /* Wrap the argument in a SAVE_EXPR and store it back into the call,
     presumably so the safe_from_p query below (and any re-expansion)
     sees a stable operand -- TODO confirm against expand_abs.  */
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));
  op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
  return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
}
5271
5272 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5273 Return NULL is a normal call should be emitted rather than expanding the
5274 function inline. If convenient, the result should be placed in TARGET.
5275 SUBTARGET may be used as the target for computing the operand. */
5276
5277 static rtx
5278 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5279 {
5280 rtx op0, op1;
5281 tree arg;
5282
5283 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5284 return NULL_RTX;
5285
5286 arg = CALL_EXPR_ARG (exp, 0);
5287 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5288
5289 arg = CALL_EXPR_ARG (exp, 1);
5290 op1 = expand_normal (arg);
5291
5292 return expand_copysign (op0, op1, target);
5293 }
5294
5295 /* Create a new constant string literal and return a char* pointer to it.
5296 The STRING_CST value is the LEN characters at STR. */
5297 tree
5298 build_string_literal (int len, const char *str)
5299 {
5300 tree t, elem, index, type;
5301
5302 t = build_string (len, str);
5303 elem = build_type_variant (char_type_node, 1, 0);
5304 index = build_index_type (size_int (len - 1));
5305 type = build_array_type (elem, index);
5306 TREE_TYPE (t) = type;
5307 TREE_CONSTANT (t) = 1;
5308 TREE_READONLY (t) = 1;
5309 TREE_STATIC (t) = 1;
5310
5311 type = build_pointer_type (elem);
5312 t = build1 (ADDR_EXPR, type,
5313 build4 (ARRAY_REF, elem,
5314 t, integer_zero_node, NULL_TREE, NULL_TREE));
5315 return t;
5316 }
5317
5318 /* Expand EXP, a call to printf or printf_unlocked.
5319 Return NULL_RTX if a normal call should be emitted rather than transforming
5320 the function inline. If convenient, the result should be placed in
5321 TARGET with mode MODE. UNLOCKED indicates this is a printf_unlocked
5322 call. */
5323 static rtx
5324 expand_builtin_printf (tree exp, rtx target, enum machine_mode mode,
5325 bool unlocked)
5326 {
5327 /* If we're using an unlocked function, assume the other unlocked
5328 functions exist explicitly. */
5329 tree const fn_putchar = unlocked ? built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED]
5330 : implicit_built_in_decls[BUILT_IN_PUTCHAR];
5331 tree const fn_puts = unlocked ? built_in_decls[BUILT_IN_PUTS_UNLOCKED]
5332 : implicit_built_in_decls[BUILT_IN_PUTS];
5333 const char *fmt_str;
5334 tree fn = 0;
5335 tree fmt, arg;
5336 int nargs = call_expr_nargs (exp);
5337
5338 /* If the return value is used, don't do the transformation. */
5339 if (target != const0_rtx)
5340 return NULL_RTX;
5341
5342 /* Verify the required arguments in the original call. */
5343 if (nargs == 0)
5344 return NULL_RTX;
5345 fmt = CALL_EXPR_ARG (exp, 0);
5346 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5347 return NULL_RTX;
5348
5349 /* Check whether the format is a literal string constant. */
5350 fmt_str = c_getstr (fmt);
5351 if (fmt_str == NULL)
5352 return NULL_RTX;
5353
5354 if (!init_target_chars ())
5355 return NULL_RTX;
5356
5357 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
5358 if (strcmp (fmt_str, target_percent_s_newline) == 0)
5359 {
5360 if ((nargs != 2)
5361 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 1))))
5362 return NULL_RTX;
5363 if (fn_puts)
5364 fn = build_call_expr (fn_puts, 1, CALL_EXPR_ARG (exp, 1));
5365 }
5366 /* If the format specifier was "%c", call __builtin_putchar(arg). */
5367 else if (strcmp (fmt_str, target_percent_c) == 0)
5368 {
5369 if ((nargs != 2)
5370 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1))) != INTEGER_TYPE)
5371 return NULL_RTX;
5372 if (fn_putchar)
5373 fn = build_call_expr (fn_putchar, 1, CALL_EXPR_ARG (exp, 1));
5374 }
5375 else
5376 {
5377 /* We can't handle anything else with % args or %% ... yet. */
5378 if (strchr (fmt_str, target_percent))
5379 return NULL_RTX;
5380
5381 if (nargs > 1)
5382 return NULL_RTX;
5383
5384 /* If the format specifier was "", printf does nothing. */
5385 if (fmt_str[0] == '\0')
5386 return const0_rtx;
5387 /* If the format specifier has length of 1, call putchar. */
5388 if (fmt_str[1] == '\0')
5389 {
5390 /* Given printf("c"), (where c is any one character,)
5391 convert "c"[0] to an int and pass that to the replacement
5392 function. */
5393 arg = build_int_cst (NULL_TREE, fmt_str[0]);
5394 if (fn_putchar)
5395 fn = build_call_expr (fn_putchar, 1, arg);
5396 }
5397 else
5398 {
5399 /* If the format specifier was "string\n", call puts("string"). */
5400 size_t len = strlen (fmt_str);
5401 if ((unsigned char)fmt_str[len - 1] == target_newline)
5402 {
5403 /* Create a NUL-terminated string that's one char shorter
5404 than the original, stripping off the trailing '\n'. */
5405 char *newstr = XALLOCAVEC (char, len);
5406 memcpy (newstr, fmt_str, len - 1);
5407 newstr[len - 1] = 0;
5408 arg = build_string_literal (len, newstr);
5409 if (fn_puts)
5410 fn = build_call_expr (fn_puts, 1, arg);
5411 }
5412 else
5413 /* We'd like to arrange to call fputs(string,stdout) here,
5414 but we need stdout and don't have a way to get it yet. */
5415 return NULL_RTX;
5416 }
5417 }
5418
5419 if (!fn)
5420 return NULL_RTX;
5421 if (TREE_CODE (fn) == CALL_EXPR)
5422 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5423 return expand_expr (fn, target, mode, EXPAND_NORMAL);
5424 }
5425
5426 /* Expand EXP, a call to fprintf or fprintf_unlocked.
5427 Return NULL_RTX if a normal call should be emitted rather than transforming
5428 the function inline. If convenient, the result should be placed in
5429 TARGET with mode MODE. UNLOCKED indicates this is a fprintf_unlocked
5430 call. */
5431 static rtx
5432 expand_builtin_fprintf (tree exp, rtx target, enum machine_mode mode,
5433 bool unlocked)
5434 {
5435 /* If we're using an unlocked function, assume the other unlocked
5436 functions exist explicitly. */
5437 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
5438 : implicit_built_in_decls[BUILT_IN_FPUTC];
5439 tree const fn_fputs = unlocked ? built_in_decls[BUILT_IN_FPUTS_UNLOCKED]
5440 : implicit_built_in_decls[BUILT_IN_FPUTS];
5441 const char *fmt_str;
5442 tree fn = 0;
5443 tree fmt, fp, arg;
5444 int nargs = call_expr_nargs (exp);
5445
5446 /* If the return value is used, don't do the transformation. */
5447 if (target != const0_rtx)
5448 return NULL_RTX;
5449
5450 /* Verify the required arguments in the original call. */
5451 if (nargs < 2)
5452 return NULL_RTX;
5453 fp = CALL_EXPR_ARG (exp, 0);
5454 if (! POINTER_TYPE_P (TREE_TYPE (fp)))
5455 return NULL_RTX;
5456 fmt = CALL_EXPR_ARG (exp, 1);
5457 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5458 return NULL_RTX;
5459
5460 /* Check whether the format is a literal string constant. */
5461 fmt_str = c_getstr (fmt);
5462 if (fmt_str == NULL)
5463 return NULL_RTX;
5464
5465 if (!init_target_chars ())
5466 return NULL_RTX;
5467
5468 /* If the format specifier was "%s", call __builtin_fputs(arg,fp). */
5469 if (strcmp (fmt_str, target_percent_s) == 0)
5470 {
5471 if ((nargs != 3)
5472 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 2))))
5473 return NULL_RTX;
5474 arg = CALL_EXPR_ARG (exp, 2);
5475 if (fn_fputs)
5476 fn = build_call_expr (fn_fputs, 2, arg, fp);
5477 }
5478 /* If the format specifier was "%c", call __builtin_fputc(arg,fp). */
5479 else if (strcmp (fmt_str, target_percent_c) == 0)
5480 {
5481 if ((nargs != 3)
5482 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2))) != INTEGER_TYPE)
5483 return NULL_RTX;
5484 arg = CALL_EXPR_ARG (exp, 2);
5485 if (fn_fputc)
5486 fn = build_call_expr (fn_fputc, 2, arg, fp);
5487 }
5488 else
5489 {
5490 /* We can't handle anything else with % args or %% ... yet. */
5491 if (strchr (fmt_str, target_percent))
5492 return NULL_RTX;
5493
5494 if (nargs > 2)
5495 return NULL_RTX;
5496
5497 /* If the format specifier was "", fprintf does nothing. */
5498 if (fmt_str[0] == '\0')
5499 {
5500 /* Evaluate and ignore FILE* argument for side-effects. */
5501 expand_expr (fp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5502 return const0_rtx;
5503 }
5504
5505 /* When "string" doesn't contain %, replace all cases of
5506 fprintf(stream,string) with fputs(string,stream). The fputs
5507 builtin will take care of special cases like length == 1. */
5508 if (fn_fputs)
5509 fn = build_call_expr (fn_fputs, 2, fmt, fp);
5510 }
5511
5512 if (!fn)
5513 return NULL_RTX;
5514 if (TREE_CODE (fn) == CALL_EXPR)
5515 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5516 return expand_expr (fn, target, mode, EXPAND_NORMAL);
5517 }
5518
5519 /* Expand a call EXP to sprintf. Return NULL_RTX if
5520 a normal call should be emitted rather than expanding the function
5521 inline. If convenient, the result should be placed in TARGET with
5522 mode MODE. */
5523
5524 static rtx
5525 expand_builtin_sprintf (tree exp, rtx target, enum machine_mode mode)
5526 {
5527 tree dest, fmt;
5528 const char *fmt_str;
5529 int nargs = call_expr_nargs (exp);
5530
5531 /* Verify the required arguments in the original call. */
5532 if (nargs < 2)
5533 return NULL_RTX;
5534 dest = CALL_EXPR_ARG (exp, 0);
5535 if (! POINTER_TYPE_P (TREE_TYPE (dest)))
5536 return NULL_RTX;
5537 fmt = CALL_EXPR_ARG (exp, 0);
5538 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5539 return NULL_RTX;
5540
5541 /* Check whether the format is a literal string constant. */
5542 fmt_str = c_getstr (fmt);
5543 if (fmt_str == NULL)
5544 return NULL_RTX;
5545
5546 if (!init_target_chars ())
5547 return NULL_RTX;
5548
5549 /* If the format doesn't contain % args or %%, use strcpy. */
5550 if (strchr (fmt_str, target_percent) == 0)
5551 {
5552 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5553 tree exp;
5554
5555 if ((nargs > 2) || ! fn)
5556 return NULL_RTX;
5557 expand_expr (build_call_expr (fn, 2, dest, fmt),
5558 const0_rtx, VOIDmode, EXPAND_NORMAL);
5559 if (target == const0_rtx)
5560 return const0_rtx;
5561 exp = build_int_cst (NULL_TREE, strlen (fmt_str));
5562 return expand_expr (exp, target, mode, EXPAND_NORMAL);
5563 }
5564 /* If the format is "%s", use strcpy if the result isn't used. */
5565 else if (strcmp (fmt_str, target_percent_s) == 0)
5566 {
5567 tree fn, arg, len;
5568 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5569
5570 if (! fn)
5571 return NULL_RTX;
5572 if (nargs != 3)
5573 return NULL_RTX;
5574 arg = CALL_EXPR_ARG (exp, 2);
5575 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
5576 return NULL_RTX;
5577
5578 if (target != const0_rtx)
5579 {
5580 len = c_strlen (arg, 1);
5581 if (! len || TREE_CODE (len) != INTEGER_CST)
5582 return NULL_RTX;
5583 }
5584 else
5585 len = NULL_TREE;
5586
5587 expand_expr (build_call_expr (fn, 2, dest, arg),
5588 const0_rtx, VOIDmode, EXPAND_NORMAL);
5589
5590 if (target == const0_rtx)
5591 return const0_rtx;
5592 return expand_expr (len, target, mode, EXPAND_NORMAL);
5593 }
5594
5595 return NULL_RTX;
5596 }
5597
5598 /* Expand a call to either the entry or exit function profiler. */
5599
5600 static rtx
5601 expand_builtin_profile_func (bool exitp)
5602 {
5603 rtx this_rtx, which;
5604
5605 this_rtx = DECL_RTL (current_function_decl);
5606 gcc_assert (MEM_P (this_rtx));
5607 this_rtx = XEXP (this_rtx, 0);
5608
5609 if (exitp)
5610 which = profile_function_exit_libfunc;
5611 else
5612 which = profile_function_entry_libfunc;
5613
5614 emit_library_call (which, LCT_NORMAL, VOIDmode, 2, this_rtx, Pmode,
5615 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
5616 0),
5617 Pmode);
5618
5619 return const0_rtx;
5620 }
5621
/* Expand a call to __builtin___clear_cache.  */

static rtx
expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
{
#ifndef HAVE_clear_cache
#ifdef CLEAR_INSN_CACHE
  /* There is no "clear_cache" insn, and __clear_cache() in libgcc
     does something.  Just do the default expansion to a call to
     __clear_cache().  */
  return NULL_RTX;
#else
  /* There is no "clear_cache" insn, and __clear_cache() in libgcc
     does nothing.  There is no need to call it.  Do nothing.  */
  return const0_rtx;
#endif /* CLEAR_INSN_CACHE */
#else
  /* We have a "clear_cache" insn, and it will handle everything.  */
  tree begin, end;
  rtx begin_rtx, end_rtx;
  enum insn_code icode;

  /* We must not expand to a library call.  If we did, any
     fallback library function in libgcc that might contain a call to
     __builtin___clear_cache() would recurse infinitely.  */
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    {
      error ("both arguments to %<__builtin___clear_cache%> must be pointers");
      return const0_rtx;
    }

  if (HAVE_clear_cache)
    {
      icode = CODE_FOR_clear_cache;

      /* Force each operand into a form the insn's predicates accept.  */
      begin = CALL_EXPR_ARG (exp, 0);
      begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
      begin_rtx = convert_memory_address (Pmode, begin_rtx);
      if (!insn_data[icode].operand[0].predicate (begin_rtx, Pmode))
	begin_rtx = copy_to_mode_reg (Pmode, begin_rtx);

      end = CALL_EXPR_ARG (exp, 1);
      end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
      end_rtx = convert_memory_address (Pmode, end_rtx);
      if (!insn_data[icode].operand[1].predicate (end_rtx, Pmode))
	end_rtx = copy_to_mode_reg (Pmode, end_rtx);

      emit_insn (gen_clear_cache (begin_rtx, end_rtx));
    }
  return const0_rtx;
#endif /* HAVE_clear_cache */
}
5674
5675 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5676
5677 static rtx
5678 round_trampoline_addr (rtx tramp)
5679 {
5680 rtx temp, addend, mask;
5681
5682 /* If we don't need too much alignment, we'll have been guaranteed
5683 proper alignment by get_trampoline_type. */
5684 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5685 return tramp;
5686
5687 /* Round address up to desired boundary. */
5688 temp = gen_reg_rtx (Pmode);
5689 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5690 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5691
5692 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5693 temp, 0, OPTAB_LIB_WIDEN);
5694 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5695 temp, 0, OPTAB_LIB_WIDEN);
5696
5697 return tramp;
5698 }
5699
/* Expand a call to __builtin_init_trampoline.  Arguments are the
   trampoline block address, the nested function's address, and the
   static chain value.  Returns const0_rtx, or NULL_RTX if the
   arguments fail validation and a normal call should be emitted.  */
static rtx
expand_builtin_init_trampoline (tree exp)
{
  tree t_tramp, t_func, t_chain;
  rtx r_tramp, r_func, r_chain;
#ifdef TRAMPOLINE_TEMPLATE
  rtx blktramp;
#endif

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
			 POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_tramp = CALL_EXPR_ARG (exp, 0);
  t_func = CALL_EXPR_ARG (exp, 1);
  t_chain = CALL_EXPR_ARG (exp, 2);

  r_tramp = expand_normal (t_tramp);
  r_func = expand_normal (t_func);
  r_chain = expand_normal (t_chain);

  /* Generate insns to initialize the trampoline.  */
  r_tramp = round_trampoline_addr (r_tramp);
#ifdef TRAMPOLINE_TEMPLATE
  /* Copy the target's canned instruction sequence into the block
     before the target macro patches in func and chain below.  */
  blktramp = gen_rtx_MEM (BLKmode, r_tramp);
  set_mem_align (blktramp, TRAMPOLINE_ALIGNMENT);
  emit_block_move (blktramp, assemble_trampoline_template (),
		   GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
#endif
  /* Record that a trampoline was emitted; other phases consult this
     flag (presumably for executable-stack handling -- see its users).  */
  trampolines_created = 1;
  INITIALIZE_TRAMPOLINE (r_tramp, r_func, r_chain);

  return const0_rtx;
}
5734
5735 static rtx
5736 expand_builtin_adjust_trampoline (tree exp)
5737 {
5738 rtx tramp;
5739
5740 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5741 return NULL_RTX;
5742
5743 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5744 tramp = round_trampoline_addr (tramp);
5745 #ifdef TRAMPOLINE_ADJUST_ADDRESS
5746 TRAMPOLINE_ADJUST_ADDRESS (tramp);
5747 #endif
5748
5749 return tramp;
5750 }
5751
/* Expand the call EXP to the built-in signbit, signbitf or signbitl
   function.  The function first checks whether the back end provides
   an insn to implement signbit for the respective mode.  If not, it
   checks whether the floating point format of the value is such that
   the sign bit can be extracted.  If that is not the case, the
   function returns NULL_RTX to indicate that a normal call should be
   emitted rather than expanding the function in-line.  EXP is the
   expression that is a call to the builtin function; if convenient,
   the result should be placed in TARGET.  */
static rtx
expand_builtin_signbit (tree exp, rtx target)
{
  const struct real_format *fmt;
  enum machine_mode fmode, imode, rmode;
  HOST_WIDE_INT hi, lo;
  tree arg;
  int word, bitpos;
  enum insn_code icode;
  rtx temp;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* FMODE is the mode of the floating point argument; RMODE the mode
     of the integer result.  */
  arg = CALL_EXPR_ARG (exp, 0);
  fmode = TYPE_MODE (TREE_TYPE (arg));
  rmode = TYPE_MODE (TREE_TYPE (exp));
  fmt = REAL_MODE_FORMAT (fmode);

  arg = builtin_save_expr (arg);

  /* Expand the argument yielding a RTX expression. */
  temp = expand_normal (arg);

  /* Check if the back end provides an insn that handles signbit for the
     argument's mode. */
  icode = signbit_optab->handlers [(int) fmode].insn_code;
  if (icode != CODE_FOR_nothing)
    {
      target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
      emit_unop_insn (icode, target, temp, UNKNOWN);
      return target;
    }

  /* For floating point formats without a sign bit, implement signbit
     as "ARG < 0.0".  */
  bitpos = fmt->signbit_ro;
  if (bitpos < 0)
    {
      /* But we can't do this if the format supports signed zero.  */
      if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
	return NULL_RTX;

      arg = fold_build2 (LT_EXPR, TREE_TYPE (exp), arg,
			 build_real (TREE_TYPE (arg), dconst0));
      return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
    }

  /* When the value fits in a word, view it as one integer of mode
     IMODE; otherwise isolate the single word that holds the sign bit
     and adjust BITPOS to be relative to that word.  */
  if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
    {
      imode = int_mode_for_mode (fmode);
      if (imode == BLKmode)
	return NULL_RTX;
      temp = gen_lowpart (imode, temp);
    }
  else
    {
      imode = word_mode;
      /* Handle targets with different FP word orders.  */
      if (FLOAT_WORDS_BIG_ENDIAN)
	word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
      else
	word = bitpos / BITS_PER_WORD;
      temp = operand_subword_force (temp, word, fmode);
      bitpos = bitpos % BITS_PER_WORD;
    }

  /* Force the intermediate word_mode (or narrower) result into a
     register.  This avoids attempting to create paradoxical SUBREGs
     of floating point modes below.  */
  temp = force_reg (imode, temp);

  /* If the bitpos is within the "result mode" lowpart, the operation
     can be implemented with a single bitwise AND.  Otherwise, we need
     a right shift and an AND.  */

  if (bitpos < GET_MODE_BITSIZE (rmode))
    {
      /* Build the single-bit mask 1 << BITPOS as a double-word
	 constant: LO holds the low word, HI the high word.  */
      if (bitpos < HOST_BITS_PER_WIDE_INT)
	{
	  hi = 0;
	  lo = (HOST_WIDE_INT) 1 << bitpos;
	}
      else
	{
	  hi = (HOST_WIDE_INT) 1 << (bitpos - HOST_BITS_PER_WIDE_INT);
	  lo = 0;
	}

      if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
	temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp,
			   immed_double_const (lo, hi, rmode),
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }
  else
    {
      /* Perform a logical right shift to place the signbit in the least
	 significant bit, then truncate the result to the desired mode
	 and mask just this bit.  */
      temp = expand_shift (RSHIFT_EXPR, imode, temp,
			   build_int_cst (NULL_TREE, bitpos), NULL_RTX, 1);
      temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp, const1_rtx,
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }

  return temp;
}
5870
5871 /* Expand fork or exec calls. TARGET is the desired target of the
5872 call. EXP is the call. FN is the
5873 identificator of the actual function. IGNORE is nonzero if the
5874 value is to be ignored. */
5875
5876 static rtx
5877 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5878 {
5879 tree id, decl;
5880 tree call;
5881
5882 /* If we are not profiling, just call the function. */
5883 if (!profile_arc_flag)
5884 return NULL_RTX;
5885
5886 /* Otherwise call the wrapper. This should be equivalent for the rest of
5887 compiler, so the code does not diverge, and the wrapper may run the
5888 code necessary for keeping the profiling sane. */
5889
5890 switch (DECL_FUNCTION_CODE (fn))
5891 {
5892 case BUILT_IN_FORK:
5893 id = get_identifier ("__gcov_fork");
5894 break;
5895
5896 case BUILT_IN_EXECL:
5897 id = get_identifier ("__gcov_execl");
5898 break;
5899
5900 case BUILT_IN_EXECV:
5901 id = get_identifier ("__gcov_execv");
5902 break;
5903
5904 case BUILT_IN_EXECLP:
5905 id = get_identifier ("__gcov_execlp");
5906 break;
5907
5908 case BUILT_IN_EXECLE:
5909 id = get_identifier ("__gcov_execle");
5910 break;
5911
5912 case BUILT_IN_EXECVP:
5913 id = get_identifier ("__gcov_execvp");
5914 break;
5915
5916 case BUILT_IN_EXECVE:
5917 id = get_identifier ("__gcov_execve");
5918 break;
5919
5920 default:
5921 gcc_unreachable ();
5922 }
5923
5924 decl = build_decl (FUNCTION_DECL, id, TREE_TYPE (fn));
5925 DECL_EXTERNAL (decl) = 1;
5926 TREE_PUBLIC (decl) = 1;
5927 DECL_ARTIFICIAL (decl) = 1;
5928 TREE_NOTHROW (decl) = 1;
5929 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5930 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5931 call = rewrite_call_expr (exp, 0, decl, 0);
5932 return expand_call (call, target, ignore);
5933 }
5934
5935
5936 \f
5937 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5938 the pointer in these functions is void*, the tree optimizers may remove
5939 casts. The mode computed in expand_builtin isn't reliable either, due
5940 to __sync_bool_compare_and_swap.
5941
5942 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5943 group of builtins. This gives us log2 of the mode size. */
5944
5945 static inline enum machine_mode
5946 get_builtin_sync_mode (int fcode_diff)
5947 {
5948 /* The size is not negotiable, so ask not to get BLKmode in return
5949 if the target indicates that a smaller size would be better. */
5950 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5951 }
5952
5953 /* Expand the memory expression LOC and return the appropriate memory operand
5954 for the builtin_sync operations. */
5955
5956 static rtx
5957 get_builtin_sync_mem (tree loc, enum machine_mode mode)
5958 {
5959 rtx addr, mem;
5960
5961 addr = expand_expr (loc, NULL_RTX, Pmode, EXPAND_SUM);
5962
5963 /* Note that we explicitly do not want any alias information for this
5964 memory, so that we kill all other live memories. Otherwise we don't
5965 satisfy the full barrier semantics of the intrinsic. */
5966 mem = validize_mem (gen_rtx_MEM (mode, addr));
5967
5968 set_mem_align (mem, get_pointer_alignment (loc, BIGGEST_ALIGNMENT));
5969 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5970 MEM_VOLATILE_P (mem) = 1;
5971
5972 return mem;
5973 }
5974
5975 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5976 EXP is the CALL_EXPR. CODE is the rtx code
5977 that corresponds to the arithmetic or logical operation from the name;
5978 an exception here is that NOT actually means NAND. TARGET is an optional
5979 place for us to store the results; AFTER is true if this is the
5980 fetch_and_xxx form. IGNORE is true if we don't actually care about
5981 the result of the operation at all. */
5982
5983 static rtx
5984 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
5985 enum rtx_code code, bool after,
5986 rtx target, bool ignore)
5987 {
5988 rtx val, mem;
5989 enum machine_mode old_mode;
5990
5991 /* Expand the operands. */
5992 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5993
5994 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5995 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5996 of CONST_INTs, where we know the old_mode only from the call argument. */
5997 old_mode = GET_MODE (val);
5998 if (old_mode == VOIDmode)
5999 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
6000 val = convert_modes (mode, old_mode, val, 1);
6001
6002 if (ignore)
6003 return expand_sync_operation (mem, val, code);
6004 else
6005 return expand_sync_fetch_operation (mem, val, code, after, target);
6006 }
6007
6008 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
6009 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
6010 true if this is the boolean form. TARGET is a place for us to store the
6011 results; this is NOT optional if IS_BOOL is true. */
6012
6013 static rtx
6014 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
6015 bool is_bool, rtx target)
6016 {
6017 rtx old_val, new_val, mem;
6018 enum machine_mode old_mode;
6019
6020 /* Expand the operands. */
6021 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6022
6023
6024 old_val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX,
6025 mode, EXPAND_NORMAL);
6026 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6027 of CONST_INTs, where we know the old_mode only from the call argument. */
6028 old_mode = GET_MODE (old_val);
6029 if (old_mode == VOIDmode)
6030 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
6031 old_val = convert_modes (mode, old_mode, old_val, 1);
6032
6033 new_val = expand_expr (CALL_EXPR_ARG (exp, 2), NULL_RTX,
6034 mode, EXPAND_NORMAL);
6035 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6036 of CONST_INTs, where we know the old_mode only from the call argument. */
6037 old_mode = GET_MODE (new_val);
6038 if (old_mode == VOIDmode)
6039 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2)));
6040 new_val = convert_modes (mode, old_mode, new_val, 1);
6041
6042 if (is_bool)
6043 return expand_bool_compare_and_swap (mem, old_val, new_val, target);
6044 else
6045 return expand_val_compare_and_swap (mem, old_val, new_val, target);
6046 }
6047
6048 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
6049 general form is actually an atomic exchange, and some targets only
6050 support a reduced form with the second argument being a constant 1.
6051 EXP is the CALL_EXPR; TARGET is an optional place for us to store
6052 the results. */
6053
6054 static rtx
6055 expand_builtin_lock_test_and_set (enum machine_mode mode, tree exp,
6056 rtx target)
6057 {
6058 rtx val, mem;
6059 enum machine_mode old_mode;
6060
6061 /* Expand the operands. */
6062 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6063 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
6064 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6065 of CONST_INTs, where we know the old_mode only from the call argument. */
6066 old_mode = GET_MODE (val);
6067 if (old_mode == VOIDmode)
6068 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
6069 val = convert_modes (mode, old_mode, val, 1);
6070
6071 return expand_sync_lock_test_and_set (mem, val, target);
6072 }
6073
6074 /* Expand the __sync_synchronize intrinsic. */
6075
6076 static void
6077 expand_builtin_synchronize (void)
6078 {
6079 tree x;
6080
6081 #ifdef HAVE_memory_barrier
6082 if (HAVE_memory_barrier)
6083 {
6084 emit_insn (gen_memory_barrier ());
6085 return;
6086 }
6087 #endif
6088
6089 if (synchronize_libfunc != NULL_RTX)
6090 {
6091 emit_library_call (synchronize_libfunc, LCT_NORMAL, VOIDmode, 0);
6092 return;
6093 }
6094
6095 /* If no explicit memory barrier instruction is available, create an
6096 empty asm stmt with a memory clobber. */
6097 x = build4 (ASM_EXPR, void_type_node, build_string (0, ""), NULL, NULL,
6098 tree_cons (NULL, build_string (6, "memory"), NULL));
6099 ASM_VOLATILE_P (x) = 1;
6100 expand_asm_expr (x);
6101 }
6102
6103 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
6104
6105 static void
6106 expand_builtin_lock_release (enum machine_mode mode, tree exp)
6107 {
6108 enum insn_code icode;
6109 rtx mem, insn;
6110 rtx val = const0_rtx;
6111
6112 /* Expand the operands. */
6113 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6114
6115 /* If there is an explicit operation in the md file, use it. */
6116 icode = sync_lock_release[mode];
6117 if (icode != CODE_FOR_nothing)
6118 {
6119 if (!insn_data[icode].operand[1].predicate (val, mode))
6120 val = force_reg (mode, val);
6121
6122 insn = GEN_FCN (icode) (mem, val);
6123 if (insn)
6124 {
6125 emit_insn (insn);
6126 return;
6127 }
6128 }
6129
6130 /* Otherwise we can implement this operation by emitting a barrier
6131 followed by a store of zero. */
6132 expand_builtin_synchronize ();
6133 emit_move_insn (mem, val);
6134 }
6135 \f
6136 /* Expand an expression EXP that calls a built-in function,
6137 with result going to TARGET if that's convenient
6138 (and in mode MODE if that's convenient).
6139 SUBTARGET may be used as the target for computing one of EXP's operands.
6140 IGNORE is nonzero if the value is to be ignored. */
6141
6142 rtx
6143 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
6144 int ignore)
6145 {
6146 tree fndecl = get_callee_fndecl (exp);
6147 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6148 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
6149
6150 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6151 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6152
6153 /* When not optimizing, generate calls to library functions for a certain
6154 set of builtins. */
6155 if (!optimize
6156 && !called_as_built_in (fndecl)
6157 && DECL_ASSEMBLER_NAME_SET_P (fndecl)
6158 && fcode != BUILT_IN_ALLOCA
6159 && fcode != BUILT_IN_FREE)
6160 return expand_call (exp, target, ignore);
6161
6162 /* The built-in function expanders test for target == const0_rtx
6163 to determine whether the function's result will be ignored. */
6164 if (ignore)
6165 target = const0_rtx;
6166
6167 /* If the result of a pure or const built-in function is ignored, and
6168 none of its arguments are volatile, we can avoid expanding the
6169 built-in call and just evaluate the arguments for side-effects. */
6170 if (target == const0_rtx
6171 && (DECL_PURE_P (fndecl) || TREE_READONLY (fndecl)))
6172 {
6173 bool volatilep = false;
6174 tree arg;
6175 call_expr_arg_iterator iter;
6176
6177 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6178 if (TREE_THIS_VOLATILE (arg))
6179 {
6180 volatilep = true;
6181 break;
6182 }
6183
6184 if (! volatilep)
6185 {
6186 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6187 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
6188 return const0_rtx;
6189 }
6190 }
6191
6192 switch (fcode)
6193 {
6194 CASE_FLT_FN (BUILT_IN_FABS):
6195 target = expand_builtin_fabs (exp, target, subtarget);
6196 if (target)
6197 return target;
6198 break;
6199
6200 CASE_FLT_FN (BUILT_IN_COPYSIGN):
6201 target = expand_builtin_copysign (exp, target, subtarget);
6202 if (target)
6203 return target;
6204 break;
6205
6206 /* Just do a normal library call if we were unable to fold
6207 the values. */
6208 CASE_FLT_FN (BUILT_IN_CABS):
6209 break;
6210
6211 CASE_FLT_FN (BUILT_IN_EXP):
6212 CASE_FLT_FN (BUILT_IN_EXP10):
6213 CASE_FLT_FN (BUILT_IN_POW10):
6214 CASE_FLT_FN (BUILT_IN_EXP2):
6215 CASE_FLT_FN (BUILT_IN_EXPM1):
6216 CASE_FLT_FN (BUILT_IN_LOGB):
6217 CASE_FLT_FN (BUILT_IN_LOG):
6218 CASE_FLT_FN (BUILT_IN_LOG10):
6219 CASE_FLT_FN (BUILT_IN_LOG2):
6220 CASE_FLT_FN (BUILT_IN_LOG1P):
6221 CASE_FLT_FN (BUILT_IN_TAN):
6222 CASE_FLT_FN (BUILT_IN_ASIN):
6223 CASE_FLT_FN (BUILT_IN_ACOS):
6224 CASE_FLT_FN (BUILT_IN_ATAN):
6225 /* Treat these like sqrt only if unsafe math optimizations are allowed,
6226 because of possible accuracy problems. */
6227 if (! flag_unsafe_math_optimizations)
6228 break;
6229 CASE_FLT_FN (BUILT_IN_SQRT):
6230 CASE_FLT_FN (BUILT_IN_FLOOR):
6231 CASE_FLT_FN (BUILT_IN_CEIL):
6232 CASE_FLT_FN (BUILT_IN_TRUNC):
6233 CASE_FLT_FN (BUILT_IN_ROUND):
6234 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6235 CASE_FLT_FN (BUILT_IN_RINT):
6236 target = expand_builtin_mathfn (exp, target, subtarget);
6237 if (target)
6238 return target;
6239 break;
6240
6241 CASE_FLT_FN (BUILT_IN_ILOGB):
6242 if (! flag_unsafe_math_optimizations)
6243 break;
6244 CASE_FLT_FN (BUILT_IN_ISINF):
6245 CASE_FLT_FN (BUILT_IN_FINITE):
6246 case BUILT_IN_ISFINITE:
6247 case BUILT_IN_ISNORMAL:
6248 target = expand_builtin_interclass_mathfn (exp, target, subtarget);
6249 if (target)
6250 return target;
6251 break;
6252
6253 CASE_FLT_FN (BUILT_IN_LCEIL):
6254 CASE_FLT_FN (BUILT_IN_LLCEIL):
6255 CASE_FLT_FN (BUILT_IN_LFLOOR):
6256 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6257 target = expand_builtin_int_roundingfn (exp, target);
6258 if (target)
6259 return target;
6260 break;
6261
6262 CASE_FLT_FN (BUILT_IN_LRINT):
6263 CASE_FLT_FN (BUILT_IN_LLRINT):
6264 CASE_FLT_FN (BUILT_IN_LROUND):
6265 CASE_FLT_FN (BUILT_IN_LLROUND):
6266 target = expand_builtin_int_roundingfn_2 (exp, target);
6267 if (target)
6268 return target;
6269 break;
6270
6271 CASE_FLT_FN (BUILT_IN_POW):
6272 target = expand_builtin_pow (exp, target, subtarget);
6273 if (target)
6274 return target;
6275 break;
6276
6277 CASE_FLT_FN (BUILT_IN_POWI):
6278 target = expand_builtin_powi (exp, target, subtarget);
6279 if (target)
6280 return target;
6281 break;
6282
6283 CASE_FLT_FN (BUILT_IN_ATAN2):
6284 CASE_FLT_FN (BUILT_IN_LDEXP):
6285 CASE_FLT_FN (BUILT_IN_SCALB):
6286 CASE_FLT_FN (BUILT_IN_SCALBN):
6287 CASE_FLT_FN (BUILT_IN_SCALBLN):
6288 if (! flag_unsafe_math_optimizations)
6289 break;
6290
6291 CASE_FLT_FN (BUILT_IN_FMOD):
6292 CASE_FLT_FN (BUILT_IN_REMAINDER):
6293 CASE_FLT_FN (BUILT_IN_DREM):
6294 target = expand_builtin_mathfn_2 (exp, target, subtarget);
6295 if (target)
6296 return target;
6297 break;
6298
6299 CASE_FLT_FN (BUILT_IN_CEXPI):
6300 target = expand_builtin_cexpi (exp, target, subtarget);
6301 gcc_assert (target);
6302 return target;
6303
6304 CASE_FLT_FN (BUILT_IN_SIN):
6305 CASE_FLT_FN (BUILT_IN_COS):
6306 if (! flag_unsafe_math_optimizations)
6307 break;
6308 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6309 if (target)
6310 return target;
6311 break;
6312
6313 CASE_FLT_FN (BUILT_IN_SINCOS):
6314 if (! flag_unsafe_math_optimizations)
6315 break;
6316 target = expand_builtin_sincos (exp);
6317 if (target)
6318 return target;
6319 break;
6320
6321 case BUILT_IN_APPLY_ARGS:
6322 return expand_builtin_apply_args ();
6323
6324 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6325 FUNCTION with a copy of the parameters described by
6326 ARGUMENTS, and ARGSIZE. It returns a block of memory
6327 allocated on the stack into which is stored all the registers
6328 that might possibly be used for returning the result of a
6329 function. ARGUMENTS is the value returned by
6330 __builtin_apply_args. ARGSIZE is the number of bytes of
6331 arguments that must be copied. ??? How should this value be
6332 computed? We'll also need a safe worst case value for varargs
6333 functions. */
6334 case BUILT_IN_APPLY:
6335 if (!validate_arglist (exp, POINTER_TYPE,
6336 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6337 && !validate_arglist (exp, REFERENCE_TYPE,
6338 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6339 return const0_rtx;
6340 else
6341 {
6342 rtx ops[3];
6343
6344 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6345 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6346 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6347
6348 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6349 }
6350
6351 /* __builtin_return (RESULT) causes the function to return the
6352 value described by RESULT. RESULT is address of the block of
6353 memory returned by __builtin_apply. */
6354 case BUILT_IN_RETURN:
6355 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6356 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6357 return const0_rtx;
6358
6359 case BUILT_IN_SAVEREGS:
6360 return expand_builtin_saveregs ();
6361
6362 case BUILT_IN_ARGS_INFO:
6363 return expand_builtin_args_info (exp);
6364
6365 case BUILT_IN_VA_ARG_PACK:
6366 /* All valid uses of __builtin_va_arg_pack () are removed during
6367 inlining. */
6368 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6369 return const0_rtx;
6370
6371 case BUILT_IN_VA_ARG_PACK_LEN:
6372 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6373 inlining. */
6374 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6375 return const0_rtx;
6376
6377 /* Return the address of the first anonymous stack arg. */
6378 case BUILT_IN_NEXT_ARG:
6379 if (fold_builtin_next_arg (exp, false))
6380 return const0_rtx;
6381 return expand_builtin_next_arg ();
6382
6383 case BUILT_IN_CLEAR_CACHE:
6384 target = expand_builtin___clear_cache (exp);
6385 if (target)
6386 return target;
6387 break;
6388
6389 case BUILT_IN_CLASSIFY_TYPE:
6390 return expand_builtin_classify_type (exp);
6391
6392 case BUILT_IN_CONSTANT_P:
6393 return const0_rtx;
6394
6395 case BUILT_IN_FRAME_ADDRESS:
6396 case BUILT_IN_RETURN_ADDRESS:
6397 return expand_builtin_frame_address (fndecl, exp);
6398
6399 /* Returns the address of the area where the structure is returned.
6400 0 otherwise. */
6401 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6402 if (call_expr_nargs (exp) != 0
6403 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6404 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6405 return const0_rtx;
6406 else
6407 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6408
6409 case BUILT_IN_ALLOCA:
6410 target = expand_builtin_alloca (exp, target);
6411 if (target)
6412 return target;
6413 break;
6414
6415 case BUILT_IN_STACK_SAVE:
6416 return expand_stack_save ();
6417
6418 case BUILT_IN_STACK_RESTORE:
6419 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6420 return const0_rtx;
6421
6422 case BUILT_IN_BSWAP32:
6423 case BUILT_IN_BSWAP64:
6424 target = expand_builtin_bswap (exp, target, subtarget);
6425
6426 if (target)
6427 return target;
6428 break;
6429
6430 CASE_INT_FN (BUILT_IN_FFS):
6431 case BUILT_IN_FFSIMAX:
6432 target = expand_builtin_unop (target_mode, exp, target,
6433 subtarget, ffs_optab);
6434 if (target)
6435 return target;
6436 break;
6437
6438 CASE_INT_FN (BUILT_IN_CLZ):
6439 case BUILT_IN_CLZIMAX:
6440 target = expand_builtin_unop (target_mode, exp, target,
6441 subtarget, clz_optab);
6442 if (target)
6443 return target;
6444 break;
6445
6446 CASE_INT_FN (BUILT_IN_CTZ):
6447 case BUILT_IN_CTZIMAX:
6448 target = expand_builtin_unop (target_mode, exp, target,
6449 subtarget, ctz_optab);
6450 if (target)
6451 return target;
6452 break;
6453
6454 CASE_INT_FN (BUILT_IN_POPCOUNT):
6455 case BUILT_IN_POPCOUNTIMAX:
6456 target = expand_builtin_unop (target_mode, exp, target,
6457 subtarget, popcount_optab);
6458 if (target)
6459 return target;
6460 break;
6461
6462 CASE_INT_FN (BUILT_IN_PARITY):
6463 case BUILT_IN_PARITYIMAX:
6464 target = expand_builtin_unop (target_mode, exp, target,
6465 subtarget, parity_optab);
6466 if (target)
6467 return target;
6468 break;
6469
6470 case BUILT_IN_STRLEN:
6471 target = expand_builtin_strlen (exp, target, target_mode);
6472 if (target)
6473 return target;
6474 break;
6475
6476 case BUILT_IN_STRCPY:
6477 target = expand_builtin_strcpy (fndecl, exp, target, mode);
6478 if (target)
6479 return target;
6480 break;
6481
6482 case BUILT_IN_STRNCPY:
6483 target = expand_builtin_strncpy (exp, target, mode);
6484 if (target)
6485 return target;
6486 break;
6487
6488 case BUILT_IN_STPCPY:
6489 target = expand_builtin_stpcpy (exp, target, mode);
6490 if (target)
6491 return target;
6492 break;
6493
6494 case BUILT_IN_STRCAT:
6495 target = expand_builtin_strcat (fndecl, exp, target, mode);
6496 if (target)
6497 return target;
6498 break;
6499
6500 case BUILT_IN_STRNCAT:
6501 target = expand_builtin_strncat (exp, target, mode);
6502 if (target)
6503 return target;
6504 break;
6505
6506 case BUILT_IN_STRSPN:
6507 target = expand_builtin_strspn (exp, target, mode);
6508 if (target)
6509 return target;
6510 break;
6511
6512 case BUILT_IN_STRCSPN:
6513 target = expand_builtin_strcspn (exp, target, mode);
6514 if (target)
6515 return target;
6516 break;
6517
6518 case BUILT_IN_STRSTR:
6519 target = expand_builtin_strstr (exp, target, mode);
6520 if (target)
6521 return target;
6522 break;
6523
6524 case BUILT_IN_STRPBRK:
6525 target = expand_builtin_strpbrk (exp, target, mode);
6526 if (target)
6527 return target;
6528 break;
6529
6530 case BUILT_IN_INDEX:
6531 case BUILT_IN_STRCHR:
6532 target = expand_builtin_strchr (exp, target, mode);
6533 if (target)
6534 return target;
6535 break;
6536
6537 case BUILT_IN_RINDEX:
6538 case BUILT_IN_STRRCHR:
6539 target = expand_builtin_strrchr (exp, target, mode);
6540 if (target)
6541 return target;
6542 break;
6543
6544 case BUILT_IN_MEMCPY:
6545 target = expand_builtin_memcpy (exp, target, mode);
6546 if (target)
6547 return target;
6548 break;
6549
6550 case BUILT_IN_MEMPCPY:
6551 target = expand_builtin_mempcpy (exp, target, mode);
6552 if (target)
6553 return target;
6554 break;
6555
6556 case BUILT_IN_MEMMOVE:
6557 target = expand_builtin_memmove (exp, target, mode, ignore);
6558 if (target)
6559 return target;
6560 break;
6561
6562 case BUILT_IN_BCOPY:
6563 target = expand_builtin_bcopy (exp, ignore);
6564 if (target)
6565 return target;
6566 break;
6567
6568 case BUILT_IN_MEMSET:
6569 target = expand_builtin_memset (exp, target, mode);
6570 if (target)
6571 return target;
6572 break;
6573
6574 case BUILT_IN_BZERO:
6575 target = expand_builtin_bzero (exp);
6576 if (target)
6577 return target;
6578 break;
6579
6580 case BUILT_IN_STRCMP:
6581 target = expand_builtin_strcmp (exp, target, mode);
6582 if (target)
6583 return target;
6584 break;
6585
6586 case BUILT_IN_STRNCMP:
6587 target = expand_builtin_strncmp (exp, target, mode);
6588 if (target)
6589 return target;
6590 break;
6591
6592 case BUILT_IN_MEMCHR:
6593 target = expand_builtin_memchr (exp, target, mode);
6594 if (target)
6595 return target;
6596 break;
6597
6598 case BUILT_IN_BCMP:
6599 case BUILT_IN_MEMCMP:
6600 target = expand_builtin_memcmp (exp, target, mode);
6601 if (target)
6602 return target;
6603 break;
6604
6605 case BUILT_IN_SETJMP:
6606 /* This should have been lowered to the builtins below. */
6607 gcc_unreachable ();
6608
6609 case BUILT_IN_SETJMP_SETUP:
6610 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6611 and the receiver label. */
6612 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6613 {
6614 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6615 VOIDmode, EXPAND_NORMAL);
6616 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6617 rtx label_r = label_rtx (label);
6618
6619 /* This is copied from the handling of non-local gotos. */
6620 expand_builtin_setjmp_setup (buf_addr, label_r);
6621 nonlocal_goto_handler_labels
6622 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6623 nonlocal_goto_handler_labels);
6624 /* ??? Do not let expand_label treat us as such since we would
6625 not want to be both on the list of non-local labels and on
6626 the list of forced labels. */
6627 FORCED_LABEL (label) = 0;
6628 return const0_rtx;
6629 }
6630 break;
6631
6632 case BUILT_IN_SETJMP_DISPATCHER:
6633 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6634 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6635 {
6636 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6637 rtx label_r = label_rtx (label);
6638
6639 /* Remove the dispatcher label from the list of non-local labels
6640 since the receiver labels have been added to it above. */
6641 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6642 return const0_rtx;
6643 }
6644 break;
6645
6646 case BUILT_IN_SETJMP_RECEIVER:
6647 /* __builtin_setjmp_receiver is passed the receiver label. */
6648 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6649 {
6650 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6651 rtx label_r = label_rtx (label);
6652
6653 expand_builtin_setjmp_receiver (label_r);
6654 return const0_rtx;
6655 }
6656 break;
6657
6658 /* __builtin_longjmp is passed a pointer to an array of five words.
6659 It's similar to the C library longjmp function but works with
6660 __builtin_setjmp above. */
6661 case BUILT_IN_LONGJMP:
6662 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6663 {
6664 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6665 VOIDmode, EXPAND_NORMAL);
6666 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6667
6668 if (value != const1_rtx)
6669 {
6670 error ("%<__builtin_longjmp%> second argument must be 1");
6671 return const0_rtx;
6672 }
6673
6674 expand_builtin_longjmp (buf_addr, value);
6675 return const0_rtx;
6676 }
6677 break;
6678
6679 case BUILT_IN_NONLOCAL_GOTO:
6680 target = expand_builtin_nonlocal_goto (exp);
6681 if (target)
6682 return target;
6683 break;
6684
6685 /* This updates the setjmp buffer that is its argument with the value
6686 of the current stack pointer. */
6687 case BUILT_IN_UPDATE_SETJMP_BUF:
6688 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6689 {
6690 rtx buf_addr
6691 = expand_normal (CALL_EXPR_ARG (exp, 0));
6692
6693 expand_builtin_update_setjmp_buf (buf_addr);
6694 return const0_rtx;
6695 }
6696 break;
6697
6698 case BUILT_IN_TRAP:
6699 expand_builtin_trap ();
6700 return const0_rtx;
6701
6702 case BUILT_IN_PRINTF:
6703 target = expand_builtin_printf (exp, target, mode, false);
6704 if (target)
6705 return target;
6706 break;
6707
6708 case BUILT_IN_PRINTF_UNLOCKED:
6709 target = expand_builtin_printf (exp, target, mode, true);
6710 if (target)
6711 return target;
6712 break;
6713
6714 case BUILT_IN_FPUTS:
6715 target = expand_builtin_fputs (exp, target, false);
6716 if (target)
6717 return target;
6718 break;
6719 case BUILT_IN_FPUTS_UNLOCKED:
6720 target = expand_builtin_fputs (exp, target, true);
6721 if (target)
6722 return target;
6723 break;
6724
6725 case BUILT_IN_FPRINTF:
6726 target = expand_builtin_fprintf (exp, target, mode, false);
6727 if (target)
6728 return target;
6729 break;
6730
6731 case BUILT_IN_FPRINTF_UNLOCKED:
6732 target = expand_builtin_fprintf (exp, target, mode, true);
6733 if (target)
6734 return target;
6735 break;
6736
6737 case BUILT_IN_SPRINTF:
6738 target = expand_builtin_sprintf (exp, target, mode);
6739 if (target)
6740 return target;
6741 break;
6742
6743 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6744 case BUILT_IN_SIGNBITD32:
6745 case BUILT_IN_SIGNBITD64:
6746 case BUILT_IN_SIGNBITD128:
6747 target = expand_builtin_signbit (exp, target);
6748 if (target)
6749 return target;
6750 break;
6751
6752 /* Various hooks for the DWARF 2 __throw routine. */
6753 case BUILT_IN_UNWIND_INIT:
6754 expand_builtin_unwind_init ();
6755 return const0_rtx;
6756 case BUILT_IN_DWARF_CFA:
6757 return virtual_cfa_rtx;
6758 #ifdef DWARF2_UNWIND_INFO
6759 case BUILT_IN_DWARF_SP_COLUMN:
6760 return expand_builtin_dwarf_sp_column ();
6761 case BUILT_IN_INIT_DWARF_REG_SIZES:
6762 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6763 return const0_rtx;
6764 #endif
6765 case BUILT_IN_FROB_RETURN_ADDR:
6766 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6767 case BUILT_IN_EXTRACT_RETURN_ADDR:
6768 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6769 case BUILT_IN_EH_RETURN:
6770 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6771 CALL_EXPR_ARG (exp, 1));
6772 return const0_rtx;
6773 #ifdef EH_RETURN_DATA_REGNO
6774 case BUILT_IN_EH_RETURN_DATA_REGNO:
6775 return expand_builtin_eh_return_data_regno (exp);
6776 #endif
6777 case BUILT_IN_EXTEND_POINTER:
6778 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6779
6780 case BUILT_IN_VA_START:
6781 return expand_builtin_va_start (exp);
6782 case BUILT_IN_VA_END:
6783 return expand_builtin_va_end (exp);
6784 case BUILT_IN_VA_COPY:
6785 return expand_builtin_va_copy (exp);
6786 case BUILT_IN_EXPECT:
6787 return expand_builtin_expect (exp, target);
6788 case BUILT_IN_PREFETCH:
6789 expand_builtin_prefetch (exp);
6790 return const0_rtx;
6791
6792 case BUILT_IN_PROFILE_FUNC_ENTER:
6793 return expand_builtin_profile_func (false);
6794 case BUILT_IN_PROFILE_FUNC_EXIT:
6795 return expand_builtin_profile_func (true);
6796
6797 case BUILT_IN_INIT_TRAMPOLINE:
6798 return expand_builtin_init_trampoline (exp);
6799 case BUILT_IN_ADJUST_TRAMPOLINE:
6800 return expand_builtin_adjust_trampoline (exp);
6801
6802 case BUILT_IN_FORK:
6803 case BUILT_IN_EXECL:
6804 case BUILT_IN_EXECV:
6805 case BUILT_IN_EXECLP:
6806 case BUILT_IN_EXECLE:
6807 case BUILT_IN_EXECVP:
6808 case BUILT_IN_EXECVE:
6809 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6810 if (target)
6811 return target;
6812 break;
6813
6814 case BUILT_IN_FETCH_AND_ADD_1:
6815 case BUILT_IN_FETCH_AND_ADD_2:
6816 case BUILT_IN_FETCH_AND_ADD_4:
6817 case BUILT_IN_FETCH_AND_ADD_8:
6818 case BUILT_IN_FETCH_AND_ADD_16:
6819 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_ADD_1);
6820 target = expand_builtin_sync_operation (mode, exp, PLUS,
6821 false, target, ignore);
6822 if (target)
6823 return target;
6824 break;
6825
6826 case BUILT_IN_FETCH_AND_SUB_1:
6827 case BUILT_IN_FETCH_AND_SUB_2:
6828 case BUILT_IN_FETCH_AND_SUB_4:
6829 case BUILT_IN_FETCH_AND_SUB_8:
6830 case BUILT_IN_FETCH_AND_SUB_16:
6831 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_SUB_1);
6832 target = expand_builtin_sync_operation (mode, exp, MINUS,
6833 false, target, ignore);
6834 if (target)
6835 return target;
6836 break;
6837
6838 case BUILT_IN_FETCH_AND_OR_1:
6839 case BUILT_IN_FETCH_AND_OR_2:
6840 case BUILT_IN_FETCH_AND_OR_4:
6841 case BUILT_IN_FETCH_AND_OR_8:
6842 case BUILT_IN_FETCH_AND_OR_16:
6843 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_OR_1);
6844 target = expand_builtin_sync_operation (mode, exp, IOR,
6845 false, target, ignore);
6846 if (target)
6847 return target;
6848 break;
6849
6850 case BUILT_IN_FETCH_AND_AND_1:
6851 case BUILT_IN_FETCH_AND_AND_2:
6852 case BUILT_IN_FETCH_AND_AND_4:
6853 case BUILT_IN_FETCH_AND_AND_8:
6854 case BUILT_IN_FETCH_AND_AND_16:
6855 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_AND_1);
6856 target = expand_builtin_sync_operation (mode, exp, AND,
6857 false, target, ignore);
6858 if (target)
6859 return target;
6860 break;
6861
6862 case BUILT_IN_FETCH_AND_XOR_1:
6863 case BUILT_IN_FETCH_AND_XOR_2:
6864 case BUILT_IN_FETCH_AND_XOR_4:
6865 case BUILT_IN_FETCH_AND_XOR_8:
6866 case BUILT_IN_FETCH_AND_XOR_16:
6867 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_XOR_1);
6868 target = expand_builtin_sync_operation (mode, exp, XOR,
6869 false, target, ignore);
6870 if (target)
6871 return target;
6872 break;
6873
6874 case BUILT_IN_FETCH_AND_NAND_1:
6875 case BUILT_IN_FETCH_AND_NAND_2:
6876 case BUILT_IN_FETCH_AND_NAND_4:
6877 case BUILT_IN_FETCH_AND_NAND_8:
6878 case BUILT_IN_FETCH_AND_NAND_16:
6879 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_NAND_1);
6880 target = expand_builtin_sync_operation (mode, exp, NOT,
6881 false, target, ignore);
6882 if (target)
6883 return target;
6884 break;
6885
6886 case BUILT_IN_ADD_AND_FETCH_1:
6887 case BUILT_IN_ADD_AND_FETCH_2:
6888 case BUILT_IN_ADD_AND_FETCH_4:
6889 case BUILT_IN_ADD_AND_FETCH_8:
6890 case BUILT_IN_ADD_AND_FETCH_16:
6891 mode = get_builtin_sync_mode (fcode - BUILT_IN_ADD_AND_FETCH_1);
6892 target = expand_builtin_sync_operation (mode, exp, PLUS,
6893 true, target, ignore);
6894 if (target)
6895 return target;
6896 break;
6897
6898 case BUILT_IN_SUB_AND_FETCH_1:
6899 case BUILT_IN_SUB_AND_FETCH_2:
6900 case BUILT_IN_SUB_AND_FETCH_4:
6901 case BUILT_IN_SUB_AND_FETCH_8:
6902 case BUILT_IN_SUB_AND_FETCH_16:
6903 mode = get_builtin_sync_mode (fcode - BUILT_IN_SUB_AND_FETCH_1);
6904 target = expand_builtin_sync_operation (mode, exp, MINUS,
6905 true, target, ignore);
6906 if (target)
6907 return target;
6908 break;
6909
6910 case BUILT_IN_OR_AND_FETCH_1:
6911 case BUILT_IN_OR_AND_FETCH_2:
6912 case BUILT_IN_OR_AND_FETCH_4:
6913 case BUILT_IN_OR_AND_FETCH_8:
6914 case BUILT_IN_OR_AND_FETCH_16:
6915 mode = get_builtin_sync_mode (fcode - BUILT_IN_OR_AND_FETCH_1);
6916 target = expand_builtin_sync_operation (mode, exp, IOR,
6917 true, target, ignore);
6918 if (target)
6919 return target;
6920 break;
6921
6922 case BUILT_IN_AND_AND_FETCH_1:
6923 case BUILT_IN_AND_AND_FETCH_2:
6924 case BUILT_IN_AND_AND_FETCH_4:
6925 case BUILT_IN_AND_AND_FETCH_8:
6926 case BUILT_IN_AND_AND_FETCH_16:
6927 mode = get_builtin_sync_mode (fcode - BUILT_IN_AND_AND_FETCH_1);
6928 target = expand_builtin_sync_operation (mode, exp, AND,
6929 true, target, ignore);
6930 if (target)
6931 return target;
6932 break;
6933
6934 case BUILT_IN_XOR_AND_FETCH_1:
6935 case BUILT_IN_XOR_AND_FETCH_2:
6936 case BUILT_IN_XOR_AND_FETCH_4:
6937 case BUILT_IN_XOR_AND_FETCH_8:
6938 case BUILT_IN_XOR_AND_FETCH_16:
6939 mode = get_builtin_sync_mode (fcode - BUILT_IN_XOR_AND_FETCH_1);
6940 target = expand_builtin_sync_operation (mode, exp, XOR,
6941 true, target, ignore);
6942 if (target)
6943 return target;
6944 break;
6945
6946 case BUILT_IN_NAND_AND_FETCH_1:
6947 case BUILT_IN_NAND_AND_FETCH_2:
6948 case BUILT_IN_NAND_AND_FETCH_4:
6949 case BUILT_IN_NAND_AND_FETCH_8:
6950 case BUILT_IN_NAND_AND_FETCH_16:
6951 mode = get_builtin_sync_mode (fcode - BUILT_IN_NAND_AND_FETCH_1);
6952 target = expand_builtin_sync_operation (mode, exp, NOT,
6953 true, target, ignore);
6954 if (target)
6955 return target;
6956 break;
6957
6958 case BUILT_IN_BOOL_COMPARE_AND_SWAP_1:
6959 case BUILT_IN_BOOL_COMPARE_AND_SWAP_2:
6960 case BUILT_IN_BOOL_COMPARE_AND_SWAP_4:
6961 case BUILT_IN_BOOL_COMPARE_AND_SWAP_8:
6962 case BUILT_IN_BOOL_COMPARE_AND_SWAP_16:
6963 if (mode == VOIDmode)
6964 mode = TYPE_MODE (boolean_type_node);
6965 if (!target || !register_operand (target, mode))
6966 target = gen_reg_rtx (mode);
6967
6968 mode = get_builtin_sync_mode (fcode - BUILT_IN_BOOL_COMPARE_AND_SWAP_1);
6969 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6970 if (target)
6971 return target;
6972 break;
6973
6974 case BUILT_IN_VAL_COMPARE_AND_SWAP_1:
6975 case BUILT_IN_VAL_COMPARE_AND_SWAP_2:
6976 case BUILT_IN_VAL_COMPARE_AND_SWAP_4:
6977 case BUILT_IN_VAL_COMPARE_AND_SWAP_8:
6978 case BUILT_IN_VAL_COMPARE_AND_SWAP_16:
6979 mode = get_builtin_sync_mode (fcode - BUILT_IN_VAL_COMPARE_AND_SWAP_1);
6980 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6981 if (target)
6982 return target;
6983 break;
6984
6985 case BUILT_IN_LOCK_TEST_AND_SET_1:
6986 case BUILT_IN_LOCK_TEST_AND_SET_2:
6987 case BUILT_IN_LOCK_TEST_AND_SET_4:
6988 case BUILT_IN_LOCK_TEST_AND_SET_8:
6989 case BUILT_IN_LOCK_TEST_AND_SET_16:
6990 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_TEST_AND_SET_1);
6991 target = expand_builtin_lock_test_and_set (mode, exp, target);
6992 if (target)
6993 return target;
6994 break;
6995
6996 case BUILT_IN_LOCK_RELEASE_1:
6997 case BUILT_IN_LOCK_RELEASE_2:
6998 case BUILT_IN_LOCK_RELEASE_4:
6999 case BUILT_IN_LOCK_RELEASE_8:
7000 case BUILT_IN_LOCK_RELEASE_16:
7001 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_RELEASE_1);
7002 expand_builtin_lock_release (mode, exp);
7003 return const0_rtx;
7004
7005 case BUILT_IN_SYNCHRONIZE:
7006 expand_builtin_synchronize ();
7007 return const0_rtx;
7008
7009 case BUILT_IN_OBJECT_SIZE:
7010 return expand_builtin_object_size (exp);
7011
7012 case BUILT_IN_MEMCPY_CHK:
7013 case BUILT_IN_MEMPCPY_CHK:
7014 case BUILT_IN_MEMMOVE_CHK:
7015 case BUILT_IN_MEMSET_CHK:
7016 target = expand_builtin_memory_chk (exp, target, mode, fcode);
7017 if (target)
7018 return target;
7019 break;
7020
7021 case BUILT_IN_STRCPY_CHK:
7022 case BUILT_IN_STPCPY_CHK:
7023 case BUILT_IN_STRNCPY_CHK:
7024 case BUILT_IN_STRCAT_CHK:
7025 case BUILT_IN_STRNCAT_CHK:
7026 case BUILT_IN_SNPRINTF_CHK:
7027 case BUILT_IN_VSNPRINTF_CHK:
7028 maybe_emit_chk_warning (exp, fcode);
7029 break;
7030
7031 case BUILT_IN_SPRINTF_CHK:
7032 case BUILT_IN_VSPRINTF_CHK:
7033 maybe_emit_sprintf_chk_warning (exp, fcode);
7034 break;
7035
7036 case BUILT_IN_FREE:
7037 maybe_emit_free_warning (exp);
7038 break;
7039
7040 default: /* just do library call, if unknown builtin */
7041 break;
7042 }
7043
7044 /* The switch statement above can drop through to cause the function
7045 to be called normally. */
7046 return expand_call (exp, target, ignore);
7047 }
7048
7049 /* Determine whether a tree node represents a call to a built-in
7050 function. If the tree T is a call to a built-in function with
7051 the right number of arguments of the appropriate types, return
7052 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7053 Otherwise the return value is END_BUILTINS. */
7054
enum built_in_function
builtin_mathfn_code (const_tree t)
{
  const_tree fndecl, arg, parmlist;
  const_tree argtype, parmtype;
  const_call_expr_arg_iterator iter;

  /* Only a direct call through an ADDR_EXPR can name a builtin.  */
  if (TREE_CODE (t) != CALL_EXPR
      || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
    return END_BUILTINS;

  /* The callee must be a declared builtin, and not a machine-specific
     (BUILT_IN_MD) one.  */
  fndecl = get_callee_fndecl (t);
  if (fndecl == NULL_TREE
      || TREE_CODE (fndecl) != FUNCTION_DECL
      || ! DECL_BUILT_IN (fndecl)
      || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return END_BUILTINS;

  /* Walk the declared parameter types and the actual call arguments in
     parallel, checking that each argument falls in the same broad type
     class as the corresponding parameter.  */
  parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  init_const_call_expr_arg_iterator (t, &iter);
  for (; parmlist; parmlist = TREE_CHAIN (parmlist))
    {
      /* If a function doesn't take a variable number of arguments,
	 the last element in the list will have type `void'.  */
      parmtype = TREE_VALUE (parmlist);
      if (VOID_TYPE_P (parmtype))
	{
	  /* Reaching `void' with call arguments left over means the
	     call passed too many arguments.  */
	  if (more_const_call_expr_args_p (&iter))
	    return END_BUILTINS;
	  return DECL_FUNCTION_CODE (fndecl);
	}

      /* Running out of call arguments before parameters means too few
	 arguments were passed.  */
      if (! more_const_call_expr_args_p (&iter))
	return END_BUILTINS;

      arg = next_const_call_expr_arg (&iter);
      argtype = TREE_TYPE (arg);

      if (SCALAR_FLOAT_TYPE_P (parmtype))
	{
	  if (! SCALAR_FLOAT_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (COMPLEX_FLOAT_TYPE_P (parmtype))
	{
	  if (! COMPLEX_FLOAT_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (POINTER_TYPE_P (parmtype))
	{
	  if (! POINTER_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (INTEGRAL_TYPE_P (parmtype))
	{
	  if (! INTEGRAL_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else
	/* Parameter type classes other than the above are never
	   accepted.  */
	return END_BUILTINS;
    }

  /* Variable-length argument list.  */
  return DECL_FUNCTION_CODE (fndecl);
}
7120
7121 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7122 evaluate to a constant. */
7123
static tree
fold_builtin_constant_p (tree arg)
{
  /* We return 1 for a numeric type that's known to be a constant
     value at compile-time or for an aggregate type that's a
     literal constant.  */
  STRIP_NOPS (arg);

  /* If we know this is a constant, emit the constant of one.  */
  if (CONSTANT_CLASS_P (arg)
      || (TREE_CODE (arg) == CONSTRUCTOR
	  && TREE_CONSTANT (arg)))
    return integer_one_node;
  if (TREE_CODE (arg) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (arg, 0);
      /* The address of a string literal, possibly through a
	 zero-offset ARRAY_REF into it, is a constant as well.  */
      if (TREE_CODE (op) == STRING_CST
	  || (TREE_CODE (op) == ARRAY_REF
	      && integer_zerop (TREE_OPERAND (op, 1))
	      && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
	return integer_one_node;
    }

  /* If this expression has side effects, show we don't know it to be a
     constant.  Likewise if it's a pointer or aggregate type since in
     those case we only want literals, since those are only optimized
     when generating RTL, not later.
     And finally, if we are compiling an initializer, not code, we
     need to return a definite result now; there's not going to be any
     more optimization done.  */
  if (TREE_SIDE_EFFECTS (arg)
      || AGGREGATE_TYPE_P (TREE_TYPE (arg))
      || POINTER_TYPE_P (TREE_TYPE (arg))
      || cfun == 0
      || folding_initializer)
    return integer_zero_node;

  /* Otherwise the answer is not yet known: return NULL_TREE so the
     call survives for later passes or RTL expansion to resolve.  */
  return NULL_TREE;
}
7163
7164 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7165 return it as a truthvalue. */
7166
7167 static tree
7168 build_builtin_expect_predicate (tree pred, tree expected)
7169 {
7170 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7171
7172 fn = built_in_decls[BUILT_IN_EXPECT];
7173 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7174 ret_type = TREE_TYPE (TREE_TYPE (fn));
7175 pred_type = TREE_VALUE (arg_types);
7176 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7177
7178 pred = fold_convert (pred_type, pred);
7179 expected = fold_convert (expected_type, expected);
7180 call_expr = build_call_expr (fn, 2, pred, expected);
7181
7182 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7183 build_int_cst (ret_type, 0));
7184 }
7185
7186 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
7187 NULL_TREE if no simplification is possible. */
7188
static tree
fold_builtin_expect (tree arg0, tree arg1)
{
  tree inner, fndecl;
  enum tree_code code;

  /* If this is a builtin_expect within a builtin_expect keep the
     inner one.  See through a comparison against a constant.  It
     might have been added to create a truthvalue.  */
  inner = arg0;
  if (COMPARISON_CLASS_P (inner)
      && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
    inner = TREE_OPERAND (inner, 0);

  if (TREE_CODE (inner) == CALL_EXPR
      && (fndecl = get_callee_fndecl (inner))
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
    return arg0;

  /* Distribute the expected value over short-circuiting operators.
     See through the cast from truthvalue_type_node to long.  */
  inner = arg0;
  while (TREE_CODE (inner) == NOP_EXPR
	 && INTEGRAL_TYPE_P (TREE_TYPE (inner))
	 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner, 0))))
    inner = TREE_OPERAND (inner, 0);

  code = TREE_CODE (inner);
  if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
    {
      /* Rewrite __builtin_expect (a && b, v) into
	 __builtin_expect (a, v) && __builtin_expect (b, v) so each
	 subcondition carries the prediction; likewise for ||.  */
      tree op0 = TREE_OPERAND (inner, 0);
      tree op1 = TREE_OPERAND (inner, 1);

      op0 = build_builtin_expect_predicate (op0, arg1);
      op1 = build_builtin_expect_predicate (op1, arg1);
      inner = build2 (code, TREE_TYPE (inner), op0, op1);

      return fold_convert (TREE_TYPE (arg0), inner);
    }

  /* If the argument isn't invariant then there's nothing else we can do.  */
  if (!TREE_CONSTANT (arg0))
    return NULL_TREE;

  /* If we expect that a comparison against the argument will fold to
     a constant return the constant.  In practice, this means a true
     constant or the address of a non-weak symbol.  */
  inner = arg0;
  STRIP_NOPS (inner);
  if (TREE_CODE (inner) == ADDR_EXPR)
    {
      /* Walk down through component and array references to the
	 underlying declaration.  */
      do
	{
	  inner = TREE_OPERAND (inner, 0);
	}
      while (TREE_CODE (inner) == COMPONENT_REF
	     || TREE_CODE (inner) == ARRAY_REF);
      /* A weak symbol's address is not usable as a constant here.  */
      if (DECL_P (inner) && DECL_WEAK (inner))
	return NULL_TREE;
    }

  /* Otherwise, ARG0 already has the proper type for the return value.  */
  return arg0;
}
7254
7255 /* Fold a call to __builtin_classify_type with argument ARG. */
7256
7257 static tree
7258 fold_builtin_classify_type (tree arg)
7259 {
7260 if (arg == 0)
7261 return build_int_cst (NULL_TREE, no_type_class);
7262
7263 return build_int_cst (NULL_TREE, type_to_class (TREE_TYPE (arg)));
7264 }
7265
7266 /* Fold a call to __builtin_strlen with argument ARG. */
7267
7268 static tree
7269 fold_builtin_strlen (tree arg)
7270 {
7271 if (!validate_arg (arg, POINTER_TYPE))
7272 return NULL_TREE;
7273 else
7274 {
7275 tree len = c_strlen (arg, 0);
7276
7277 if (len)
7278 {
7279 /* Convert from the internal "sizetype" type to "size_t". */
7280 if (size_type_node)
7281 len = fold_convert (size_type_node, len);
7282 return len;
7283 }
7284
7285 return NULL_TREE;
7286 }
7287 }
7288
7289 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7290
7291 static tree
7292 fold_builtin_inf (tree type, int warn)
7293 {
7294 REAL_VALUE_TYPE real;
7295
7296 /* __builtin_inff is intended to be usable to define INFINITY on all
7297 targets. If an infinity is not available, INFINITY expands "to a
7298 positive constant of type float that overflows at translation
7299 time", footnote "In this case, using INFINITY will violate the
7300 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7301 Thus we pedwarn to ensure this constraint violation is
7302 diagnosed. */
7303 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7304 pedwarn (input_location, 0, "target format does not support infinity");
7305
7306 real_inf (&real);
7307 return build_real (type, real);
7308 }
7309
7310 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7311
7312 static tree
7313 fold_builtin_nan (tree arg, tree type, int quiet)
7314 {
7315 REAL_VALUE_TYPE real;
7316 const char *str;
7317
7318 if (!validate_arg (arg, POINTER_TYPE))
7319 return NULL_TREE;
7320 str = c_getstr (arg);
7321 if (!str)
7322 return NULL_TREE;
7323
7324 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7325 return NULL_TREE;
7326
7327 return build_real (type, real);
7328 }
7329
7330 /* Return true if the floating point expression T has an integer value.
7331 We also allow +Inf, -Inf and NaN to be considered integer values. */
7332
static bool
integer_valued_real_p (tree t)
{
  switch (TREE_CODE (t))
    {
    /* A conversion from an integer type is integer valued by
       construction.  */
    case FLOAT_EXPR:
      return true;

    case ABS_EXPR:
    case SAVE_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 0));

    /* The value of these expressions is their second operand.  */
    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 1));

    /* Sums, differences, products, minima and maxima of integer
       values are integer valued.  */
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 0))
	     && integer_valued_real_p (TREE_OPERAND (t, 1));

    /* A conditional is integer valued if both of its arms are.  */
    case COND_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 1))
	     && integer_valued_real_p (TREE_OPERAND (t, 2));

    case REAL_CST:
      return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));

    case NOP_EXPR:
      {
	/* A conversion from an integer type is integer valued; a
	   conversion from another floating type preserves the
	   property of its operand.  */
	tree type = TREE_TYPE (TREE_OPERAND (t, 0));
	if (TREE_CODE (type) == INTEGER_TYPE)
	  return true;
	if (TREE_CODE (type) == REAL_TYPE)
	  return integer_valued_real_p (TREE_OPERAND (t, 0));
	break;
      }

    case CALL_EXPR:
      switch (builtin_mathfn_code (t))
	{
	/* The rounding builtins always produce integer values.  */
	CASE_FLT_FN (BUILT_IN_CEIL):
	CASE_FLT_FN (BUILT_IN_FLOOR):
	CASE_FLT_FN (BUILT_IN_NEARBYINT):
	CASE_FLT_FN (BUILT_IN_RINT):
	CASE_FLT_FN (BUILT_IN_ROUND):
	CASE_FLT_FN (BUILT_IN_TRUNC):
	  return true;

	/* fmin/fmax are integer valued when both arguments are.  */
	CASE_FLT_FN (BUILT_IN_FMIN):
	CASE_FLT_FN (BUILT_IN_FMAX):
	  return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
		 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));

	default:
	  break;
	}
      break;

    default:
      break;
    }
  /* Anything not recognized above is conservatively assumed to have a
     fractional part.  */
  return false;
}
7401
7402 /* FNDECL is assumed to be a builtin where truncation can be propagated
7403 across (for instance floor((double)f) == (double)floorf (f).
7404 Do the transformation for a call with argument ARG. */
7405
static tree
fold_trunc_transparent_mathfn (tree fndecl, tree arg)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Integer rounding functions are idempotent.  */
  if (fcode == builtin_mathfn_code (arg))
    return arg;

  /* If argument is already integer valued, and we don't need to worry
     about setting errno, there's no need to perform rounding.  */
  if (! flag_errno_math && integer_valued_real_p (arg))
    return arg;

  if (optimize)
    {
      /* Narrow the operation, e.g. floor ((double) f) becomes
	 (double) floorf (f), when the argument is only a widened
	 narrower value and a builtin exists for the narrower type.  */
      tree arg0 = strip_float_extensions (arg);
      tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
      tree newtype = TREE_TYPE (arg0);
      tree decl;

      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
	  && (decl = mathfn_built_in (newtype, fcode)))
	return fold_convert (ftype,
			     build_call_expr (decl, 1,
					      fold_convert (newtype, arg0)));
    }
  return NULL_TREE;
}
7438
7439 /* FNDECL is assumed to be builtin which can narrow the FP type of
7440 the argument, for instance lround((double)f) -> lroundf (f).
7441 Do the transformation for a call with argument ARG. */
7442
7443 static tree
7444 fold_fixed_mathfn (tree fndecl, tree arg)
7445 {
7446 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7447
7448 if (!validate_arg (arg, REAL_TYPE))
7449 return NULL_TREE;
7450
7451 /* If argument is already integer valued, and we don't need to worry
7452 about setting errno, there's no need to perform rounding. */
7453 if (! flag_errno_math && integer_valued_real_p (arg))
7454 return fold_build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)), arg);
7455
7456 if (optimize)
7457 {
7458 tree ftype = TREE_TYPE (arg);
7459 tree arg0 = strip_float_extensions (arg);
7460 tree newtype = TREE_TYPE (arg0);
7461 tree decl;
7462
7463 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7464 && (decl = mathfn_built_in (newtype, fcode)))
7465 return build_call_expr (decl, 1, fold_convert (newtype, arg0));
7466 }
7467
7468 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7469 sizeof (long long) == sizeof (long). */
7470 if (TYPE_PRECISION (long_long_integer_type_node)
7471 == TYPE_PRECISION (long_integer_type_node))
7472 {
7473 tree newfn = NULL_TREE;
7474 switch (fcode)
7475 {
7476 CASE_FLT_FN (BUILT_IN_LLCEIL):
7477 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7478 break;
7479
7480 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7481 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7482 break;
7483
7484 CASE_FLT_FN (BUILT_IN_LLROUND):
7485 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7486 break;
7487
7488 CASE_FLT_FN (BUILT_IN_LLRINT):
7489 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7490 break;
7491
7492 default:
7493 break;
7494 }
7495
7496 if (newfn)
7497 {
7498 tree newcall = build_call_expr(newfn, 1, arg);
7499 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7500 }
7501 }
7502
7503 return NULL_TREE;
7504 }
7505
7506 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7507 return type. Return NULL_TREE if no simplification can be made. */
7508
static tree
fold_builtin_cabs (tree arg, tree type, tree fndecl)
{
  tree res;

  /* The argument must be complex with floating point parts.  */
  if (TREE_CODE (TREE_TYPE (arg)) != COMPLEX_TYPE
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
    return NULL_TREE;

  /* Calculate the result when the argument is a constant:
     cabs (x + yi) == hypot (x, y).  */
  if (TREE_CODE (arg) == COMPLEX_CST
      && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
			      type, mpfr_hypot)))
    return res;

  if (TREE_CODE (arg) == COMPLEX_EXPR)
    {
      tree real = TREE_OPERAND (arg, 0);
      tree imag = TREE_OPERAND (arg, 1);

      /* If either part is zero, cabs is fabs of the other.  */
      if (real_zerop (real))
	return fold_build1 (ABS_EXPR, type, imag);
      if (real_zerop (imag))
	return fold_build1 (ABS_EXPR, type, real);

      /* cabs(x+xi) -> fabs(x)*sqrt(2).  */
      if (flag_unsafe_math_optimizations
	  && operand_equal_p (real, imag, OEP_PURE_SAME))
	{
	  const REAL_VALUE_TYPE sqrt2_trunc
	    = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
	  STRIP_NOPS (real);
	  return fold_build2 (MULT_EXPR, type,
			      fold_build1 (ABS_EXPR, type, real),
			      build_real (type, sqrt2_trunc));
	}
    }

  /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z).  */
  if (TREE_CODE (arg) == NEGATE_EXPR
      || TREE_CODE (arg) == CONJ_EXPR)
    return build_call_expr (fndecl, 1, TREE_OPERAND (arg, 0));

  /* Don't do this when optimizing for size.  */
  if (flag_unsafe_math_optimizations
      && optimize && optimize_function_for_speed_p (cfun))
    {
      tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);

      if (sqrtfn != NULL_TREE)
	{
	  /* Expand cabs inline as sqrt (x*x + y*y), saving the
	     argument and its parts via builtin_save_expr so they are
	     not evaluated more than once.  */
	  tree rpart, ipart, result;

	  arg = builtin_save_expr (arg);

	  rpart = fold_build1 (REALPART_EXPR, type, arg);
	  ipart = fold_build1 (IMAGPART_EXPR, type, arg);

	  rpart = builtin_save_expr (rpart);
	  ipart = builtin_save_expr (ipart);

	  result = fold_build2 (PLUS_EXPR, type,
				fold_build2 (MULT_EXPR, type,
					     rpart, rpart),
				fold_build2 (MULT_EXPR, type,
					     ipart, ipart));

	  return build_call_expr (sqrtfn, 1, result);
	}
    }

  return NULL_TREE;
}
7583
7584 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7585 Return NULL_TREE if no simplification can be made. */
7586
static tree
fold_builtin_sqrt (tree arg, tree type)
{

  enum built_in_function fcode;
  tree res;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  The domain
     restriction &dconst0 makes sqrt of a negative constant fail.  */
  if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
    return res;

  /* Optimize sqrt(expN(x)) = expN(x*0.5).  */
  fcode = builtin_mathfn_code (arg);
  if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
    {
      tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
      arg = fold_build2 (MULT_EXPR, type,
			 CALL_EXPR_ARG (arg, 0),
			 build_real (type, dconsthalf));
      return build_call_expr (expfn, 1, arg);
    }

  /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)).  */
  if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
    {
      tree powfn = mathfn_built_in (type, BUILT_IN_POW);

      if (powfn)
	{
	  tree arg0 = CALL_EXPR_ARG (arg, 0);
	  tree tree_root;
	  /* The inner root was either sqrt or cbrt.  */
	  REAL_VALUE_TYPE dconstroot =
	    BUILTIN_SQRT_P (fcode) ? dconsthalf : dconst_third ();

	  /* Adjust for the outer root: decrementing the binary
	     exponent halves the constant, giving 1/(2*N).  */
	  SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
	  dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
	  tree_root = build_real (type, dconstroot);
	  return build_call_expr (powfn, 2, arg0, tree_root);
	}
    }

  /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5).  */
  if (flag_unsafe_math_optimizations
      && (fcode == BUILT_IN_POW
	  || fcode == BUILT_IN_POWF
	  || fcode == BUILT_IN_POWL))
    {
      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
      tree arg0 = CALL_EXPR_ARG (arg, 0);
      tree arg1 = CALL_EXPR_ARG (arg, 1);
      tree narg1;
      /* Use |x| unless x is known nonnegative, since
	 sqrt (x**y) == |x|**(y*0.5).  */
      if (!tree_expr_nonnegative_p (arg0))
	arg0 = build1 (ABS_EXPR, type, arg0);
      narg1 = fold_build2 (MULT_EXPR, type, arg1,
			   build_real (type, dconsthalf));
      return build_call_expr (powfn, 2, arg0, narg1);
    }

  return NULL_TREE;
}
7652
7653 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7654 Return NULL_TREE if no simplification can be made. */
7655
static tree
fold_builtin_cbrt (tree arg, tree type)
{
  const enum built_in_function fcode = builtin_mathfn_code (arg);
  tree res;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
    return res;

  /* All of the following rewrites can reorder roundings, so they are
     only done under -funsafe-math-optimizations.  */
  if (flag_unsafe_math_optimizations)
    {
      /* Optimize cbrt(expN(x)) -> expN(x/3).  */
      if (BUILTIN_EXPONENT_P (fcode))
	{
	  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
	  const REAL_VALUE_TYPE third_trunc =
	    real_value_truncate (TYPE_MODE (type), dconst_third ());
	  arg = fold_build2 (MULT_EXPR, type,
			     CALL_EXPR_ARG (arg, 0),
			     build_real (type, third_trunc));
	  return build_call_expr (expfn, 1, arg);
	}

      /* Optimize cbrt(sqrt(x)) -> pow(x,1/6).  */
      if (BUILTIN_SQRT_P (fcode))
	{
	  tree powfn = mathfn_built_in (type, BUILT_IN_POW);

	  if (powfn)
	    {
	      tree arg0 = CALL_EXPR_ARG (arg, 0);
	      tree tree_root;
	      REAL_VALUE_TYPE dconstroot = dconst_third ();

	      /* Decrementing the binary exponent halves 1/3,
		 yielding the 1/6 exponent.  */
	      SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
	      dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
	      tree_root = build_real (type, dconstroot);
	      return build_call_expr (powfn, 2, arg0, tree_root);
	    }
	}

      /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative.  */
      if (BUILTIN_CBRT_P (fcode))
	{
	  tree arg0 = CALL_EXPR_ARG (arg, 0);
	  if (tree_expr_nonnegative_p (arg0))
	    {
	      tree powfn = mathfn_built_in (type, BUILT_IN_POW);

	      if (powfn)
		{
		  tree tree_root;
		  REAL_VALUE_TYPE dconstroot;

		  /* 1/9 == (1/3) * (1/3).  */
		  real_arithmetic (&dconstroot, MULT_EXPR,
				   dconst_third_ptr (), dconst_third_ptr ());
		  dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
		  tree_root = build_real (type, dconstroot);
		  return build_call_expr (powfn, 2, arg0, tree_root);
		}
	    }
	}

      /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative.  */
      if (fcode == BUILT_IN_POW
	  || fcode == BUILT_IN_POWF
	  || fcode == BUILT_IN_POWL)
	{
	  tree arg00 = CALL_EXPR_ARG (arg, 0);
	  tree arg01 = CALL_EXPR_ARG (arg, 1);
	  if (tree_expr_nonnegative_p (arg00))
	    {
	      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
	      const REAL_VALUE_TYPE dconstroot
		= real_value_truncate (TYPE_MODE (type), dconst_third ());
	      tree narg01 = fold_build2 (MULT_EXPR, type, arg01,
					 build_real (type, dconstroot));
	      return build_call_expr (powfn, 2, arg00, narg01);
	    }
	}
    }
  return NULL_TREE;
}
7743
7744 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7745 TYPE is the type of the return value. Return NULL_TREE if no
7746 simplification can be made. */
7747
7748 static tree
7749 fold_builtin_cos (tree arg, tree type, tree fndecl)
7750 {
7751 tree res, narg;
7752
7753 if (!validate_arg (arg, REAL_TYPE))
7754 return NULL_TREE;
7755
7756 /* Calculate the result when the argument is a constant. */
7757 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7758 return res;
7759
7760 /* Optimize cos(-x) into cos (x). */
7761 if ((narg = fold_strip_sign_ops (arg)))
7762 return build_call_expr (fndecl, 1, narg);
7763
7764 return NULL_TREE;
7765 }
7766
7767 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7768 Return NULL_TREE if no simplification can be made. */
7769
7770 static tree
7771 fold_builtin_cosh (tree arg, tree type, tree fndecl)
7772 {
7773 if (validate_arg (arg, REAL_TYPE))
7774 {
7775 tree res, narg;
7776
7777 /* Calculate the result when the argument is a constant. */
7778 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7779 return res;
7780
7781 /* Optimize cosh(-x) into cosh (x). */
7782 if ((narg = fold_strip_sign_ops (arg)))
7783 return build_call_expr (fndecl, 1, narg);
7784 }
7785
7786 return NULL_TREE;
7787 }
7788
7789 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7790 Return NULL_TREE if no simplification can be made. */
7791
7792 static tree
7793 fold_builtin_tan (tree arg, tree type)
7794 {
7795 enum built_in_function fcode;
7796 tree res;
7797
7798 if (!validate_arg (arg, REAL_TYPE))
7799 return NULL_TREE;
7800
7801 /* Calculate the result when the argument is a constant. */
7802 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7803 return res;
7804
7805 /* Optimize tan(atan(x)) = x. */
7806 fcode = builtin_mathfn_code (arg);
7807 if (flag_unsafe_math_optimizations
7808 && (fcode == BUILT_IN_ATAN
7809 || fcode == BUILT_IN_ATANF
7810 || fcode == BUILT_IN_ATANL))
7811 return CALL_EXPR_ARG (arg, 0);
7812
7813 return NULL_TREE;
7814 }
7815
/* Fold function call to builtin sincos, sincosf, or sincosl.  ARG0 is
   the angle; ARG1 and ARG2 are the pointers through which the sine
   and cosine results are stored.  Return NULL_TREE if no
   simplification can be made.  */

static tree
fold_builtin_sincos (tree arg0, tree arg1, tree arg2)
{
  tree type;
  tree res, fn, call;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE))
    return NULL_TREE;

  type = TREE_TYPE (arg0);

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
    return res;

  /* Canonicalize sincos to cexpi, but only when the target provides
     the C99 complex functions.  */
  if (!TARGET_C99_FUNCTIONS)
    return NULL_TREE;
  fn = mathfn_built_in (type, BUILT_IN_CEXPI);
  if (!fn)
    return NULL_TREE;

  /* Wrap the cexpi call in a save_expr so it is evaluated only once,
     even though both its real and imaginary parts are used below.  */
  call = build_call_expr (fn, 1, arg0);
  call = builtin_save_expr (call);

  /* Store the imaginary part (the sine) through ARG1 and the real
     part (the cosine) through ARG2.  */
  return build2 (COMPOUND_EXPR, type,
		 build2 (MODIFY_EXPR, void_type_node,
			 build_fold_indirect_ref (arg1),
			 build1 (IMAGPART_EXPR, type, call)),
		 build2 (MODIFY_EXPR, void_type_node,
			 build_fold_indirect_ref (arg2),
			 build1 (REALPART_EXPR, type, call)));
}
7854
/* Fold function call to builtin cexp, cexpf, or cexpl.  ARG0 is the
   complex argument, TYPE the (complex) return type of the call.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cexp (tree arg0, tree type)
{
  tree rtype;
  tree realp, imagp, ifn;

  if (!validate_arg (arg0, COMPLEX_TYPE))
    return NULL_TREE;

  /* RTYPE is the scalar component type of the complex argument.  */
  rtype = TREE_TYPE (TREE_TYPE (arg0));

  /* In case we can figure out the real part of arg0 and it is constant zero
     fold to cexpi.  Both transforms below need the C99 cexpi builtin.  */
  if (!TARGET_C99_FUNCTIONS)
    return NULL_TREE;
  ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
  if (!ifn)
    return NULL_TREE;

  /* cexp(0 + I*y) == cexpi(y).  */
  if ((realp = fold_unary (REALPART_EXPR, rtype, arg0))
      && real_zerop (realp))
    {
      tree narg = fold_build1 (IMAGPART_EXPR, rtype, arg0);
      return build_call_expr (ifn, 1, narg);
    }

  /* In case we can easily decompose real and imaginary parts split cexp
     to exp (r) * cexpi (i).  */
  if (flag_unsafe_math_optimizations
      && realp)
    {
      tree rfn, rcall, icall;

      rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
      if (!rfn)
	return NULL_TREE;

      imagp = fold_unary (IMAGPART_EXPR, rtype, arg0);
      if (!imagp)
	return NULL_TREE;

      /* Save both calls: icall is used twice (its real and imaginary
	 parts) and rcall is used twice as a factor.  */
      icall = build_call_expr (ifn, 1, imagp);
      icall = builtin_save_expr (icall);
      rcall = build_call_expr (rfn, 1, realp);
      rcall = builtin_save_expr (rcall);
      /* Result is exp(r)*cos(i) + I * exp(r)*sin(i).  */
      return fold_build2 (COMPLEX_EXPR, type,
			  fold_build2 (MULT_EXPR, rtype,
				       rcall,
				       fold_build1 (REALPART_EXPR, rtype, icall)),
			  fold_build2 (MULT_EXPR, rtype,
				       rcall,
				       fold_build1 (IMAGPART_EXPR, rtype, icall)));
    }

  return NULL_TREE;
}
7914
7915 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7916 Return NULL_TREE if no simplification can be made. */
7917
7918 static tree
7919 fold_builtin_trunc (tree fndecl, tree arg)
7920 {
7921 if (!validate_arg (arg, REAL_TYPE))
7922 return NULL_TREE;
7923
7924 /* Optimize trunc of constant value. */
7925 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7926 {
7927 REAL_VALUE_TYPE r, x;
7928 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7929
7930 x = TREE_REAL_CST (arg);
7931 real_trunc (&r, TYPE_MODE (type), &x);
7932 return build_real (type, r);
7933 }
7934
7935 return fold_trunc_transparent_mathfn (fndecl, arg);
7936 }
7937
7938 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7939 Return NULL_TREE if no simplification can be made. */
7940
7941 static tree
7942 fold_builtin_floor (tree fndecl, tree arg)
7943 {
7944 if (!validate_arg (arg, REAL_TYPE))
7945 return NULL_TREE;
7946
7947 /* Optimize floor of constant value. */
7948 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7949 {
7950 REAL_VALUE_TYPE x;
7951
7952 x = TREE_REAL_CST (arg);
7953 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7954 {
7955 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7956 REAL_VALUE_TYPE r;
7957
7958 real_floor (&r, TYPE_MODE (type), &x);
7959 return build_real (type, r);
7960 }
7961 }
7962
7963 /* Fold floor (x) where x is nonnegative to trunc (x). */
7964 if (tree_expr_nonnegative_p (arg))
7965 {
7966 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7967 if (truncfn)
7968 return build_call_expr (truncfn, 1, arg);
7969 }
7970
7971 return fold_trunc_transparent_mathfn (fndecl, arg);
7972 }
7973
7974 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7975 Return NULL_TREE if no simplification can be made. */
7976
7977 static tree
7978 fold_builtin_ceil (tree fndecl, tree arg)
7979 {
7980 if (!validate_arg (arg, REAL_TYPE))
7981 return NULL_TREE;
7982
7983 /* Optimize ceil of constant value. */
7984 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7985 {
7986 REAL_VALUE_TYPE x;
7987
7988 x = TREE_REAL_CST (arg);
7989 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7990 {
7991 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7992 REAL_VALUE_TYPE r;
7993
7994 real_ceil (&r, TYPE_MODE (type), &x);
7995 return build_real (type, r);
7996 }
7997 }
7998
7999 return fold_trunc_transparent_mathfn (fndecl, arg);
8000 }
8001
8002 /* Fold function call to builtin round, roundf or roundl with argument ARG.
8003 Return NULL_TREE if no simplification can be made. */
8004
8005 static tree
8006 fold_builtin_round (tree fndecl, tree arg)
8007 {
8008 if (!validate_arg (arg, REAL_TYPE))
8009 return NULL_TREE;
8010
8011 /* Optimize round of constant value. */
8012 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8013 {
8014 REAL_VALUE_TYPE x;
8015
8016 x = TREE_REAL_CST (arg);
8017 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8018 {
8019 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8020 REAL_VALUE_TYPE r;
8021
8022 real_round (&r, TYPE_MODE (type), &x);
8023 return build_real (type, r);
8024 }
8025 }
8026
8027 return fold_trunc_transparent_mathfn (fndecl, arg);
8028 }
8029
/* Fold function call to builtin lround, lroundf or lroundl (or the
   corresponding long long versions) and other rounding functions.  ARG
   is the argument to the call.  Return NULL_TREE if no simplification
   can be made.  */

static tree
fold_builtin_int_roundingfn (tree fndecl, tree arg)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Optimize lround of constant value.  */
  if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);

      /* Infinities and NaNs have no meaningful integer result, so
	 only finite constants are folded.  */
      if (real_isfinite (&x))
	{
	  tree itype = TREE_TYPE (TREE_TYPE (fndecl));
	  tree ftype = TREE_TYPE (arg);
	  unsigned HOST_WIDE_INT lo2;
	  HOST_WIDE_INT hi, lo;
	  REAL_VALUE_TYPE r;

	  /* First apply the rounding appropriate to this builtin in
	     the floating-point mode.  */
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    CASE_FLT_FN (BUILT_IN_LFLOOR):
	    CASE_FLT_FN (BUILT_IN_LLFLOOR):
	      real_floor (&r, TYPE_MODE (ftype), &x);
	      break;

	    CASE_FLT_FN (BUILT_IN_LCEIL):
	    CASE_FLT_FN (BUILT_IN_LLCEIL):
	      real_ceil (&r, TYPE_MODE (ftype), &x);
	      break;

	    CASE_FLT_FN (BUILT_IN_LROUND):
	    CASE_FLT_FN (BUILT_IN_LLROUND):
	      real_round (&r, TYPE_MODE (ftype), &x);
	      break;

	    default:
	      gcc_unreachable ();
	    }

	  /* Convert to a double-word integer; NOTE(review):
	     fit_double_type appears to return nonzero on overflow, so
	     the fold is emitted only when the value fits ITYPE —
	     confirm against its definition.  */
	  REAL_VALUE_TO_INT (&lo, &hi, r);
	  if (!fit_double_type (lo, hi, &lo2, &hi, itype))
	    return build_int_cst_wide (itype, lo2, hi);
	}
    }

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x).  */
      if (tree_expr_nonnegative_p (arg))
	return fold_build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)),
			    arg);
      break;
    default:;
    }

  return fold_fixed_mathfn (fndecl, arg);
}
8095
/* Fold function call to builtin ffs, clz, ctz, popcount and parity
   and their long and long long variants (i.e. ffsl and ffsll).  ARG is
   the argument to the call.  Return NULL_TREE if no simplification can
   be made.  */

static tree
fold_builtin_bitop (tree fndecl, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Optimize for constant argument.  */
  if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
    {
      HOST_WIDE_INT hi, width, result;
      unsigned HOST_WIDE_INT lo;
      tree type;

      /* The constant is held as a LO/HI pair of host words.  */
      type = TREE_TYPE (arg);
      width = TYPE_PRECISION (type);
      lo = TREE_INT_CST_LOW (arg);

      /* Clear all the bits that are beyond the type's precision.  */
      if (width > HOST_BITS_PER_WIDE_INT)
	{
	  hi = TREE_INT_CST_HIGH (arg);
	  if (width < 2 * HOST_BITS_PER_WIDE_INT)
	    hi &= ~((HOST_WIDE_INT) (-1) >> (width - HOST_BITS_PER_WIDE_INT));
	}
      else
	{
	  hi = 0;
	  if (width < HOST_BITS_PER_WIDE_INT)
	    lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
	}

      switch (DECL_FUNCTION_CODE (fndecl))
	{
	CASE_INT_FN (BUILT_IN_FFS):
	  /* ffs: 1-based index of the least significant set bit,
	     or 0 when no bit is set.  */
	  if (lo != 0)
	    result = exact_log2 (lo & -lo) + 1;
	  else if (hi != 0)
	    result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi) + 1;
	  else
	    result = 0;
	  break;

	CASE_INT_FN (BUILT_IN_CLZ):
	  /* clz: leading zero count.  For a zero argument the target
	     macro may supply a defined value in RESULT; otherwise use
	     WIDTH.  */
	  if (hi != 0)
	    result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
	  else if (lo != 0)
	    result = width - floor_log2 (lo) - 1;
	  else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
	    result = width;
	  break;

	CASE_INT_FN (BUILT_IN_CTZ):
	  /* ctz: trailing zero count, with the same zero-argument
	     handling as clz.  */
	  if (lo != 0)
	    result = exact_log2 (lo & -lo);
	  else if (hi != 0)
	    result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi);
	  else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
	    result = width;
	  break;

	CASE_INT_FN (BUILT_IN_POPCOUNT):
	  /* popcount: count set bits by repeatedly clearing the
	     lowest one (Kernighan's method) in both halves.  */
	  result = 0;
	  while (lo)
	    result++, lo &= lo - 1;
	  while (hi)
	    result++, hi &= hi - 1;
	  break;

	CASE_INT_FN (BUILT_IN_PARITY):
	  /* parity: popcount reduced modulo 2.  */
	  result = 0;
	  while (lo)
	    result++, lo &= lo - 1;
	  while (hi)
	    result++, hi &= hi - 1;
	  result &= 1;
	  break;

	default:
	  gcc_unreachable ();
	}

      return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
    }

  return NULL_TREE;
}
8187
/* Fold function call to builtin_bswap and the long and long long
   variants.  FNDECL selects BSWAP32 or BSWAP64; ARG is the value
   whose bytes are to be reversed.  Return NULL_TREE if no
   simplification can be made.  */
static tree
fold_builtin_bswap (tree fndecl, tree arg)
{
  if (! validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Optimize constant value.  */
  if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
    {
      /* The constant and the result are each kept as a LO/HI pair of
	 host words, since WIDTH may exceed HOST_BITS_PER_WIDE_INT.  */
      HOST_WIDE_INT hi, width, r_hi = 0;
      unsigned HOST_WIDE_INT lo, r_lo = 0;
      tree type;

      type = TREE_TYPE (arg);
      width = TYPE_PRECISION (type);
      lo = TREE_INT_CST_LOW (arg);
      hi = TREE_INT_CST_HIGH (arg);

      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_BSWAP32:
	case BUILT_IN_BSWAP64:
	  {
	    int s;

	    /* Move the byte at bit position S to the mirrored
	       position D, selecting the source and destination word
	       of the LO/HI pair as appropriate.  */
	    for (s = 0; s < width; s += 8)
	      {
		int d = width - s - 8;
		unsigned HOST_WIDE_INT byte;

		if (s < HOST_BITS_PER_WIDE_INT)
		  byte = (lo >> s) & 0xff;
		else
		  byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;

		if (d < HOST_BITS_PER_WIDE_INT)
		  r_lo |= byte << d;
		else
		  r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
	      }
	  }

	  break;

	default:
	  gcc_unreachable ();
	}

      /* Build the folded constant in the call's return type.  */
      if (width < HOST_BITS_PER_WIDE_INT)
	return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), r_lo);
      else
	return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl)), r_lo, r_hi);
    }

  return NULL_TREE;
}
8246
8247 /* Return true if EXPR is the real constant contained in VALUE. */
8248
8249 static bool
8250 real_dconstp (tree expr, const REAL_VALUE_TYPE *value)
8251 {
8252 STRIP_NOPS (expr);
8253
8254 return ((TREE_CODE (expr) == REAL_CST
8255 && !TREE_OVERFLOW (expr)
8256 && REAL_VALUES_EQUAL (TREE_REAL_CST (expr), *value))
8257 || (TREE_CODE (expr) == COMPLEX_CST
8258 && real_dconstp (TREE_REALPART (expr), value)
8259 && real_zerop (TREE_IMAGPART (expr))));
8260 }
8261
/* A subroutine of fold_builtin to fold the various logarithmic
   functions.  Return NULL_TREE if no simplification can be made.
   FUNC is the corresponding MPFR logarithm function (mpfr_log,
   mpfr_log2 or mpfr_log10), which also identifies which log variant
   is being folded.  */

static tree
fold_builtin_logarithm (tree fndecl, tree arg,
			int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
{
  if (validate_arg (arg, REAL_TYPE))
    {
      tree type = TREE_TYPE (TREE_TYPE (fndecl));
      tree res;
      const enum built_in_function fcode = builtin_mathfn_code (arg);

      /* Optimize log(e) = 1.0.  We're never passed an exact 'e',
	 instead we'll look for 'e' truncated to MODE.  So only do
	 this if flag_unsafe_math_optimizations is set.  */
      if (flag_unsafe_math_optimizations && func == mpfr_log)
	{
	  const REAL_VALUE_TYPE e_truncated =
	    real_value_truncate (TYPE_MODE (type), dconst_e ());
	  if (real_dconstp (arg, &e_truncated))
	    return build_real (type, dconst1);
	}

      /* Calculate the result when the argument is a constant.
	 NOTE(review): the &dconst0 argument appears to restrict
	 constant folding to positive inputs — confirm against
	 do_mpfr_arg1.  */
      if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
	return res;

      /* Special case, optimize logN(expN(x)) = x.  */
      if (flag_unsafe_math_optimizations
	  && ((func == mpfr_log
	       && (fcode == BUILT_IN_EXP
		   || fcode == BUILT_IN_EXPF
		   || fcode == BUILT_IN_EXPL))
	      || (func == mpfr_log2
		  && (fcode == BUILT_IN_EXP2
		      || fcode == BUILT_IN_EXP2F
		      || fcode == BUILT_IN_EXP2L))
	      || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
	return fold_convert (type, CALL_EXPR_ARG (arg, 0));

      /* Optimize logN(func()) for various exponential functions.  We
	 want to determine the value "x" and the power "exponent" in
	 order to transform logN(x**exponent) into exponent*logN(x).  */
      if (flag_unsafe_math_optimizations)
	{
	  tree exponent = 0, x = 0;

	  switch (fcode)
	    {
	    CASE_FLT_FN (BUILT_IN_EXP):
	      /* Prepare to do logN(exp(exponent) -> exponent*logN(e).  */
	      x = build_real (type, real_value_truncate (TYPE_MODE (type),
							 dconst_e ()));
	      exponent = CALL_EXPR_ARG (arg, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_EXP2):
	      /* Prepare to do logN(exp2(exponent) -> exponent*logN(2).  */
	      x = build_real (type, dconst2);
	      exponent = CALL_EXPR_ARG (arg, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_EXP10):
	    CASE_FLT_FN (BUILT_IN_POW10):
	      /* Prepare to do logN(exp10(exponent) -> exponent*logN(10).  */
	      {
		REAL_VALUE_TYPE dconst10;
		real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
		x = build_real (type, dconst10);
	      }
	      exponent = CALL_EXPR_ARG (arg, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_SQRT):
	      /* Prepare to do logN(sqrt(x) -> 0.5*logN(x).  */
	      x = CALL_EXPR_ARG (arg, 0);
	      exponent = build_real (type, dconsthalf);
	      break;
	    CASE_FLT_FN (BUILT_IN_CBRT):
	      /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x).  */
	      x = CALL_EXPR_ARG (arg, 0);
	      exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
								dconst_third ()));
	      break;
	    CASE_FLT_FN (BUILT_IN_POW):
	      /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x).  */
	      x = CALL_EXPR_ARG (arg, 0);
	      exponent = CALL_EXPR_ARG (arg, 1);
	      break;
	    default:
	      break;
	    }

	  /* Now perform the optimization.  */
	  if (x && exponent)
	    {
	      tree logfn = build_call_expr (fndecl, 1, x);
	      return fold_build2 (MULT_EXPR, type, exponent, logfn);
	    }
	}
    }

  return NULL_TREE;
}
8365
/* Fold a builtin function call to hypot, hypotf, or hypotl.  ARG0 and
   ARG1 are the two arguments, TYPE the return type, FNDECL the call's
   declaration.  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_hypot (tree fndecl, tree arg0, tree arg1, tree type)
{
  tree res, narg0, narg1;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
    return res;

  /* If either argument to hypot has a negate or abs, strip that off.
     E.g. hypot(-x,fabs(y)) -> hypot(x,y).  This is always valid since
     hypot is even in each argument.  */
  narg0 = fold_strip_sign_ops (arg0);
  narg1 = fold_strip_sign_ops (arg1);
  if (narg0 || narg1)
    {
      return build_call_expr (fndecl, 2, narg0 ? narg0 : arg0,
			      narg1 ? narg1 : arg1);
    }

  /* If either argument is zero, hypot is fabs of the other.  */
  if (real_zerop (arg0))
    return fold_build1 (ABS_EXPR, type, arg1);
  else if (real_zerop (arg1))
    return fold_build1 (ABS_EXPR, type, arg0);

  /* hypot(x,x) -> fabs(x)*sqrt(2), using sqrt(2) truncated to the
     result mode.  */
  if (flag_unsafe_math_optimizations
      && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
    {
      const REAL_VALUE_TYPE sqrt2_trunc
	= real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
      return fold_build2 (MULT_EXPR, type,
			  fold_build1 (ABS_EXPR, type, arg0),
			  build_real (type, sqrt2_trunc));
    }

  return NULL_TREE;
}
8411
8412
/* Fold a builtin function call to pow, powf, or powl.  ARG0 is the
   base, ARG1 the exponent, TYPE the return type.  Return NULL_TREE
   if no simplification can be made.  */
static tree
fold_builtin_pow (tree fndecl, tree arg0, tree arg1, tree type)
{
  tree res;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
    return res;

  /* Optimize pow(1.0,y) = 1.0.  */
  if (real_onep (arg0))
    return omit_one_operand (type, build_real (type, dconst1), arg1);

  /* Transforms keyed on a constant exponent.  */
  if (TREE_CODE (arg1) == REAL_CST
      && !TREE_OVERFLOW (arg1))
    {
      REAL_VALUE_TYPE cint;
      REAL_VALUE_TYPE c;
      HOST_WIDE_INT n;

      c = TREE_REAL_CST (arg1);

      /* Optimize pow(x,0.0) = 1.0.  */
      if (REAL_VALUES_EQUAL (c, dconst0))
	return omit_one_operand (type, build_real (type, dconst1),
				 arg0);

      /* Optimize pow(x,1.0) = x.  */
      if (REAL_VALUES_EQUAL (c, dconst1))
	return arg0;

      /* Optimize pow(x,-1.0) = 1.0/x.  */
      if (REAL_VALUES_EQUAL (c, dconstm1))
	return fold_build2 (RDIV_EXPR, type,
			    build_real (type, dconst1), arg0);

      /* Optimize pow(x,0.5) = sqrt(x).  */
      if (flag_unsafe_math_optimizations
	  && REAL_VALUES_EQUAL (c, dconsthalf))
	{
	  tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);

	  if (sqrtfn != NULL_TREE)
	    return build_call_expr (sqrtfn, 1, arg0);
	}

      /* Optimize pow(x,1.0/3.0) = cbrt(x), matching 1/3 truncated to
	 the result mode.  */
      if (flag_unsafe_math_optimizations)
	{
	  const REAL_VALUE_TYPE dconstroot
	    = real_value_truncate (TYPE_MODE (type), dconst_third ());

	  if (REAL_VALUES_EQUAL (c, dconstroot))
	    {
	      tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
	      if (cbrtfn != NULL_TREE)
		return build_call_expr (cbrtfn, 1, arg0);
	    }
	}

      /* Check for an integer exponent by round-tripping C through
	 the host integer type.  */
      n = real_to_integer (&c);
      real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
      if (real_identical (&c, &cint))
	{
	  /* Attempt to evaluate pow at compile-time, unless this should
	     raise an exception (pow(0, negative) may set errno or
	     trap, so it is only folded when those are disabled).  */
	  if (TREE_CODE (arg0) == REAL_CST
	      && !TREE_OVERFLOW (arg0)
	      && (n > 0
		  || (!flag_trapping_math && !flag_errno_math)
		  || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
	    {
	      REAL_VALUE_TYPE x;
	      bool inexact;

	      x = TREE_REAL_CST (arg0);
	      inexact = real_powi (&x, TYPE_MODE (type), &x, n);
	      /* Use an inexact compile-time result only under
		 unsafe-math.  */
	      if (flag_unsafe_math_optimizations || !inexact)
		return build_real (type, x);
	    }

	  /* Strip sign ops from even integer powers.  */
	  if ((n & 1) == 0 && flag_unsafe_math_optimizations)
	    {
	      tree narg0 = fold_strip_sign_ops (arg0);
	      if (narg0)
		return build_call_expr (fndecl, 2, narg0, arg1);
	    }
	}
    }

  /* Transforms keyed on the base being another math builtin call.  */
  if (flag_unsafe_math_optimizations)
    {
      const enum built_in_function fcode = builtin_mathfn_code (arg0);

      /* Optimize pow(expN(x),y) = expN(x*y).  */
      if (BUILTIN_EXPONENT_P (fcode))
	{
	  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
	  tree arg = CALL_EXPR_ARG (arg0, 0);
	  arg = fold_build2 (MULT_EXPR, type, arg, arg1);
	  return build_call_expr (expfn, 1, arg);
	}

      /* Optimize pow(sqrt(x),y) = pow(x,y*0.5).  */
      if (BUILTIN_SQRT_P (fcode))
	{
	  tree narg0 = CALL_EXPR_ARG (arg0, 0);
	  tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
				    build_real (type, dconsthalf));
	  return build_call_expr (fndecl, 2, narg0, narg1);
	}

      /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative.  */
      if (BUILTIN_CBRT_P (fcode))
	{
	  tree arg = CALL_EXPR_ARG (arg0, 0);
	  if (tree_expr_nonnegative_p (arg))
	    {
	      const REAL_VALUE_TYPE dconstroot
		= real_value_truncate (TYPE_MODE (type), dconst_third ());
	      tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
					build_real (type, dconstroot));
	      return build_call_expr (fndecl, 2, arg, narg1);
	    }
	}

      /* Optimize pow(pow(x,y),z) = pow(x,y*z).  */
      if (fcode == BUILT_IN_POW
	  || fcode == BUILT_IN_POWF
	  || fcode == BUILT_IN_POWL)
	{
	  tree arg00 = CALL_EXPR_ARG (arg0, 0);
	  tree arg01 = CALL_EXPR_ARG (arg0, 1);
	  tree narg1 = fold_build2 (MULT_EXPR, type, arg01, arg1);
	  return build_call_expr (fndecl, 2, arg00, narg1);
	}
    }

  return NULL_TREE;
}
8561
8562 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8563 Return NULL_TREE if no simplification can be made. */
8564 static tree
8565 fold_builtin_powi (tree fndecl ATTRIBUTE_UNUSED,
8566 tree arg0, tree arg1, tree type)
8567 {
8568 if (!validate_arg (arg0, REAL_TYPE)
8569 || !validate_arg (arg1, INTEGER_TYPE))
8570 return NULL_TREE;
8571
8572 /* Optimize pow(1.0,y) = 1.0. */
8573 if (real_onep (arg0))
8574 return omit_one_operand (type, build_real (type, dconst1), arg1);
8575
8576 if (host_integerp (arg1, 0))
8577 {
8578 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
8579
8580 /* Evaluate powi at compile-time. */
8581 if (TREE_CODE (arg0) == REAL_CST
8582 && !TREE_OVERFLOW (arg0))
8583 {
8584 REAL_VALUE_TYPE x;
8585 x = TREE_REAL_CST (arg0);
8586 real_powi (&x, TYPE_MODE (type), &x, c);
8587 return build_real (type, x);
8588 }
8589
8590 /* Optimize pow(x,0) = 1.0. */
8591 if (c == 0)
8592 return omit_one_operand (type, build_real (type, dconst1),
8593 arg0);
8594
8595 /* Optimize pow(x,1) = x. */
8596 if (c == 1)
8597 return arg0;
8598
8599 /* Optimize pow(x,-1) = 1.0/x. */
8600 if (c == -1)
8601 return fold_build2 (RDIV_EXPR, type,
8602 build_real (type, dconst1), arg0);
8603 }
8604
8605 return NULL_TREE;
8606 }
8607
/* A subroutine of fold_builtin to fold the various exponent
   functions.  Return NULL_TREE if no simplification can be made.
   FUNC is the corresponding MPFR exponent function (mpfr_exp,
   mpfr_exp2 or mpfr_exp10), which also identifies which variant is
   being folded.  */

static tree
fold_builtin_exponent (tree fndecl, tree arg,
		       int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
{
  if (validate_arg (arg, REAL_TYPE))
    {
      tree type = TREE_TYPE (TREE_TYPE (fndecl));
      tree res;

      /* Calculate the result when the argument is a constant.  */
      if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
	return res;

      /* Optimize expN(logN(x)) = x — only for the matching base, and
	 only under unsafe-math since it ignores domain errors.  */
      if (flag_unsafe_math_optimizations)
	{
	  const enum built_in_function fcode = builtin_mathfn_code (arg);

	  if ((func == mpfr_exp
	       && (fcode == BUILT_IN_LOG
		   || fcode == BUILT_IN_LOGF
		   || fcode == BUILT_IN_LOGL))
	      || (func == mpfr_exp2
		  && (fcode == BUILT_IN_LOG2
		      || fcode == BUILT_IN_LOG2F
		      || fcode == BUILT_IN_LOG2L))
	      || (func == mpfr_exp10
		  && (fcode == BUILT_IN_LOG10
		      || fcode == BUILT_IN_LOG10F
		      || fcode == BUILT_IN_LOG10L)))
	    return fold_convert (type, CALL_EXPR_ARG (arg, 0));
	}
    }

  return NULL_TREE;
}
8648
8649 /* Return true if VAR is a VAR_DECL or a component thereof. */
8650
8651 static bool
8652 var_decl_component_p (tree var)
8653 {
8654 tree inner = var;
8655 while (handled_component_p (inner))
8656 inner = TREE_OPERAND (inner, 0);
8657 return SSA_VAR_P (inner);
8658 }
8659
/* Fold function call to builtin memset.  DEST is the destination
   pointer, C the fill value, LEN the byte count, TYPE the type of the
   call's result, and IGNORE true when that result is unused.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_memset (tree dest, tree c, tree len, tree type, bool ignore)
{
  tree var, ret;
  unsigned HOST_WIDE_INT length, cval;

  if (! validate_arg (dest, POINTER_TYPE)
      || ! validate_arg (c, INTEGER_TYPE)
      || ! validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  if (! host_integerp (len, 1))
    return NULL_TREE;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    return omit_one_operand (type, dest, c);

  /* The transformation below performs a single scalar store, so the
     fill value must be a constant and DEST side-effect free.  */
  if (! host_integerp (c, 1) || TREE_SIDE_EFFECTS (dest))
    return NULL_TREE;

  var = dest;
  STRIP_NOPS (var);
  if (TREE_CODE (var) != ADDR_EXPR)
    return NULL_TREE;

  var = TREE_OPERAND (var, 0);
  if (TREE_THIS_VOLATILE (var))
    return NULL_TREE;

  /* Only integral or pointer destinations are rewritten here.  */
  if (!INTEGRAL_TYPE_P (TREE_TYPE (var))
      && !POINTER_TYPE_P (TREE_TYPE (var)))
    return NULL_TREE;

  if (! var_decl_component_p (var))
    return NULL_TREE;

  /* The memset must cover the variable exactly and DEST must be
     aligned enough for a direct store of that size.  */
  length = tree_low_cst (len, 1);
  if (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (var))) != length
      || get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT
	 < (int) length)
    return NULL_TREE;

  if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
    return NULL_TREE;

  if (integer_zerop (c))
    cval = 0;
  else
    {
      if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
	return NULL_TREE;

      /* Replicate the fill byte across the word.  The final shift is
	 split as (cval << 31) << 1 so it stays well-defined when
	 HOST_WIDE_INT is only 32 bits wide.  */
      cval = tree_low_cst (c, 1);
      cval &= 0xff;
      cval |= cval << 8;
      cval |= cval << 16;
      cval |= (cval << 31) << 1;
    }

  /* Emit VAR = CVAL, and keep DEST as the expression's value unless
     the caller discards it.  */
  ret = build_int_cst_type (TREE_TYPE (var), cval);
  ret = build2 (MODIFY_EXPR, TREE_TYPE (var), var, ret);
  if (ignore)
    return ret;

  return omit_one_operand (type, dest, ret);
}
8730
8731 /* Fold function call to builtin memset. Return
8732 NULL_TREE if no simplification can be made. */
8733
8734 static tree
8735 fold_builtin_bzero (tree dest, tree size, bool ignore)
8736 {
8737 if (! validate_arg (dest, POINTER_TYPE)
8738 || ! validate_arg (size, INTEGER_TYPE))
8739 return NULL_TREE;
8740
8741 if (!ignore)
8742 return NULL_TREE;
8743
8744 /* New argument list transforming bzero(ptr x, int y) to
8745 memset(ptr x, int 0, size_t y). This is done this way
8746 so that if it isn't expanded inline, we fallback to
8747 calling bzero instead of memset. */
8748
8749 return fold_builtin_memset (dest, integer_zero_node,
8750 fold_convert (sizetype, size),
8751 void_type_node, ignore);
8752 }
8753
8754 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8755 NULL_TREE if no simplification can be made.
8756 If ENDP is 0, return DEST (like memcpy).
8757 If ENDP is 1, return DEST+LEN (like mempcpy).
8758 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8759 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8760 (memmove). */
8761
8762 static tree
8763 fold_builtin_memory_op (tree dest, tree src, tree len, tree type, bool ignore, int endp)
8764 {
8765 tree destvar, srcvar, expr;
8766
8767 if (! validate_arg (dest, POINTER_TYPE)
8768 || ! validate_arg (src, POINTER_TYPE)
8769 || ! validate_arg (len, INTEGER_TYPE))
8770 return NULL_TREE;
8771
8772 /* If the LEN parameter is zero, return DEST. */
8773 if (integer_zerop (len))
8774 return omit_one_operand (type, dest, src);
8775
8776 /* If SRC and DEST are the same (and not volatile), return
8777 DEST{,+LEN,+LEN-1}. */
8778 if (operand_equal_p (src, dest, 0))
8779 expr = len;
8780 else
8781 {
8782 tree srctype, desttype;
8783 if (endp == 3)
8784 {
8785 int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8786 int dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8787
8788 /* Both DEST and SRC must be pointer types.
8789 ??? This is what old code did. Is the testing for pointer types
8790 really mandatory?
8791
8792 If either SRC is readonly or length is 1, we can use memcpy. */
8793 if (dest_align && src_align
8794 && (readonly_data_expr (src)
8795 || (host_integerp (len, 1)
8796 && (MIN (src_align, dest_align) / BITS_PER_UNIT >=
8797 tree_low_cst (len, 1)))))
8798 {
8799 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8800 if (!fn)
8801 return NULL_TREE;
8802 return build_call_expr (fn, 3, dest, src, len);
8803 }
8804 return NULL_TREE;
8805 }
8806
8807 if (!host_integerp (len, 0))
8808 return NULL_TREE;
8809 /* FIXME:
8810 This logic lose for arguments like (type *)malloc (sizeof (type)),
8811 since we strip the casts of up to VOID return value from malloc.
8812 Perhaps we ought to inherit type from non-VOID argument here? */
8813 STRIP_NOPS (src);
8814 STRIP_NOPS (dest);
8815 srctype = TREE_TYPE (TREE_TYPE (src));
8816 desttype = TREE_TYPE (TREE_TYPE (dest));
8817 if (!srctype || !desttype
8818 || !TYPE_SIZE_UNIT (srctype)
8819 || !TYPE_SIZE_UNIT (desttype)
8820 || TREE_CODE (TYPE_SIZE_UNIT (srctype)) != INTEGER_CST
8821 || TREE_CODE (TYPE_SIZE_UNIT (desttype)) != INTEGER_CST
8822 || !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len)
8823 || !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8824 return NULL_TREE;
8825
8826 if (get_pointer_alignment (dest, BIGGEST_ALIGNMENT)
8827 < (int) TYPE_ALIGN (desttype)
8828 || (get_pointer_alignment (src, BIGGEST_ALIGNMENT)
8829 < (int) TYPE_ALIGN (srctype)))
8830 return NULL_TREE;
8831
8832 if (!ignore)
8833 dest = builtin_save_expr (dest);
8834
8835 srcvar = build_fold_indirect_ref (src);
8836 if (TREE_THIS_VOLATILE (srcvar))
8837 return NULL_TREE;
8838 if (!tree_int_cst_equal (lang_hooks.expr_size (srcvar), len))
8839 return NULL_TREE;
8840 /* With memcpy, it is possible to bypass aliasing rules, so without
8841 this check i.e. execute/20060930-2.c would be misoptimized, because
8842 it use conflicting alias set to hold argument for the memcpy call.
8843 This check is probably unnecessary with -fno-strict-aliasing.
8844 Similarly for destvar. See also PR29286. */
8845 if (!var_decl_component_p (srcvar)
8846 /* Accept: memcpy (*char_var, "test", 1); that simplify
8847 to char_var='t'; */
8848 || is_gimple_min_invariant (srcvar)
8849 || readonly_data_expr (src))
8850 return NULL_TREE;
8851
8852 destvar = build_fold_indirect_ref (dest);
8853 if (TREE_THIS_VOLATILE (destvar))
8854 return NULL_TREE;
8855 if (!tree_int_cst_equal (lang_hooks.expr_size (destvar), len))
8856 return NULL_TREE;
8857 if (!var_decl_component_p (destvar))
8858 return NULL_TREE;
8859
8860 if (srctype == desttype
8861 || (gimple_in_ssa_p (cfun)
8862 && useless_type_conversion_p (desttype, srctype)))
8863 expr = srcvar;
8864 else if ((INTEGRAL_TYPE_P (TREE_TYPE (srcvar))
8865 || POINTER_TYPE_P (TREE_TYPE (srcvar)))
8866 && (INTEGRAL_TYPE_P (TREE_TYPE (destvar))
8867 || POINTER_TYPE_P (TREE_TYPE (destvar))))
8868 expr = fold_convert (TREE_TYPE (destvar), srcvar);
8869 else
8870 expr = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (destvar), srcvar);
8871 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, expr);
8872 }
8873
8874 if (ignore)
8875 return expr;
8876
8877 if (endp == 0 || endp == 3)
8878 return omit_one_operand (type, dest, expr);
8879
8880 if (expr == len)
8881 expr = NULL_TREE;
8882
8883 if (endp == 2)
8884 len = fold_build2 (MINUS_EXPR, TREE_TYPE (len), len,
8885 ssize_int (1));
8886
8887 dest = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
8888 dest = fold_convert (type, dest);
8889 if (expr)
8890 dest = omit_one_operand (type, dest, expr);
8891 return dest;
8892 }
8893
8894 /* Fold function call to builtin strcpy with arguments DEST and SRC.
8895 If LEN is not NULL, it represents the length of the string to be
8896 copied. Return NULL_TREE if no simplification can be made. */
8897
8898 tree
8899 fold_builtin_strcpy (tree fndecl, tree dest, tree src, tree len)
8900 {
8901 tree fn;
8902
8903 if (!validate_arg (dest, POINTER_TYPE)
8904 || !validate_arg (src, POINTER_TYPE))
8905 return NULL_TREE;
8906
8907 /* If SRC and DEST are the same (and not volatile), return DEST. */
8908 if (operand_equal_p (src, dest, 0))
8909 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);
8910
8911 if (optimize_function_for_size_p (cfun))
8912 return NULL_TREE;
8913
8914 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8915 if (!fn)
8916 return NULL_TREE;
8917
8918 if (!len)
8919 {
8920 len = c_strlen (src, 1);
8921 if (! len || TREE_SIDE_EFFECTS (len))
8922 return NULL_TREE;
8923 }
8924
8925 len = size_binop (PLUS_EXPR, len, ssize_int (1));
8926 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
8927 build_call_expr (fn, 3, dest, src, len));
8928 }
8929
8930 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
8931 If SLEN is not NULL, it represents the length of the source string.
8932 Return NULL_TREE if no simplification can be made. */
8933
8934 tree
8935 fold_builtin_strncpy (tree fndecl, tree dest, tree src, tree len, tree slen)
8936 {
8937 tree fn;
8938
8939 if (!validate_arg (dest, POINTER_TYPE)
8940 || !validate_arg (src, POINTER_TYPE)
8941 || !validate_arg (len, INTEGER_TYPE))
8942 return NULL_TREE;
8943
8944 /* If the LEN parameter is zero, return DEST. */
8945 if (integer_zerop (len))
8946 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
8947
8948 /* We can't compare slen with len as constants below if len is not a
8949 constant. */
8950 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
8951 return NULL_TREE;
8952
8953 if (!slen)
8954 slen = c_strlen (src, 1);
8955
8956 /* Now, we must be passed a constant src ptr parameter. */
8957 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
8958 return NULL_TREE;
8959
8960 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
8961
8962 /* We do not support simplification of this case, though we do
8963 support it when expanding trees into RTL. */
8964 /* FIXME: generate a call to __builtin_memset. */
8965 if (tree_int_cst_lt (slen, len))
8966 return NULL_TREE;
8967
8968 /* OK transform into builtin memcpy. */
8969 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8970 if (!fn)
8971 return NULL_TREE;
8972 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
8973 build_call_expr (fn, 3, dest, src, len));
8974 }
8975
8976 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8977 arguments to the call, and TYPE is its return type.
8978 Return NULL_TREE if no simplification can be made. */
8979
8980 static tree
8981 fold_builtin_memchr (tree arg1, tree arg2, tree len, tree type)
8982 {
8983 if (!validate_arg (arg1, POINTER_TYPE)
8984 || !validate_arg (arg2, INTEGER_TYPE)
8985 || !validate_arg (len, INTEGER_TYPE))
8986 return NULL_TREE;
8987 else
8988 {
8989 const char *p1;
8990
8991 if (TREE_CODE (arg2) != INTEGER_CST
8992 || !host_integerp (len, 1))
8993 return NULL_TREE;
8994
8995 p1 = c_getstr (arg1);
8996 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
8997 {
8998 char c;
8999 const char *r;
9000 tree tem;
9001
9002 if (target_char_cast (arg2, &c))
9003 return NULL_TREE;
9004
9005 r = (char *) memchr (p1, c, tree_low_cst (len, 1));
9006
9007 if (r == NULL)
9008 return build_int_cst (TREE_TYPE (arg1), 0);
9009
9010 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (arg1), arg1,
9011 size_int (r - p1));
9012 return fold_convert (type, tem);
9013 }
9014 return NULL_TREE;
9015 }
9016 }
9017
9018 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
9019 Return NULL_TREE if no simplification can be made. */
9020
9021 static tree
9022 fold_builtin_memcmp (tree arg1, tree arg2, tree len)
9023 {
9024 const char *p1, *p2;
9025
9026 if (!validate_arg (arg1, POINTER_TYPE)
9027 || !validate_arg (arg2, POINTER_TYPE)
9028 || !validate_arg (len, INTEGER_TYPE))
9029 return NULL_TREE;
9030
9031 /* If the LEN parameter is zero, return zero. */
9032 if (integer_zerop (len))
9033 return omit_two_operands (integer_type_node, integer_zero_node,
9034 arg1, arg2);
9035
9036 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9037 if (operand_equal_p (arg1, arg2, 0))
9038 return omit_one_operand (integer_type_node, integer_zero_node, len);
9039
9040 p1 = c_getstr (arg1);
9041 p2 = c_getstr (arg2);
9042
9043 /* If all arguments are constant, and the value of len is not greater
9044 than the lengths of arg1 and arg2, evaluate at compile-time. */
9045 if (host_integerp (len, 1) && p1 && p2
9046 && compare_tree_int (len, strlen (p1) + 1) <= 0
9047 && compare_tree_int (len, strlen (p2) + 1) <= 0)
9048 {
9049 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
9050
9051 if (r > 0)
9052 return integer_one_node;
9053 else if (r < 0)
9054 return integer_minus_one_node;
9055 else
9056 return integer_zero_node;
9057 }
9058
9059 /* If len parameter is one, return an expression corresponding to
9060 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
9061 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
9062 {
9063 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9064 tree cst_uchar_ptr_node
9065 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9066
9067 tree ind1 = fold_convert (integer_type_node,
9068 build1 (INDIRECT_REF, cst_uchar_node,
9069 fold_convert (cst_uchar_ptr_node,
9070 arg1)));
9071 tree ind2 = fold_convert (integer_type_node,
9072 build1 (INDIRECT_REF, cst_uchar_node,
9073 fold_convert (cst_uchar_ptr_node,
9074 arg2)));
9075 return fold_build2 (MINUS_EXPR, integer_type_node, ind1, ind2);
9076 }
9077
9078 return NULL_TREE;
9079 }
9080
9081 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
9082 Return NULL_TREE if no simplification can be made. */
9083
9084 static tree
9085 fold_builtin_strcmp (tree arg1, tree arg2)
9086 {
9087 const char *p1, *p2;
9088
9089 if (!validate_arg (arg1, POINTER_TYPE)
9090 || !validate_arg (arg2, POINTER_TYPE))
9091 return NULL_TREE;
9092
9093 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9094 if (operand_equal_p (arg1, arg2, 0))
9095 return integer_zero_node;
9096
9097 p1 = c_getstr (arg1);
9098 p2 = c_getstr (arg2);
9099
9100 if (p1 && p2)
9101 {
9102 const int i = strcmp (p1, p2);
9103 if (i < 0)
9104 return integer_minus_one_node;
9105 else if (i > 0)
9106 return integer_one_node;
9107 else
9108 return integer_zero_node;
9109 }
9110
9111 /* If the second arg is "", return *(const unsigned char*)arg1. */
9112 if (p2 && *p2 == '\0')
9113 {
9114 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9115 tree cst_uchar_ptr_node
9116 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9117
9118 return fold_convert (integer_type_node,
9119 build1 (INDIRECT_REF, cst_uchar_node,
9120 fold_convert (cst_uchar_ptr_node,
9121 arg1)));
9122 }
9123
9124 /* If the first arg is "", return -*(const unsigned char*)arg2. */
9125 if (p1 && *p1 == '\0')
9126 {
9127 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9128 tree cst_uchar_ptr_node
9129 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9130
9131 tree temp = fold_convert (integer_type_node,
9132 build1 (INDIRECT_REF, cst_uchar_node,
9133 fold_convert (cst_uchar_ptr_node,
9134 arg2)));
9135 return fold_build1 (NEGATE_EXPR, integer_type_node, temp);
9136 }
9137
9138 return NULL_TREE;
9139 }
9140
/* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
   Folds to a constant when both strings are literals, and to a single
   byte comparison when one string is "" or LEN is 1.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_strncmp (tree arg1, tree arg2, tree len)
{
  const char *p1, *p2;

  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return zero.  Both pointer arguments
     are kept for their side effects.  */
  if (integer_zerop (len))
    return omit_two_operands (integer_type_node, integer_zero_node,
			      arg1, arg2);

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return omit_one_operand (integer_type_node, integer_zero_node, len);

  p1 = c_getstr (arg1);
  p2 = c_getstr (arg2);

  /* Both strings are literals and LEN is a constant: evaluate on the
     host, normalizing the result to -1 / 0 / 1.  strncmp never reads
     past a terminating NUL, so no extra length check is needed.  */
  if (host_integerp (len, 1) && p1 && p2)
    {
      const int i = strncmp (p1, p2, tree_low_cst (len, 1));
      if (i > 0)
	return integer_one_node;
      else if (i < 0)
	return integer_minus_one_node;
      else
	return integer_zero_node;
    }

  /* If the second arg is "", and the length is greater than zero
     (tree_int_cst_sgn == 1 means strictly positive),
     return *(const unsigned char*)arg1.  */
  if (p2 && *p2 == '\0'
      && TREE_CODE (len) == INTEGER_CST
      && tree_int_cst_sgn (len) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      return fold_convert (integer_type_node,
			   build1 (INDIRECT_REF, cst_uchar_node,
				   fold_convert (cst_uchar_ptr_node,
						 arg1)));
    }

  /* If the first arg is "", and the length is greater than zero,
     return -*(const unsigned char*)arg2.  */
  if (p1 && *p1 == '\0'
      && TREE_CODE (len) == INTEGER_CST
      && tree_int_cst_sgn (len) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree temp = fold_convert (integer_type_node,
				build1 (INDIRECT_REF, cst_uchar_node,
					fold_convert (cst_uchar_ptr_node,
						      arg2)));
      return fold_build1 (NEGATE_EXPR, integer_type_node, temp);
    }

  /* If len parameter is one, return an expression corresponding to
     (*(const unsigned char*)arg1 - (const unsigned char*)arg2).  */
  if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree ind1 = fold_convert (integer_type_node,
				build1 (INDIRECT_REF, cst_uchar_node,
					fold_convert (cst_uchar_ptr_node,
						      arg1)));
      tree ind2 = fold_convert (integer_type_node,
				build1 (INDIRECT_REF, cst_uchar_node,
					fold_convert (cst_uchar_ptr_node,
						      arg2)));
      return fold_build2 (MINUS_EXPR, integer_type_node, ind1, ind2);
    }

  return NULL_TREE;
}
9231
9232 /* Fold function call to builtin signbit, signbitf or signbitl with argument
9233 ARG. Return NULL_TREE if no simplification can be made. */
9234
9235 static tree
9236 fold_builtin_signbit (tree arg, tree type)
9237 {
9238 tree temp;
9239
9240 if (!validate_arg (arg, REAL_TYPE))
9241 return NULL_TREE;
9242
9243 /* If ARG is a compile-time constant, determine the result. */
9244 if (TREE_CODE (arg) == REAL_CST
9245 && !TREE_OVERFLOW (arg))
9246 {
9247 REAL_VALUE_TYPE c;
9248
9249 c = TREE_REAL_CST (arg);
9250 temp = REAL_VALUE_NEGATIVE (c) ? integer_one_node : integer_zero_node;
9251 return fold_convert (type, temp);
9252 }
9253
9254 /* If ARG is non-negative, the result is always zero. */
9255 if (tree_expr_nonnegative_p (arg))
9256 return omit_one_operand (type, integer_zero_node, arg);
9257
9258 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9259 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
9260 return fold_build2 (LT_EXPR, type, arg,
9261 build_real (TREE_TYPE (arg), dconst0));
9262
9263 return NULL_TREE;
9264 }
9265
9266 /* Fold function call to builtin copysign, copysignf or copysignl with
9267 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9268 be made. */
9269
9270 static tree
9271 fold_builtin_copysign (tree fndecl, tree arg1, tree arg2, tree type)
9272 {
9273 tree tem;
9274
9275 if (!validate_arg (arg1, REAL_TYPE)
9276 || !validate_arg (arg2, REAL_TYPE))
9277 return NULL_TREE;
9278
9279 /* copysign(X,X) is X. */
9280 if (operand_equal_p (arg1, arg2, 0))
9281 return fold_convert (type, arg1);
9282
9283 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9284 if (TREE_CODE (arg1) == REAL_CST
9285 && TREE_CODE (arg2) == REAL_CST
9286 && !TREE_OVERFLOW (arg1)
9287 && !TREE_OVERFLOW (arg2))
9288 {
9289 REAL_VALUE_TYPE c1, c2;
9290
9291 c1 = TREE_REAL_CST (arg1);
9292 c2 = TREE_REAL_CST (arg2);
9293 /* c1.sign := c2.sign. */
9294 real_copysign (&c1, &c2);
9295 return build_real (type, c1);
9296 }
9297
9298 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9299 Remember to evaluate Y for side-effects. */
9300 if (tree_expr_nonnegative_p (arg2))
9301 return omit_one_operand (type,
9302 fold_build1 (ABS_EXPR, type, arg1),
9303 arg2);
9304
9305 /* Strip sign changing operations for the first argument. */
9306 tem = fold_strip_sign_ops (arg1);
9307 if (tem)
9308 return build_call_expr (fndecl, 2, tem, arg2);
9309
9310 return NULL_TREE;
9311 }
9312
9313 /* Fold a call to builtin isascii with argument ARG. */
9314
9315 static tree
9316 fold_builtin_isascii (tree arg)
9317 {
9318 if (!validate_arg (arg, INTEGER_TYPE))
9319 return NULL_TREE;
9320 else
9321 {
9322 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9323 arg = build2 (BIT_AND_EXPR, integer_type_node, arg,
9324 build_int_cst (NULL_TREE,
9325 ~ (unsigned HOST_WIDE_INT) 0x7f));
9326 return fold_build2 (EQ_EXPR, integer_type_node,
9327 arg, integer_zero_node);
9328 }
9329 }
9330
9331 /* Fold a call to builtin toascii with argument ARG. */
9332
9333 static tree
9334 fold_builtin_toascii (tree arg)
9335 {
9336 if (!validate_arg (arg, INTEGER_TYPE))
9337 return NULL_TREE;
9338
9339 /* Transform toascii(c) -> (c & 0x7f). */
9340 return fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9341 build_int_cst (NULL_TREE, 0x7f));
9342 }
9343
9344 /* Fold a call to builtin isdigit with argument ARG. */
9345
9346 static tree
9347 fold_builtin_isdigit (tree arg)
9348 {
9349 if (!validate_arg (arg, INTEGER_TYPE))
9350 return NULL_TREE;
9351 else
9352 {
9353 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9354 /* According to the C standard, isdigit is unaffected by locale.
9355 However, it definitely is affected by the target character set. */
9356 unsigned HOST_WIDE_INT target_digit0
9357 = lang_hooks.to_target_charset ('0');
9358
9359 if (target_digit0 == 0)
9360 return NULL_TREE;
9361
9362 arg = fold_convert (unsigned_type_node, arg);
9363 arg = build2 (MINUS_EXPR, unsigned_type_node, arg,
9364 build_int_cst (unsigned_type_node, target_digit0));
9365 return fold_build2 (LE_EXPR, integer_type_node, arg,
9366 build_int_cst (unsigned_type_node, 9));
9367 }
9368 }
9369
9370 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9371
9372 static tree
9373 fold_builtin_fabs (tree arg, tree type)
9374 {
9375 if (!validate_arg (arg, REAL_TYPE))
9376 return NULL_TREE;
9377
9378 arg = fold_convert (type, arg);
9379 if (TREE_CODE (arg) == REAL_CST)
9380 return fold_abs_const (arg, type);
9381 return fold_build1 (ABS_EXPR, type, arg);
9382 }
9383
9384 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9385
9386 static tree
9387 fold_builtin_abs (tree arg, tree type)
9388 {
9389 if (!validate_arg (arg, INTEGER_TYPE))
9390 return NULL_TREE;
9391
9392 arg = fold_convert (type, arg);
9393 if (TREE_CODE (arg) == INTEGER_CST)
9394 return fold_abs_const (arg, type);
9395 return fold_build1 (ABS_EXPR, type, arg);
9396 }
9397
/* Fold a call to builtin fmin or fmax with real arguments ARG0 and
   ARG1 and return type TYPE.  MAX is true for fmax, false for fmin.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_fmin_fmax (tree arg0, tree arg1, tree type, bool max)
{
  if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
    {
      /* Calculate the result when the argument is a constant.
	 (do_mpfr_arg2 presumably folds only when both arguments are
	 REAL_CSTs -- confirm against its definition.)  */
      tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));

      if (res)
	return res;

      /* If either argument is NaN, return the other one.  Avoid the
	 transformation if we get (and honor) a signalling NaN.  Using
	 omit_one_operand() ensures we create a non-lvalue.  */
      if (TREE_CODE (arg0) == REAL_CST
	  && real_isnan (&TREE_REAL_CST (arg0))
	  && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	      || ! TREE_REAL_CST (arg0).signalling))
	return omit_one_operand (type, arg1, arg0);
      if (TREE_CODE (arg1) == REAL_CST
	  && real_isnan (&TREE_REAL_CST (arg1))
	  && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
	      || ! TREE_REAL_CST (arg1).signalling))
	return omit_one_operand (type, arg0, arg1);

      /* Transform fmin/fmax(x,x) -> x.  OEP_PURE_SAME allows the
	 operands to match even when they contain calls to pure
	 functions.  */
      if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
	return omit_one_operand (type, arg0, arg1);

      /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR.  C99 requires these
	 functions to return the numeric arg if the other one is NaN.
	 These tree codes don't honor that, so only transform if
	 -ffinite-math-only is set.  C99 doesn't require -0.0 to be
	 handled, so we don't have to worry about it either.  */
      if (flag_finite_math_only)
	return fold_build2 ((max ? MAX_EXPR : MIN_EXPR), type,
			    fold_convert (type, arg0),
			    fold_convert (type, arg1));
    }
  return NULL_TREE;
}
9441
9442 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9443
9444 static tree
9445 fold_builtin_carg (tree arg, tree type)
9446 {
9447 if (validate_arg (arg, COMPLEX_TYPE))
9448 {
9449 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9450
9451 if (atan2_fn)
9452 {
9453 tree new_arg = builtin_save_expr (arg);
9454 tree r_arg = fold_build1 (REALPART_EXPR, type, new_arg);
9455 tree i_arg = fold_build1 (IMAGPART_EXPR, type, new_arg);
9456 return build_call_expr (atan2_fn, 2, i_arg, r_arg);
9457 }
9458 }
9459
9460 return NULL_TREE;
9461 }
9462
/* Fold a call to builtin logb/ilogb.  ARG is the argument and RETTYPE
   is the call's return type -- a real type for logb, an integer type
   for ilogb.  Only a compile-time constant argument is folded.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_logb (tree arg, tree rettype)
{
  if (! validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Look through no-op conversions to expose a REAL_CST.  */
  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);

      switch (value->cl)
	{
	case rvc_nan:
	case rvc_inf:
	  /* If arg is Inf or NaN and we're logb, return it.  (The
	     REAL_TYPE check distinguishes logb from ilogb.)  */
	  if (TREE_CODE (rettype) == REAL_TYPE)
	    return fold_convert (rettype, arg);
	  /* Fall through... */
	case rvc_zero:
	  /* Zero may set errno and/or raise an exception for logb, also
	     for ilogb we don't know FP_ILOGB0.  */
	  return NULL_TREE;
	case rvc_normal:
	  /* For normal numbers, proceed iff radix == 2.  In GCC,
	     normalized significands are in the range [0.5, 1.0).  We
	     want the exponent as if they were [1.0, 2.0) so get the
	     exponent and subtract 1.  */
	  if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
	    return fold_convert (rettype, build_int_cst (NULL_TREE,
							 REAL_EXP (value)-1));
	  break;
	}
    }

  return NULL_TREE;
}
9503
/* Fold a call to builtin significand, if radix == 2.  ARG is the
   argument and RETTYPE is the call's return type.  Only a compile-time
   constant argument is folded.  Return NULL_TREE if no simplification
   can be made.  */

static tree
fold_builtin_significand (tree arg, tree rettype)
{
  if (! validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Look through no-op conversions to expose a REAL_CST.  */
  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);

      switch (value->cl)
	{
	case rvc_zero:
	case rvc_nan:
	case rvc_inf:
	  /* If arg is +-0, +-Inf or +-NaN, then return it.  */
	  return fold_convert (rettype, arg);
	case rvc_normal:
	  /* For normal numbers, proceed iff radix == 2.  */
	  if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
	    {
	      REAL_VALUE_TYPE result = *value;
	      /* In GCC, normalized significands are in the range [0.5,
		 1.0).  We want them to be [1.0, 2.0) so set the
		 exponent to 1.  */
	      SET_REAL_EXP (&result, 1);
	      return build_real (rettype, result);
	    }
	  break;
	}
    }

  return NULL_TREE;
}
9542
/* Fold a call to builtin frexp, we can assume the base is 2.  ARG0 is
   the real argument, ARG1 the int* out-parameter for the exponent, and
   RETTYPE the call's return type.  Only a compile-time constant ARG0
   is folded.  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_frexp (tree arg0, tree arg1, tree rettype)
{
  if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg0);

  /* Only a non-overflowed real constant can be folded here.  */
  if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
    return NULL_TREE;

  arg1 = build_fold_indirect_ref (arg1);

  /* Proceed if a valid pointer type was passed in.  */
  if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
      tree frac, exp;

      switch (value->cl)
	{
	case rvc_zero:
	  /* For +-0, return (*exp = 0, +-0).  */
	  exp = integer_zero_node;
	  frac = arg0;
	  break;
	case rvc_nan:
	case rvc_inf:
	  /* For +-NaN or +-Inf, *exp is unspecified, return arg0.  */
	  return omit_one_operand (rettype, arg0, arg1);
	case rvc_normal:
	  {
	    /* Since the frexp function always expects base 2, and in
	       GCC normalized significands are already in the range
	       [0.5, 1.0), we have exactly what frexp wants.  */
	    REAL_VALUE_TYPE frac_rvt = *value;
	    SET_REAL_EXP (&frac_rvt, 0);
	    frac = build_real (rettype, frac_rvt);
	    exp = build_int_cst (NULL_TREE, REAL_EXP (value));
	  }
	  break;
	default:
	  gcc_unreachable ();
	}

      /* Create the COMPOUND_EXPR (*arg1 = exp, frac).  */
      arg1 = fold_build2 (MODIFY_EXPR, rettype, arg1, exp);
      TREE_SIDE_EFFECTS (arg1) = 1;
      return fold_build2 (COMPOUND_EXPR, rettype, arg1, frac);
    }

  return NULL_TREE;
}
9598
/* Fold a call to builtin ldexp or scalbn/scalbln.  If LDEXP is true
   then we can assume the base is two.  If it's false, then we have to
   check the mode of the TYPE parameter in certain cases.  ARG0 is the
   real operand, ARG1 the integer exponent adjustment, and TYPE the
   call's return type.  Return NULL_TREE if no simplification can be
   made.  */

static tree
fold_builtin_load_exponent (tree arg0, tree arg1, tree type, bool ldexp)
{
  if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
    {
      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0.  */
      if (real_zerop (arg0) || integer_zerop (arg1)
	  || (TREE_CODE (arg0) == REAL_CST
	      && !real_isfinite (&TREE_REAL_CST (arg0))))
	return omit_one_operand (type, arg0, arg1);

      /* If both arguments are constant, then try to evaluate it.
	 scalbn/scalbln additionally require TYPE's format to have
	 radix 2, since FLT_RADIX is their scaling base.  */
      if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
	  && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
	  && host_integerp (arg1, 0))
	{
	  /* Bound the maximum adjustment to twice the range of the
	     mode's valid exponents.  Use abs to ensure the range is
	     positive as a sanity check.  */
	  const long max_exp_adj = 2 *
	    labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
		  - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);

	  /* Get the user-requested adjustment.  */
	  const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);

	  /* The requested adjustment must be inside this range.  This
	     is a preliminary cap to avoid things like overflow, we
	     may still fail to compute the result for other reasons.  */
	  if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
	    {
	      REAL_VALUE_TYPE initial_result;

	      real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);

	      /* Ensure we didn't overflow.  */
	      if (! real_isinf (&initial_result))
		{
		  const REAL_VALUE_TYPE trunc_result
		    = real_value_truncate (TYPE_MODE (type), initial_result);

		  /* Only proceed if the target mode can hold the
		     resulting value.  */
		  if (REAL_VALUES_EQUAL (initial_result, trunc_result))
		    return build_real (type, trunc_result);
		}
	    }
	}
    }

  return NULL_TREE;
}
9658
9659 /* Fold a call to builtin modf. */
9660
9661 static tree
9662 fold_builtin_modf (tree arg0, tree arg1, tree rettype)
9663 {
9664 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9665 return NULL_TREE;
9666
9667 STRIP_NOPS (arg0);
9668
9669 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9670 return NULL_TREE;
9671
9672 arg1 = build_fold_indirect_ref (arg1);
9673
9674 /* Proceed if a valid pointer type was passed in. */
9675 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9676 {
9677 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9678 REAL_VALUE_TYPE trunc, frac;
9679
9680 switch (value->cl)
9681 {
9682 case rvc_nan:
9683 case rvc_zero:
9684 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9685 trunc = frac = *value;
9686 break;
9687 case rvc_inf:
9688 /* For +-Inf, return (*arg1 = arg0, +-0). */
9689 frac = dconst0;
9690 frac.sign = value->sign;
9691 trunc = *value;
9692 break;
9693 case rvc_normal:
9694 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9695 real_trunc (&trunc, VOIDmode, value);
9696 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9697 /* If the original number was negative and already
9698 integral, then the fractional part is -0.0. */
9699 if (value->sign && frac.cl == rvc_zero)
9700 frac.sign = value->sign;
9701 break;
9702 }
9703
9704 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9705 arg1 = fold_build2 (MODIFY_EXPR, rettype, arg1,
9706 build_real (rettype, trunc));
9707 TREE_SIDE_EFFECTS (arg1) = 1;
9708 return fold_build2 (COMPOUND_EXPR, rettype, arg1,
9709 build_real (rettype, frac));
9710 }
9711
9712 return NULL_TREE;
9713 }
9714
/* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
   ARG is the argument for the call, FNDECL supplies the return type,
   and BUILTIN_INDEX selects which classification builtin is being
   folded.  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_classify (tree fndecl, tree arg, int builtin_index)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  REAL_VALUE_TYPE r;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  switch (builtin_index)
    {
    case BUILT_IN_ISINF:
      /* When the mode has no infinities the answer is statically 0;
	 ARG is kept for its side effects.  */
      if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
	return omit_one_operand (type, integer_zero_node, arg);

      /* Constant argument: classify it now.  isinf returns the sign
	 of the infinity (+1/-1), not just a boolean.  */
      if (TREE_CODE (arg) == REAL_CST)
	{
	  r = TREE_REAL_CST (arg);
	  if (real_isinf (&r))
	    return real_compare (GT_EXPR, &r, &dconst0)
		   ? integer_one_node : integer_minus_one_node;
	  else
	    return integer_zero_node;
	}

      return NULL_TREE;

    case BUILT_IN_ISINF_SIGN:
      {
	/* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
	/* In a boolean context, GCC will fold the inner COND_EXPR to
	   1.  So e.g. "if (isinf_sign(x))" would be folded to just
	   "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
	tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
	tree isinf_fn = built_in_decls[BUILT_IN_ISINF];
	tree tmp = NULL_TREE;

	/* Save ARG so both calls share one evaluation of its side
	   effects.  */
	arg = builtin_save_expr (arg);

	if (signbit_fn && isinf_fn)
	  {
	    tree signbit_call = build_call_expr (signbit_fn, 1, arg);
	    tree isinf_call = build_call_expr (isinf_fn, 1, arg);

	    /* Normalize both calls to 0/1 before combining them.  */
	    signbit_call = fold_build2 (NE_EXPR, integer_type_node,
					signbit_call, integer_zero_node);
	    isinf_call = fold_build2 (NE_EXPR, integer_type_node,
				      isinf_call, integer_zero_node);

	    tmp = fold_build3 (COND_EXPR, integer_type_node, signbit_call,
			       integer_minus_one_node, integer_one_node);
	    tmp = fold_build3 (COND_EXPR, integer_type_node, isinf_call, tmp,
			       integer_zero_node);
	  }

	return tmp;
      }

    case BUILT_IN_ISFINITE:
      /* Without NaNs and infinities every value is finite.  */
      if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
	  && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
	return omit_one_operand (type, integer_one_node, arg);

      if (TREE_CODE (arg) == REAL_CST)
	{
	  r = TREE_REAL_CST (arg);
	  return real_isfinite (&r) ? integer_one_node : integer_zero_node;
	}

      return NULL_TREE;

    case BUILT_IN_ISNAN:
      /* Without NaNs the answer is statically 0.  */
      if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
	return omit_one_operand (type, integer_zero_node, arg);

      if (TREE_CODE (arg) == REAL_CST)
	{
	  r = TREE_REAL_CST (arg);
	  return real_isnan (&r) ? integer_one_node : integer_zero_node;
	}

      /* Non-constant: isnan(x) is equivalent to the self-comparison
	 x UNORDERED x.  */
      arg = builtin_save_expr (arg);
      return fold_build2 (UNORDERED_EXPR, type, arg, arg);

    default:
      gcc_unreachable ();
    }
}
9806
/* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
   This builtin will generate code to return the appropriate floating
   point classification depending on the value of the floating point
   number passed in.  The possible return values must be supplied as
   int arguments to the call in the following order: FP_NAN, FP_INFINITE,
   FP_NORMAL, FP_SUBNORMAL and FP_ZERO.  The ellipses is for exactly
   one floating point argument which is "type generic".
   Return NULL_TREE if the argument list does not match.  */

static tree
fold_builtin_fpclassify (tree exp)
{
  tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
    arg, type, res, tmp;
  enum machine_mode mode;
  REAL_VALUE_TYPE r;
  char buf[128];

  /* Verify the required arguments in the original call.  */
  if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
			 INTEGER_TYPE, INTEGER_TYPE,
			 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_TREE;

  fp_nan = CALL_EXPR_ARG (exp, 0);
  fp_infinite = CALL_EXPR_ARG (exp, 1);
  fp_normal = CALL_EXPR_ARG (exp, 2);
  fp_subnormal = CALL_EXPR_ARG (exp, 3);
  fp_zero = CALL_EXPR_ARG (exp, 4);
  arg = CALL_EXPR_ARG (exp, 5);
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (type);
  /* Work on |arg| so only one sign needs testing below; save it so
     the repeated comparisons share one evaluation.  */
  arg = builtin_save_expr (fold_build1 (ABS_EXPR, type, arg));

  /* fpclassify(x) ->
       isnan(x) ? FP_NAN :
         (fabs(x) == Inf ? FP_INFINITE :
	   (fabs(x) >= DBL_MIN ? FP_NORMAL :
	     (x == 0 ? FP_ZERO : FP_SUBNORMAL))).
     Built innermost-first; each step wraps the previous result in
     another COND_EXPR.  */

  tmp = fold_build2 (EQ_EXPR, integer_type_node, arg,
		     build_real (type, dconst0));
  res = fold_build3 (COND_EXPR, integer_type_node, tmp, fp_zero, fp_subnormal);

  /* BUF receives the smallest normal value of MODE as a hexadecimal
     float string, e.g. "0x1p-1022" for IEEE double.  */
  sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
  real_from_string (&r, buf);
  tmp = fold_build2 (GE_EXPR, integer_type_node, arg, build_real (type, r));
  res = fold_build3 (COND_EXPR, integer_type_node, tmp, fp_normal, res);

  if (HONOR_INFINITIES (mode))
    {
      real_inf (&r);
      tmp = fold_build2 (EQ_EXPR, integer_type_node, arg,
			 build_real (type, r));
      res = fold_build3 (COND_EXPR, integer_type_node, tmp, fp_infinite, res);
    }

  if (HONOR_NANS (mode))
    {
      /* ORDERED_EXPR is true when ARG is not a NaN.  */
      tmp = fold_build2 (ORDERED_EXPR, integer_type_node, arg, arg);
      res = fold_build3 (COND_EXPR, integer_type_node, tmp, res, fp_nan);
    }

  return res;
}
9871
/* Fold a call to an unordered comparison function such as
   __builtin_isgreater().  FNDECL is the FUNCTION_DECL for the function
   being called and ARG0 and ARG1 are the arguments for the call.
   UNORDERED_CODE and ORDERED_CODE are comparison codes that give
   the opposite of the desired result.  UNORDERED_CODE is used
   for modes that can hold NaNs and ORDERED_CODE is used for
   the rest.  */

static tree
fold_builtin_unordered_cmp (tree fndecl, tree arg0, tree arg1,
			    enum tree_code unordered_code,
			    enum tree_code ordered_code)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum tree_code code;
  tree type0, type1;
  enum tree_code code0, code1;
  tree cmp_type = NULL_TREE;

  type0 = TREE_TYPE (arg0);
  type1 = TREE_TYPE (arg1);

  code0 = TREE_CODE (type0);
  code1 = TREE_CODE (type1);

  /* Pick a common comparison type, mirroring the usual arithmetic
     conversions: real op real uses the wider real type, mixed
     real/integer uses the real type.  */
  if (code0 == REAL_TYPE && code1 == REAL_TYPE)
    /* Choose the wider of two real types.  */
    cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
      ? type0 : type1;
  else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
    cmp_type = type0;
  else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
    cmp_type = type1;

  arg0 = fold_convert (cmp_type, arg0);
  arg1 = fold_convert (cmp_type, arg1);

  if (unordered_code == UNORDERED_EXPR)
    {
      /* isunordered(): if the mode cannot hold NaNs the answer is
	 statically 0, but the operands must still be evaluated for
	 their side effects.  */
      if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
	return omit_two_operands (type, integer_zero_node, arg0, arg1);
      return fold_build2 (UNORDERED_EXPR, type, arg0, arg1);
    }

  /* The caller supplied the INVERSE of the desired comparison, so
     build that comparison and negate it.  */
  code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
    : ordered_code;
  return fold_build1 (TRUTH_NOT_EXPR, type,
		      fold_build2 (code, type, arg0, arg1));
}
9921
/* Fold a call to built-in function FNDECL with 0 arguments.
   IGNORE is true if the result of the function call is ignored.  This
   function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_0 (tree fndecl, bool ignore ATTRIBUTE_UNUSED)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  switch (fcode)
    {
    CASE_FLT_FN (BUILT_IN_INF):
    case BUILT_IN_INFD32:
    case BUILT_IN_INFD64:
    case BUILT_IN_INFD128:
      /* The bool flag distinguishes inf() (true) from HUGE_VAL (false);
	 presumably it controls diagnostics inside fold_builtin_inf —
	 see that function for the exact semantics.  */
      return fold_builtin_inf (type, true);

    CASE_FLT_FN (BUILT_IN_HUGE_VAL):
      return fold_builtin_inf (type, false);

    case BUILT_IN_CLASSIFY_TYPE:
      /* No argument at all means "no argument" classification.  */
      return fold_builtin_classify_type (NULL_TREE);

    default:
      break;
    }
  return NULL_TREE;
}
9950
9951 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9952 IGNORE is true if the result of the function call is ignored. This
9953 function returns NULL_TREE if no simplification was possible. */
9954
9955 static tree
9956 fold_builtin_1 (tree fndecl, tree arg0, bool ignore)
9957 {
9958 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9959 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9960 switch (fcode)
9961 {
9962
9963 case BUILT_IN_CONSTANT_P:
9964 {
9965 tree val = fold_builtin_constant_p (arg0);
9966
9967 /* Gimplification will pull the CALL_EXPR for the builtin out of
9968 an if condition. When not optimizing, we'll not CSE it back.
9969 To avoid link error types of regressions, return false now. */
9970 if (!val && !optimize)
9971 val = integer_zero_node;
9972
9973 return val;
9974 }
9975
9976 case BUILT_IN_CLASSIFY_TYPE:
9977 return fold_builtin_classify_type (arg0);
9978
9979 case BUILT_IN_STRLEN:
9980 return fold_builtin_strlen (arg0);
9981
9982 CASE_FLT_FN (BUILT_IN_FABS):
9983 return fold_builtin_fabs (arg0, type);
9984
9985 case BUILT_IN_ABS:
9986 case BUILT_IN_LABS:
9987 case BUILT_IN_LLABS:
9988 case BUILT_IN_IMAXABS:
9989 return fold_builtin_abs (arg0, type);
9990
9991 CASE_FLT_FN (BUILT_IN_CONJ):
9992 if (validate_arg (arg0, COMPLEX_TYPE))
9993 return fold_build1 (CONJ_EXPR, type, arg0);
9994 break;
9995
9996 CASE_FLT_FN (BUILT_IN_CREAL):
9997 if (validate_arg (arg0, COMPLEX_TYPE))
9998 return non_lvalue (fold_build1 (REALPART_EXPR, type, arg0));;
9999 break;
10000
10001 CASE_FLT_FN (BUILT_IN_CIMAG):
10002 if (validate_arg (arg0, COMPLEX_TYPE))
10003 return non_lvalue (fold_build1 (IMAGPART_EXPR, type, arg0));
10004 break;
10005
10006 CASE_FLT_FN (BUILT_IN_CCOS):
10007 CASE_FLT_FN (BUILT_IN_CCOSH):
10008 /* These functions are "even", i.e. f(x) == f(-x). */
10009 if (validate_arg (arg0, COMPLEX_TYPE))
10010 {
10011 tree narg = fold_strip_sign_ops (arg0);
10012 if (narg)
10013 return build_call_expr (fndecl, 1, narg);
10014 }
10015 break;
10016
10017 CASE_FLT_FN (BUILT_IN_CABS):
10018 return fold_builtin_cabs (arg0, type, fndecl);
10019
10020 CASE_FLT_FN (BUILT_IN_CARG):
10021 return fold_builtin_carg (arg0, type);
10022
10023 CASE_FLT_FN (BUILT_IN_SQRT):
10024 return fold_builtin_sqrt (arg0, type);
10025
10026 CASE_FLT_FN (BUILT_IN_CBRT):
10027 return fold_builtin_cbrt (arg0, type);
10028
10029 CASE_FLT_FN (BUILT_IN_ASIN):
10030 if (validate_arg (arg0, REAL_TYPE))
10031 return do_mpfr_arg1 (arg0, type, mpfr_asin,
10032 &dconstm1, &dconst1, true);
10033 break;
10034
10035 CASE_FLT_FN (BUILT_IN_ACOS):
10036 if (validate_arg (arg0, REAL_TYPE))
10037 return do_mpfr_arg1 (arg0, type, mpfr_acos,
10038 &dconstm1, &dconst1, true);
10039 break;
10040
10041 CASE_FLT_FN (BUILT_IN_ATAN):
10042 if (validate_arg (arg0, REAL_TYPE))
10043 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
10044 break;
10045
10046 CASE_FLT_FN (BUILT_IN_ASINH):
10047 if (validate_arg (arg0, REAL_TYPE))
10048 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
10049 break;
10050
10051 CASE_FLT_FN (BUILT_IN_ACOSH):
10052 if (validate_arg (arg0, REAL_TYPE))
10053 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
10054 &dconst1, NULL, true);
10055 break;
10056
10057 CASE_FLT_FN (BUILT_IN_ATANH):
10058 if (validate_arg (arg0, REAL_TYPE))
10059 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
10060 &dconstm1, &dconst1, false);
10061 break;
10062
10063 CASE_FLT_FN (BUILT_IN_SIN):
10064 if (validate_arg (arg0, REAL_TYPE))
10065 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
10066 break;
10067
10068 CASE_FLT_FN (BUILT_IN_COS):
10069 return fold_builtin_cos (arg0, type, fndecl);
10070 break;
10071
10072 CASE_FLT_FN (BUILT_IN_TAN):
10073 return fold_builtin_tan (arg0, type);
10074
10075 CASE_FLT_FN (BUILT_IN_CEXP):
10076 return fold_builtin_cexp (arg0, type);
10077
10078 CASE_FLT_FN (BUILT_IN_CEXPI):
10079 if (validate_arg (arg0, REAL_TYPE))
10080 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
10081 break;
10082
10083 CASE_FLT_FN (BUILT_IN_SINH):
10084 if (validate_arg (arg0, REAL_TYPE))
10085 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
10086 break;
10087
10088 CASE_FLT_FN (BUILT_IN_COSH):
10089 return fold_builtin_cosh (arg0, type, fndecl);
10090
10091 CASE_FLT_FN (BUILT_IN_TANH):
10092 if (validate_arg (arg0, REAL_TYPE))
10093 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
10094 break;
10095
10096 CASE_FLT_FN (BUILT_IN_ERF):
10097 if (validate_arg (arg0, REAL_TYPE))
10098 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
10099 break;
10100
10101 CASE_FLT_FN (BUILT_IN_ERFC):
10102 if (validate_arg (arg0, REAL_TYPE))
10103 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
10104 break;
10105
10106 CASE_FLT_FN (BUILT_IN_TGAMMA):
10107 if (validate_arg (arg0, REAL_TYPE))
10108 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
10109 break;
10110
10111 CASE_FLT_FN (BUILT_IN_EXP):
10112 return fold_builtin_exponent (fndecl, arg0, mpfr_exp);
10113
10114 CASE_FLT_FN (BUILT_IN_EXP2):
10115 return fold_builtin_exponent (fndecl, arg0, mpfr_exp2);
10116
10117 CASE_FLT_FN (BUILT_IN_EXP10):
10118 CASE_FLT_FN (BUILT_IN_POW10):
10119 return fold_builtin_exponent (fndecl, arg0, mpfr_exp10);
10120
10121 CASE_FLT_FN (BUILT_IN_EXPM1):
10122 if (validate_arg (arg0, REAL_TYPE))
10123 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
10124 break;
10125
10126 CASE_FLT_FN (BUILT_IN_LOG):
10127 return fold_builtin_logarithm (fndecl, arg0, mpfr_log);
10128
10129 CASE_FLT_FN (BUILT_IN_LOG2):
10130 return fold_builtin_logarithm (fndecl, arg0, mpfr_log2);
10131
10132 CASE_FLT_FN (BUILT_IN_LOG10):
10133 return fold_builtin_logarithm (fndecl, arg0, mpfr_log10);
10134
10135 CASE_FLT_FN (BUILT_IN_LOG1P):
10136 if (validate_arg (arg0, REAL_TYPE))
10137 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
10138 &dconstm1, NULL, false);
10139 break;
10140
10141 CASE_FLT_FN (BUILT_IN_J0):
10142 if (validate_arg (arg0, REAL_TYPE))
10143 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10144 NULL, NULL, 0);
10145 break;
10146
10147 CASE_FLT_FN (BUILT_IN_J1):
10148 if (validate_arg (arg0, REAL_TYPE))
10149 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10150 NULL, NULL, 0);
10151 break;
10152
10153 CASE_FLT_FN (BUILT_IN_Y0):
10154 if (validate_arg (arg0, REAL_TYPE))
10155 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10156 &dconst0, NULL, false);
10157 break;
10158
10159 CASE_FLT_FN (BUILT_IN_Y1):
10160 if (validate_arg (arg0, REAL_TYPE))
10161 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10162 &dconst0, NULL, false);
10163 break;
10164
10165 CASE_FLT_FN (BUILT_IN_NAN):
10166 case BUILT_IN_NAND32:
10167 case BUILT_IN_NAND64:
10168 case BUILT_IN_NAND128:
10169 return fold_builtin_nan (arg0, type, true);
10170
10171 CASE_FLT_FN (BUILT_IN_NANS):
10172 return fold_builtin_nan (arg0, type, false);
10173
10174 CASE_FLT_FN (BUILT_IN_FLOOR):
10175 return fold_builtin_floor (fndecl, arg0);
10176
10177 CASE_FLT_FN (BUILT_IN_CEIL):
10178 return fold_builtin_ceil (fndecl, arg0);
10179
10180 CASE_FLT_FN (BUILT_IN_TRUNC):
10181 return fold_builtin_trunc (fndecl, arg0);
10182
10183 CASE_FLT_FN (BUILT_IN_ROUND):
10184 return fold_builtin_round (fndecl, arg0);
10185
10186 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10187 CASE_FLT_FN (BUILT_IN_RINT):
10188 return fold_trunc_transparent_mathfn (fndecl, arg0);
10189
10190 CASE_FLT_FN (BUILT_IN_LCEIL):
10191 CASE_FLT_FN (BUILT_IN_LLCEIL):
10192 CASE_FLT_FN (BUILT_IN_LFLOOR):
10193 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10194 CASE_FLT_FN (BUILT_IN_LROUND):
10195 CASE_FLT_FN (BUILT_IN_LLROUND):
10196 return fold_builtin_int_roundingfn (fndecl, arg0);
10197
10198 CASE_FLT_FN (BUILT_IN_LRINT):
10199 CASE_FLT_FN (BUILT_IN_LLRINT):
10200 return fold_fixed_mathfn (fndecl, arg0);
10201
10202 case BUILT_IN_BSWAP32:
10203 case BUILT_IN_BSWAP64:
10204 return fold_builtin_bswap (fndecl, arg0);
10205
10206 CASE_INT_FN (BUILT_IN_FFS):
10207 CASE_INT_FN (BUILT_IN_CLZ):
10208 CASE_INT_FN (BUILT_IN_CTZ):
10209 CASE_INT_FN (BUILT_IN_POPCOUNT):
10210 CASE_INT_FN (BUILT_IN_PARITY):
10211 return fold_builtin_bitop (fndecl, arg0);
10212
10213 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10214 return fold_builtin_signbit (arg0, type);
10215
10216 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10217 return fold_builtin_significand (arg0, type);
10218
10219 CASE_FLT_FN (BUILT_IN_ILOGB):
10220 CASE_FLT_FN (BUILT_IN_LOGB):
10221 return fold_builtin_logb (arg0, type);
10222
10223 case BUILT_IN_ISASCII:
10224 return fold_builtin_isascii (arg0);
10225
10226 case BUILT_IN_TOASCII:
10227 return fold_builtin_toascii (arg0);
10228
10229 case BUILT_IN_ISDIGIT:
10230 return fold_builtin_isdigit (arg0);
10231
10232 CASE_FLT_FN (BUILT_IN_FINITE):
10233 case BUILT_IN_FINITED32:
10234 case BUILT_IN_FINITED64:
10235 case BUILT_IN_FINITED128:
10236 case BUILT_IN_ISFINITE:
10237 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISFINITE);
10238
10239 CASE_FLT_FN (BUILT_IN_ISINF):
10240 case BUILT_IN_ISINFD32:
10241 case BUILT_IN_ISINFD64:
10242 case BUILT_IN_ISINFD128:
10243 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISINF);
10244
10245 case BUILT_IN_ISINF_SIGN:
10246 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISINF_SIGN);
10247
10248 CASE_FLT_FN (BUILT_IN_ISNAN):
10249 case BUILT_IN_ISNAND32:
10250 case BUILT_IN_ISNAND64:
10251 case BUILT_IN_ISNAND128:
10252 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISNAN);
10253
10254 case BUILT_IN_PRINTF:
10255 case BUILT_IN_PRINTF_UNLOCKED:
10256 case BUILT_IN_VPRINTF:
10257 return fold_builtin_printf (fndecl, arg0, NULL_TREE, ignore, fcode);
10258
10259 default:
10260 break;
10261 }
10262
10263 return NULL_TREE;
10264
10265 }
10266
10267 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10268 IGNORE is true if the result of the function call is ignored. This
10269 function returns NULL_TREE if no simplification was possible. */
10270
10271 static tree
10272 fold_builtin_2 (tree fndecl, tree arg0, tree arg1, bool ignore)
10273 {
10274 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10275 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10276
10277 switch (fcode)
10278 {
10279 CASE_FLT_FN (BUILT_IN_JN):
10280 if (validate_arg (arg0, INTEGER_TYPE)
10281 && validate_arg (arg1, REAL_TYPE))
10282 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10283 break;
10284
10285 CASE_FLT_FN (BUILT_IN_YN):
10286 if (validate_arg (arg0, INTEGER_TYPE)
10287 && validate_arg (arg1, REAL_TYPE))
10288 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10289 &dconst0, false);
10290 break;
10291
10292 CASE_FLT_FN (BUILT_IN_DREM):
10293 CASE_FLT_FN (BUILT_IN_REMAINDER):
10294 if (validate_arg (arg0, REAL_TYPE)
10295 && validate_arg(arg1, REAL_TYPE))
10296 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10297 break;
10298
10299 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10300 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10301 if (validate_arg (arg0, REAL_TYPE)
10302 && validate_arg(arg1, POINTER_TYPE))
10303 return do_mpfr_lgamma_r (arg0, arg1, type);
10304 break;
10305
10306 CASE_FLT_FN (BUILT_IN_ATAN2):
10307 if (validate_arg (arg0, REAL_TYPE)
10308 && validate_arg(arg1, REAL_TYPE))
10309 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10310 break;
10311
10312 CASE_FLT_FN (BUILT_IN_FDIM):
10313 if (validate_arg (arg0, REAL_TYPE)
10314 && validate_arg(arg1, REAL_TYPE))
10315 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10316 break;
10317
10318 CASE_FLT_FN (BUILT_IN_HYPOT):
10319 return fold_builtin_hypot (fndecl, arg0, arg1, type);
10320
10321 CASE_FLT_FN (BUILT_IN_LDEXP):
10322 return fold_builtin_load_exponent (arg0, arg1, type, /*ldexp=*/true);
10323 CASE_FLT_FN (BUILT_IN_SCALBN):
10324 CASE_FLT_FN (BUILT_IN_SCALBLN):
10325 return fold_builtin_load_exponent (arg0, arg1, type, /*ldexp=*/false);
10326
10327 CASE_FLT_FN (BUILT_IN_FREXP):
10328 return fold_builtin_frexp (arg0, arg1, type);
10329
10330 CASE_FLT_FN (BUILT_IN_MODF):
10331 return fold_builtin_modf (arg0, arg1, type);
10332
10333 case BUILT_IN_BZERO:
10334 return fold_builtin_bzero (arg0, arg1, ignore);
10335
10336 case BUILT_IN_FPUTS:
10337 return fold_builtin_fputs (arg0, arg1, ignore, false, NULL_TREE);
10338
10339 case BUILT_IN_FPUTS_UNLOCKED:
10340 return fold_builtin_fputs (arg0, arg1, ignore, true, NULL_TREE);
10341
10342 case BUILT_IN_STRSTR:
10343 return fold_builtin_strstr (arg0, arg1, type);
10344
10345 case BUILT_IN_STRCAT:
10346 return fold_builtin_strcat (arg0, arg1);
10347
10348 case BUILT_IN_STRSPN:
10349 return fold_builtin_strspn (arg0, arg1);
10350
10351 case BUILT_IN_STRCSPN:
10352 return fold_builtin_strcspn (arg0, arg1);
10353
10354 case BUILT_IN_STRCHR:
10355 case BUILT_IN_INDEX:
10356 return fold_builtin_strchr (arg0, arg1, type);
10357
10358 case BUILT_IN_STRRCHR:
10359 case BUILT_IN_RINDEX:
10360 return fold_builtin_strrchr (arg0, arg1, type);
10361
10362 case BUILT_IN_STRCPY:
10363 return fold_builtin_strcpy (fndecl, arg0, arg1, NULL_TREE);
10364
10365 case BUILT_IN_STRCMP:
10366 return fold_builtin_strcmp (arg0, arg1);
10367
10368 case BUILT_IN_STRPBRK:
10369 return fold_builtin_strpbrk (arg0, arg1, type);
10370
10371 case BUILT_IN_EXPECT:
10372 return fold_builtin_expect (arg0, arg1);
10373
10374 CASE_FLT_FN (BUILT_IN_POW):
10375 return fold_builtin_pow (fndecl, arg0, arg1, type);
10376
10377 CASE_FLT_FN (BUILT_IN_POWI):
10378 return fold_builtin_powi (fndecl, arg0, arg1, type);
10379
10380 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10381 return fold_builtin_copysign (fndecl, arg0, arg1, type);
10382
10383 CASE_FLT_FN (BUILT_IN_FMIN):
10384 return fold_builtin_fmin_fmax (arg0, arg1, type, /*max=*/false);
10385
10386 CASE_FLT_FN (BUILT_IN_FMAX):
10387 return fold_builtin_fmin_fmax (arg0, arg1, type, /*max=*/true);
10388
10389 case BUILT_IN_ISGREATER:
10390 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNLE_EXPR, LE_EXPR);
10391 case BUILT_IN_ISGREATEREQUAL:
10392 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNLT_EXPR, LT_EXPR);
10393 case BUILT_IN_ISLESS:
10394 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNGE_EXPR, GE_EXPR);
10395 case BUILT_IN_ISLESSEQUAL:
10396 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNGT_EXPR, GT_EXPR);
10397 case BUILT_IN_ISLESSGREATER:
10398 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10399 case BUILT_IN_ISUNORDERED:
10400 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNORDERED_EXPR,
10401 NOP_EXPR);
10402
10403 /* We do the folding for va_start in the expander. */
10404 case BUILT_IN_VA_START:
10405 break;
10406
10407 case BUILT_IN_SPRINTF:
10408 return fold_builtin_sprintf (arg0, arg1, NULL_TREE, ignore);
10409
10410 case BUILT_IN_OBJECT_SIZE:
10411 return fold_builtin_object_size (arg0, arg1);
10412
10413 case BUILT_IN_PRINTF:
10414 case BUILT_IN_PRINTF_UNLOCKED:
10415 case BUILT_IN_VPRINTF:
10416 return fold_builtin_printf (fndecl, arg0, arg1, ignore, fcode);
10417
10418 case BUILT_IN_PRINTF_CHK:
10419 case BUILT_IN_VPRINTF_CHK:
10420 if (!validate_arg (arg0, INTEGER_TYPE)
10421 || TREE_SIDE_EFFECTS (arg0))
10422 return NULL_TREE;
10423 else
10424 return fold_builtin_printf (fndecl, arg1, NULL_TREE, ignore, fcode);
10425 break;
10426
10427 case BUILT_IN_FPRINTF:
10428 case BUILT_IN_FPRINTF_UNLOCKED:
10429 case BUILT_IN_VFPRINTF:
10430 return fold_builtin_fprintf (fndecl, arg0, arg1, NULL_TREE,
10431 ignore, fcode);
10432
10433 default:
10434 break;
10435 }
10436 return NULL_TREE;
10437 }
10438
10439 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10440 and ARG2. IGNORE is true if the result of the function call is ignored.
10441 This function returns NULL_TREE if no simplification was possible. */
10442
10443 static tree
10444 fold_builtin_3 (tree fndecl, tree arg0, tree arg1, tree arg2, bool ignore)
10445 {
10446 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10447 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10448 switch (fcode)
10449 {
10450
10451 CASE_FLT_FN (BUILT_IN_SINCOS):
10452 return fold_builtin_sincos (arg0, arg1, arg2);
10453
10454 CASE_FLT_FN (BUILT_IN_FMA):
10455 if (validate_arg (arg0, REAL_TYPE)
10456 && validate_arg(arg1, REAL_TYPE)
10457 && validate_arg(arg2, REAL_TYPE))
10458 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
10459 break;
10460
10461 CASE_FLT_FN (BUILT_IN_REMQUO):
10462 if (validate_arg (arg0, REAL_TYPE)
10463 && validate_arg(arg1, REAL_TYPE)
10464 && validate_arg(arg2, POINTER_TYPE))
10465 return do_mpfr_remquo (arg0, arg1, arg2);
10466 break;
10467
10468 case BUILT_IN_MEMSET:
10469 return fold_builtin_memset (arg0, arg1, arg2, type, ignore);
10470
10471 case BUILT_IN_BCOPY:
10472 return fold_builtin_memory_op (arg1, arg0, arg2, void_type_node, true, /*endp=*/3);
10473
10474 case BUILT_IN_MEMCPY:
10475 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/0);
10476
10477 case BUILT_IN_MEMPCPY:
10478 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/1);
10479
10480 case BUILT_IN_MEMMOVE:
10481 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/3);
10482
10483 case BUILT_IN_STRNCAT:
10484 return fold_builtin_strncat (arg0, arg1, arg2);
10485
10486 case BUILT_IN_STRNCPY:
10487 return fold_builtin_strncpy (fndecl, arg0, arg1, arg2, NULL_TREE);
10488
10489 case BUILT_IN_STRNCMP:
10490 return fold_builtin_strncmp (arg0, arg1, arg2);
10491
10492 case BUILT_IN_MEMCHR:
10493 return fold_builtin_memchr (arg0, arg1, arg2, type);
10494
10495 case BUILT_IN_BCMP:
10496 case BUILT_IN_MEMCMP:
10497 return fold_builtin_memcmp (arg0, arg1, arg2);;
10498
10499 case BUILT_IN_SPRINTF:
10500 return fold_builtin_sprintf (arg0, arg1, arg2, ignore);
10501
10502 case BUILT_IN_STRCPY_CHK:
10503 case BUILT_IN_STPCPY_CHK:
10504 return fold_builtin_stxcpy_chk (fndecl, arg0, arg1, arg2, NULL_TREE,
10505 ignore, fcode);
10506
10507 case BUILT_IN_STRCAT_CHK:
10508 return fold_builtin_strcat_chk (fndecl, arg0, arg1, arg2);
10509
10510 case BUILT_IN_PRINTF_CHK:
10511 case BUILT_IN_VPRINTF_CHK:
10512 if (!validate_arg (arg0, INTEGER_TYPE)
10513 || TREE_SIDE_EFFECTS (arg0))
10514 return NULL_TREE;
10515 else
10516 return fold_builtin_printf (fndecl, arg1, arg2, ignore, fcode);
10517 break;
10518
10519 case BUILT_IN_FPRINTF:
10520 case BUILT_IN_FPRINTF_UNLOCKED:
10521 case BUILT_IN_VFPRINTF:
10522 return fold_builtin_fprintf (fndecl, arg0, arg1, arg2, ignore, fcode);
10523
10524 case BUILT_IN_FPRINTF_CHK:
10525 case BUILT_IN_VFPRINTF_CHK:
10526 if (!validate_arg (arg1, INTEGER_TYPE)
10527 || TREE_SIDE_EFFECTS (arg1))
10528 return NULL_TREE;
10529 else
10530 return fold_builtin_fprintf (fndecl, arg0, arg2, NULL_TREE,
10531 ignore, fcode);
10532
10533 default:
10534 break;
10535 }
10536 return NULL_TREE;
10537 }
10538
10539 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10540 ARG2, and ARG3. IGNORE is true if the result of the function call is
10541 ignored. This function returns NULL_TREE if no simplification was
10542 possible. */
10543
10544 static tree
10545 fold_builtin_4 (tree fndecl, tree arg0, tree arg1, tree arg2, tree arg3,
10546 bool ignore)
10547 {
10548 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10549
10550 switch (fcode)
10551 {
10552 case BUILT_IN_MEMCPY_CHK:
10553 case BUILT_IN_MEMPCPY_CHK:
10554 case BUILT_IN_MEMMOVE_CHK:
10555 case BUILT_IN_MEMSET_CHK:
10556 return fold_builtin_memory_chk (fndecl, arg0, arg1, arg2, arg3,
10557 NULL_TREE, ignore,
10558 DECL_FUNCTION_CODE (fndecl));
10559
10560 case BUILT_IN_STRNCPY_CHK:
10561 return fold_builtin_strncpy_chk (arg0, arg1, arg2, arg3, NULL_TREE);
10562
10563 case BUILT_IN_STRNCAT_CHK:
10564 return fold_builtin_strncat_chk (fndecl, arg0, arg1, arg2, arg3);
10565
10566 case BUILT_IN_FPRINTF_CHK:
10567 case BUILT_IN_VFPRINTF_CHK:
10568 if (!validate_arg (arg1, INTEGER_TYPE)
10569 || TREE_SIDE_EFFECTS (arg1))
10570 return NULL_TREE;
10571 else
10572 return fold_builtin_fprintf (fndecl, arg0, arg2, arg3,
10573 ignore, fcode);
10574 break;
10575
10576 default:
10577 break;
10578 }
10579 return NULL_TREE;
10580 }
10581
10582 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10583 arguments, where NARGS <= 4. IGNORE is true if the result of the
10584 function call is ignored. This function returns NULL_TREE if no
10585 simplification was possible. Note that this only folds builtins with
10586 fixed argument patterns. Foldings that do varargs-to-varargs
10587 transformations, or that match calls with more than 4 arguments,
10588 need to be handled with fold_builtin_varargs instead. */
10589
10590 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10591
10592 static tree
10593 fold_builtin_n (tree fndecl, tree *args, int nargs, bool ignore)
10594 {
10595 tree ret = NULL_TREE;
10596
10597 switch (nargs)
10598 {
10599 case 0:
10600 ret = fold_builtin_0 (fndecl, ignore);
10601 break;
10602 case 1:
10603 ret = fold_builtin_1 (fndecl, args[0], ignore);
10604 break;
10605 case 2:
10606 ret = fold_builtin_2 (fndecl, args[0], args[1], ignore);
10607 break;
10608 case 3:
10609 ret = fold_builtin_3 (fndecl, args[0], args[1], args[2], ignore);
10610 break;
10611 case 4:
10612 ret = fold_builtin_4 (fndecl, args[0], args[1], args[2], args[3],
10613 ignore);
10614 break;
10615 default:
10616 break;
10617 }
10618 if (ret)
10619 {
10620 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10621 TREE_NO_WARNING (ret) = 1;
10622 return ret;
10623 }
10624 return NULL_TREE;
10625 }
10626
10627 /* Builtins with folding operations that operate on "..." arguments
10628 need special handling; we need to store the arguments in a convenient
10629 data structure before attempting any folding. Fortunately there are
10630 only a few builtins that fall into this category. FNDECL is the
10631 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
10632 result of the function call is ignored. */
10633
10634 static tree
10635 fold_builtin_varargs (tree fndecl, tree exp, bool ignore ATTRIBUTE_UNUSED)
10636 {
10637 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10638 tree ret = NULL_TREE;
10639
10640 switch (fcode)
10641 {
10642 case BUILT_IN_SPRINTF_CHK:
10643 case BUILT_IN_VSPRINTF_CHK:
10644 ret = fold_builtin_sprintf_chk (exp, fcode);
10645 break;
10646
10647 case BUILT_IN_SNPRINTF_CHK:
10648 case BUILT_IN_VSNPRINTF_CHK:
10649 ret = fold_builtin_snprintf_chk (exp, NULL_TREE, fcode);
10650 break;
10651
10652 case BUILT_IN_FPCLASSIFY:
10653 ret = fold_builtin_fpclassify (exp);
10654 break;
10655
10656 default:
10657 break;
10658 }
10659 if (ret)
10660 {
10661 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10662 TREE_NO_WARNING (ret) = 1;
10663 return ret;
10664 }
10665 return NULL_TREE;
10666 }
10667
/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  EXP is the
   CALL_EXPR to fold; IGNORE is true if the call's value is unused.
   Returns the folded replacement tree, or NULL_TREE if no folding
   was done.  */

tree
fold_call_expr (tree exp, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = get_callee_fndecl (exp);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
	 yet.  Defer folding until we see all the arguments
	 (after inlining).  */
      && !CALL_EXPR_VA_ARG_PACK (exp))
    {
      int nargs = call_expr_nargs (exp);

      /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
	 instead last argument is __builtin_va_arg_pack ().  Defer folding
	 even in that case, until arguments are finalized.  */
      if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
	{
	  tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
	  if (fndecl2
	      && TREE_CODE (fndecl2) == FUNCTION_DECL
	      && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
	    return NULL_TREE;
	}

      /* Machine-dependent builtins go through the target hook.
	 FIXME: Don't use a list in this interface.  */
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	return targetm.fold_builtin (fndecl, CALL_EXPR_ARGS (exp), ignore);
      else
	{
	  /* First try the fixed-arity folders, then fall back to the
	     varargs folder.  */
	  if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
	    {
	      tree *args = CALL_EXPR_ARGP (exp);
	      ret = fold_builtin_n (fndecl, args, nargs, ignore);
	    }
	  if (!ret)
	    ret = fold_builtin_varargs (fndecl, exp, ignore);
	  if (ret)
	    {
	      /* Propagate location information from original call to
		 expansion of builtin.  Otherwise things like
		 maybe_emit_chk_warning, that operate on the expansion
		 of a builtin, will use the wrong location information.  */
	      if (CAN_HAVE_LOCATION_P (exp) && EXPR_HAS_LOCATION (exp))
		{
		  tree realret = ret;
		  /* Look through the NOP_EXPR wrapper the folders add.  */
		  if (TREE_CODE (ret) == NOP_EXPR)
		    realret = TREE_OPERAND (ret, 0);
		  if (CAN_HAVE_LOCATION_P (realret)
		      && !EXPR_HAS_LOCATION (realret))
		    SET_EXPR_LOCATION (realret, EXPR_LOCATION (exp));
		  return realret;
		}
	      return ret;
	    }
	}
    }
  return NULL_TREE;
}
10734
10735 /* Conveniently construct a function call expression. FNDECL names the
10736 function to be called and ARGLIST is a TREE_LIST of arguments. */
10737
10738 tree
10739 build_function_call_expr (tree fndecl, tree arglist)
10740 {
10741 tree fntype = TREE_TYPE (fndecl);
10742 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10743 int n = list_length (arglist);
10744 tree *argarray = (tree *) alloca (n * sizeof (tree));
10745 int i;
10746
10747 for (i = 0; i < n; i++, arglist = TREE_CHAIN (arglist))
10748 argarray[i] = TREE_VALUE (arglist);
10749 return fold_builtin_call_array (TREE_TYPE (fntype), fn, n, argarray);
10750 }
10751
10752 /* Conveniently construct a function call expression. FNDECL names the
10753 function to be called, N is the number of arguments, and the "..."
10754 parameters are the argument expressions. */
10755
10756 tree
10757 build_call_expr (tree fndecl, int n, ...)
10758 {
10759 va_list ap;
10760 tree fntype = TREE_TYPE (fndecl);
10761 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10762 tree *argarray = (tree *) alloca (n * sizeof (tree));
10763 int i;
10764
10765 va_start (ap, n);
10766 for (i = 0; i < n; i++)
10767 argarray[i] = va_arg (ap, tree);
10768 va_end (ap);
10769 return fold_builtin_call_array (TREE_TYPE (fntype), fn, n, argarray);
10770 }
10771
/* Construct a CALL_EXPR with type TYPE with FN as the function expression.
   N arguments are passed in the array ARGARRAY.  The call is folded
   where possible; if nothing folds, a plain CALL_EXPR is returned.  */

tree
fold_builtin_call_array (tree type,
			 tree fn,
			 int n,
			 tree *argarray)
{
  tree ret = NULL_TREE;
  int i;
  tree exp;

  if (TREE_CODE (fn) == ADDR_EXPR)
    {
      tree fndecl = TREE_OPERAND (fn, 0);
      if (TREE_CODE (fndecl) == FUNCTION_DECL
	  && DECL_BUILT_IN (fndecl))
	{
	  /* If last argument is __builtin_va_arg_pack (), arguments to this
	     function are not finalized yet.  Defer folding until they are.  */
	  if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
	    {
	      tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
	      if (fndecl2
		  && TREE_CODE (fndecl2) == FUNCTION_DECL
		  && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
		  && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
		return build_call_array (type, fn, n, argarray);
	    }
	  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	    {
	      /* Machine-dependent builtin: the target hook wants a
		 TREE_LIST, so rebuild one from the array.  */
	      tree arglist = NULL_TREE;
	      for (i = n - 1; i >= 0; i--)
		arglist = tree_cons (NULL_TREE, argarray[i], arglist);
	      ret = targetm.fold_builtin (fndecl, arglist, false);
	      if (ret)
		return ret;
	    }
	  else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
	    {
	      /* First try the transformations that don't require consing up
		 an exp.  */
	      ret = fold_builtin_n (fndecl, argarray, n, false);
	      if (ret)
		return ret;
	    }

	  /* If we got this far, we need to build an exp.  */
	  exp = build_call_array (type, fn, n, argarray);
	  ret = fold_builtin_varargs (fndecl, exp, false);
	  return ret ? ret : exp;
	}
    }

  return build_call_array (type, fn, n, argarray);
}
10829
/* Construct a new CALL_EXPR using the tail of the argument list of EXP
   along with N new arguments specified as the "..." parameters.  SKIP
   is the number of arguments in EXP to be omitted.  This function is used
   to do varargs-to-varargs transformations.  */

static tree
rewrite_call_expr (tree exp, int skip, tree fndecl, int n, ...)
{
  int oldnargs = call_expr_nargs (exp);
  int nargs = oldnargs - skip + n;
  tree fntype = TREE_TYPE (fndecl);
  tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
  tree *buffer;

  if (n > 0)
    {
      int i, j;
      va_list ap;

      /* New arguments come first, then the surviving tail of EXP's
	 arguments.  */
      buffer = XALLOCAVEC (tree, nargs);
      va_start (ap, n);
      for (i = 0; i < n; i++)
	buffer[i] = va_arg (ap, tree);
      va_end (ap);
      for (j = skip; j < oldnargs; j++, i++)
	buffer[i] = CALL_EXPR_ARG (exp, j);
    }
  else
    /* No new arguments: point directly into EXP's argument array
       rather than copying; build_call_array copies from it below.  */
    buffer = CALL_EXPR_ARGP (exp) + skip;

  return fold (build_call_array (TREE_TYPE (exp), fn, nargs, buffer));
}
10862
10863 /* Validate a single argument ARG against a tree code CODE representing
10864 a type. */
10865
10866 static bool
10867 validate_arg (const_tree arg, enum tree_code code)
10868 {
10869 if (!arg)
10870 return false;
10871 else if (code == POINTER_TYPE)
10872 return POINTER_TYPE_P (TREE_TYPE (arg));
10873 else if (code == INTEGER_TYPE)
10874 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10875 return code == TREE_CODE (TREE_TYPE (arg));
10876 }
10877
10878 /* This function validates the types of a function call argument list
10879 against a specified list of tree_codes. If the last specifier is a 0,
10880 that represents an ellipses, otherwise the last specifier must be a
10881 VOID_TYPE.
10882
10883 This is the GIMPLE version of validate_arglist. Eventually we want to
10884 completely convert builtins.c to work from GIMPLEs and the tree based
10885 validate_arglist will then be removed. */
10886
10887 bool
10888 validate_gimple_arglist (const_gimple call, ...)
10889 {
10890 enum tree_code code;
10891 bool res = 0;
10892 va_list ap;
10893 const_tree arg;
10894 size_t i;
10895
10896 va_start (ap, call);
10897 i = 0;
10898
10899 do
10900 {
10901 code = va_arg (ap, enum tree_code);
10902 switch (code)
10903 {
10904 case 0:
10905 /* This signifies an ellipses, any further arguments are all ok. */
10906 res = true;
10907 goto end;
10908 case VOID_TYPE:
10909 /* This signifies an endlink, if no arguments remain, return
10910 true, otherwise return false. */
10911 res = (i == gimple_call_num_args (call));
10912 goto end;
10913 default:
10914 /* If no parameters remain or the parameter's code does not
10915 match the specified code, return false. Otherwise continue
10916 checking any remaining arguments. */
10917 arg = gimple_call_arg (call, i++);
10918 if (!validate_arg (arg, code))
10919 goto end;
10920 break;
10921 }
10922 }
10923 while (1);
10924
10925 /* We need gotos here since we can only have one VA_CLOSE in a
10926 function. */
10927 end: ;
10928 va_end (ap);
10929
10930 return res;
10931 }
10932
10933 /* This function validates the types of a function call argument list
10934 against a specified list of tree_codes. If the last specifier is a 0,
10935 that represents an ellipses, otherwise the last specifier must be a
10936 VOID_TYPE. */
10937
10938 bool
10939 validate_arglist (const_tree callexpr, ...)
10940 {
10941 enum tree_code code;
10942 bool res = 0;
10943 va_list ap;
10944 const_call_expr_arg_iterator iter;
10945 const_tree arg;
10946
10947 va_start (ap, callexpr);
10948 init_const_call_expr_arg_iterator (callexpr, &iter);
10949
10950 do
10951 {
10952 code = va_arg (ap, enum tree_code);
10953 switch (code)
10954 {
10955 case 0:
10956 /* This signifies an ellipses, any further arguments are all ok. */
10957 res = true;
10958 goto end;
10959 case VOID_TYPE:
10960 /* This signifies an endlink, if no arguments remain, return
10961 true, otherwise return false. */
10962 res = !more_const_call_expr_args_p (&iter);
10963 goto end;
10964 default:
10965 /* If no parameters remain or the parameter's code does not
10966 match the specified code, return false. Otherwise continue
10967 checking any remaining arguments. */
10968 arg = next_const_call_expr_arg (&iter);
10969 if (!validate_arg (arg, code))
10970 goto end;
10971 break;
10972 }
10973 }
10974 while (1);
10975
10976 /* We need gotos here since we can only have one VA_CLOSE in a
10977 function. */
10978 end: ;
10979 va_end (ap);
10980
10981 return res;
10982 }
10983
10984 /* Default target-specific builtin expander that does nothing. */
10985
rtx
default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
			rtx target ATTRIBUTE_UNUSED,
			rtx subtarget ATTRIBUTE_UNUSED,
			enum machine_mode mode ATTRIBUTE_UNUSED,
			int ignore ATTRIBUTE_UNUSED)
{
  /* NULL_RTX signals "no target-specific expansion was performed",
     letting the generic code fall back to a normal call.  */
  return NULL_RTX;
}
10995
/* Returns true if EXP represents data that would potentially reside
   in a readonly section.  */
10998
10999 static bool
11000 readonly_data_expr (tree exp)
11001 {
11002 STRIP_NOPS (exp);
11003
11004 if (TREE_CODE (exp) != ADDR_EXPR)
11005 return false;
11006
11007 exp = get_base_address (TREE_OPERAND (exp, 0));
11008 if (!exp)
11009 return false;
11010
11011 /* Make sure we call decl_readonly_section only for trees it
11012 can handle (since it returns true for everything it doesn't
11013 understand). */
11014 if (TREE_CODE (exp) == STRING_CST
11015 || TREE_CODE (exp) == CONSTRUCTOR
11016 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
11017 return decl_readonly_section (exp, 0);
11018 else
11019 return false;
11020 }
11021
11022 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
11023 to the call, and TYPE is its return type.
11024
11025 Return NULL_TREE if no simplification was possible, otherwise return the
11026 simplified form of the call as a tree.
11027
11028 The simplified form may be a constant or other expression which
11029 computes the same value, but in a more efficient manner (including
11030 calls to other builtin functions).
11031
11032 The call may contain arguments which need to be evaluated, but
11033 which are not useful to determine the result of the call. In
11034 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11035 COMPOUND_EXPR will be an argument which must be evaluated.
11036 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11037 COMPOUND_EXPR in the chain will contain the tree for the simplified
11038 form of the builtin function call. */
11039
11040 static tree
11041 fold_builtin_strstr (tree s1, tree s2, tree type)
11042 {
11043 if (!validate_arg (s1, POINTER_TYPE)
11044 || !validate_arg (s2, POINTER_TYPE))
11045 return NULL_TREE;
11046 else
11047 {
11048 tree fn;
11049 const char *p1, *p2;
11050
11051 p2 = c_getstr (s2);
11052 if (p2 == NULL)
11053 return NULL_TREE;
11054
11055 p1 = c_getstr (s1);
11056 if (p1 != NULL)
11057 {
11058 const char *r = strstr (p1, p2);
11059 tree tem;
11060
11061 if (r == NULL)
11062 return build_int_cst (TREE_TYPE (s1), 0);
11063
11064 /* Return an offset into the constant string argument. */
11065 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11066 s1, size_int (r - p1));
11067 return fold_convert (type, tem);
11068 }
11069
11070 /* The argument is const char *, and the result is char *, so we need
11071 a type conversion here to avoid a warning. */
11072 if (p2[0] == '\0')
11073 return fold_convert (type, s1);
11074
11075 if (p2[1] != '\0')
11076 return NULL_TREE;
11077
11078 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11079 if (!fn)
11080 return NULL_TREE;
11081
11082 /* New argument list transforming strstr(s1, s2) to
11083 strchr(s1, s2[0]). */
11084 return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
11085 }
11086 }
11087
11088 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
11089 the call, and TYPE is its return type.
11090
11091 Return NULL_TREE if no simplification was possible, otherwise return the
11092 simplified form of the call as a tree.
11093
11094 The simplified form may be a constant or other expression which
11095 computes the same value, but in a more efficient manner (including
11096 calls to other builtin functions).
11097
11098 The call may contain arguments which need to be evaluated, but
11099 which are not useful to determine the result of the call. In
11100 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11101 COMPOUND_EXPR will be an argument which must be evaluated.
11102 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11103 COMPOUND_EXPR in the chain will contain the tree for the simplified
11104 form of the builtin function call. */
11105
11106 static tree
11107 fold_builtin_strchr (tree s1, tree s2, tree type)
11108 {
11109 if (!validate_arg (s1, POINTER_TYPE)
11110 || !validate_arg (s2, INTEGER_TYPE))
11111 return NULL_TREE;
11112 else
11113 {
11114 const char *p1;
11115
11116 if (TREE_CODE (s2) != INTEGER_CST)
11117 return NULL_TREE;
11118
11119 p1 = c_getstr (s1);
11120 if (p1 != NULL)
11121 {
11122 char c;
11123 const char *r;
11124 tree tem;
11125
11126 if (target_char_cast (s2, &c))
11127 return NULL_TREE;
11128
11129 r = strchr (p1, c);
11130
11131 if (r == NULL)
11132 return build_int_cst (TREE_TYPE (s1), 0);
11133
11134 /* Return an offset into the constant string argument. */
11135 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11136 s1, size_int (r - p1));
11137 return fold_convert (type, tem);
11138 }
11139 return NULL_TREE;
11140 }
11141 }
11142
11143 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
11144 the call, and TYPE is its return type.
11145
11146 Return NULL_TREE if no simplification was possible, otherwise return the
11147 simplified form of the call as a tree.
11148
11149 The simplified form may be a constant or other expression which
11150 computes the same value, but in a more efficient manner (including
11151 calls to other builtin functions).
11152
11153 The call may contain arguments which need to be evaluated, but
11154 which are not useful to determine the result of the call. In
11155 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11156 COMPOUND_EXPR will be an argument which must be evaluated.
11157 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11158 COMPOUND_EXPR in the chain will contain the tree for the simplified
11159 form of the builtin function call. */
11160
static tree
fold_builtin_strrchr (tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      tree fn;
      const char *p1;

      /* The searched-for character must be a compile-time constant.  */
      if (TREE_CODE (s2) != INTEGER_CST)
	return NULL_TREE;

      p1 = c_getstr (s1);
      if (p1 != NULL)
	{
	  char c;
	  const char *r;
	  tree tem;

	  /* Give up if S2 cannot be narrowed to a single character.  */
	  if (target_char_cast (s2, &c))
	    return NULL_TREE;

	  /* Both operands constant: do the search at compile time.  */
	  r = strrchr (p1, c);

	  if (r == NULL)
	    return build_int_cst (TREE_TYPE (s1), 0);

	  /* Return an offset into the constant string argument.  */
	  tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
			     s1, size_int (r - p1));
	  return fold_convert (type, tem);
	}

      /* Without a constant S1, only the search for '\0' can be
	 simplified: the last NUL is also the first.  */
      if (! integer_zerop (s2))
	return NULL_TREE;

      fn = implicit_built_in_decls[BUILT_IN_STRCHR];
      if (!fn)
	return NULL_TREE;

      /* Transform strrchr(s1, '\0') to strchr(s1, '\0').  */
      return build_call_expr (fn, 2, s1, s2);
    }
}
11207
11208 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11209 to the call, and TYPE is its return type.
11210
11211 Return NULL_TREE if no simplification was possible, otherwise return the
11212 simplified form of the call as a tree.
11213
11214 The simplified form may be a constant or other expression which
11215 computes the same value, but in a more efficient manner (including
11216 calls to other builtin functions).
11217
11218 The call may contain arguments which need to be evaluated, but
11219 which are not useful to determine the result of the call. In
11220 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11221 COMPOUND_EXPR will be an argument which must be evaluated.
11222 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11223 COMPOUND_EXPR in the chain will contain the tree for the simplified
11224 form of the builtin function call. */
11225
static tree
fold_builtin_strpbrk (tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      tree fn;
      const char *p1, *p2;

      /* The accept-set S2 must be a known constant string.  */
      p2 = c_getstr (s2);
      if (p2 == NULL)
	return NULL_TREE;

      p1 = c_getstr (s1);
      if (p1 != NULL)
	{
	  /* Both strings constant: do the search at compile time.  */
	  const char *r = strpbrk (p1, p2);
	  tree tem;

	  if (r == NULL)
	    return build_int_cst (TREE_TYPE (s1), 0);

	  /* Return an offset into the constant string argument.  */
	  tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
			     s1, size_int (r - p1));
	  return fold_convert (type, tem);
	}

      if (p2[0] == '\0')
	/* strpbrk(x, "") == NULL.
	   Evaluate and ignore s1 in case it had side-effects.  */
	return omit_one_operand (TREE_TYPE (s1), integer_zero_node, s1);

      /* Multi-character accept-sets are not simplified further.  */
      if (p2[1] != '\0')
	return NULL_TREE;  /* Really call strpbrk.  */

      fn = implicit_built_in_decls[BUILT_IN_STRCHR];
      if (!fn)
	return NULL_TREE;

      /* New argument list transforming strpbrk(s1, s2) to
	 strchr(s1, s2[0]).  */
      return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
    }
}
11273
11274 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11275 to the call.
11276
11277 Return NULL_TREE if no simplification was possible, otherwise return the
11278 simplified form of the call as a tree.
11279
11280 The simplified form may be a constant or other expression which
11281 computes the same value, but in a more efficient manner (including
11282 calls to other builtin functions).
11283
11284 The call may contain arguments which need to be evaluated, but
11285 which are not useful to determine the result of the call. In
11286 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11287 COMPOUND_EXPR will be an argument which must be evaluated.
11288 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11289 COMPOUND_EXPR in the chain will contain the tree for the simplified
11290 form of the builtin function call. */
11291
11292 static tree
11293 fold_builtin_strcat (tree dst, tree src)
11294 {
11295 if (!validate_arg (dst, POINTER_TYPE)
11296 || !validate_arg (src, POINTER_TYPE))
11297 return NULL_TREE;
11298 else
11299 {
11300 const char *p = c_getstr (src);
11301
11302 /* If the string length is zero, return the dst parameter. */
11303 if (p && *p == '\0')
11304 return dst;
11305
11306 return NULL_TREE;
11307 }
11308 }
11309
11310 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11311 arguments to the call.
11312
11313 Return NULL_TREE if no simplification was possible, otherwise return the
11314 simplified form of the call as a tree.
11315
11316 The simplified form may be a constant or other expression which
11317 computes the same value, but in a more efficient manner (including
11318 calls to other builtin functions).
11319
11320 The call may contain arguments which need to be evaluated, but
11321 which are not useful to determine the result of the call. In
11322 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11323 COMPOUND_EXPR will be an argument which must be evaluated.
11324 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11325 COMPOUND_EXPR in the chain will contain the tree for the simplified
11326 form of the builtin function call. */
11327
11328 static tree
11329 fold_builtin_strncat (tree dst, tree src, tree len)
11330 {
11331 if (!validate_arg (dst, POINTER_TYPE)
11332 || !validate_arg (src, POINTER_TYPE)
11333 || !validate_arg (len, INTEGER_TYPE))
11334 return NULL_TREE;
11335 else
11336 {
11337 const char *p = c_getstr (src);
11338
11339 /* If the requested length is zero, or the src parameter string
11340 length is zero, return the dst parameter. */
11341 if (integer_zerop (len) || (p && *p == '\0'))
11342 return omit_two_operands (TREE_TYPE (dst), dst, src, len);
11343
11344 /* If the requested len is greater than or equal to the string
11345 length, call strcat. */
11346 if (TREE_CODE (len) == INTEGER_CST && p
11347 && compare_tree_int (len, strlen (p)) >= 0)
11348 {
11349 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
11350
11351 /* If the replacement _DECL isn't initialized, don't do the
11352 transformation. */
11353 if (!fn)
11354 return NULL_TREE;
11355
11356 return build_call_expr (fn, 2, dst, src);
11357 }
11358 return NULL_TREE;
11359 }
11360 }
11361
11362 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11363 to the call.
11364
11365 Return NULL_TREE if no simplification was possible, otherwise return the
11366 simplified form of the call as a tree.
11367
11368 The simplified form may be a constant or other expression which
11369 computes the same value, but in a more efficient manner (including
11370 calls to other builtin functions).
11371
11372 The call may contain arguments which need to be evaluated, but
11373 which are not useful to determine the result of the call. In
11374 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11375 COMPOUND_EXPR will be an argument which must be evaluated.
11376 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11377 COMPOUND_EXPR in the chain will contain the tree for the simplified
11378 form of the builtin function call. */
11379
11380 static tree
11381 fold_builtin_strspn (tree s1, tree s2)
11382 {
11383 if (!validate_arg (s1, POINTER_TYPE)
11384 || !validate_arg (s2, POINTER_TYPE))
11385 return NULL_TREE;
11386 else
11387 {
11388 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11389
11390 /* If both arguments are constants, evaluate at compile-time. */
11391 if (p1 && p2)
11392 {
11393 const size_t r = strspn (p1, p2);
11394 return size_int (r);
11395 }
11396
11397 /* If either argument is "", return NULL_TREE. */
11398 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11399 /* Evaluate and ignore both arguments in case either one has
11400 side-effects. */
11401 return omit_two_operands (integer_type_node, integer_zero_node,
11402 s1, s2);
11403 return NULL_TREE;
11404 }
11405 }
11406
11407 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11408 to the call.
11409
11410 Return NULL_TREE if no simplification was possible, otherwise return the
11411 simplified form of the call as a tree.
11412
11413 The simplified form may be a constant or other expression which
11414 computes the same value, but in a more efficient manner (including
11415 calls to other builtin functions).
11416
11417 The call may contain arguments which need to be evaluated, but
11418 which are not useful to determine the result of the call. In
11419 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11420 COMPOUND_EXPR will be an argument which must be evaluated.
11421 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11422 COMPOUND_EXPR in the chain will contain the tree for the simplified
11423 form of the builtin function call. */
11424
static tree
fold_builtin_strcspn (tree s1, tree s2)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);

      /* If both arguments are constants, evaluate at compile-time.  */
      if (p1 && p2)
	{
	  const size_t r = strcspn (p1, p2);
	  return size_int (r);
	}

      /* strcspn ("", s2) is 0 for any S2.  */
      if (p1 && *p1 == '\0')
	{
	  /* Evaluate and ignore argument s2 in case it has
	     side-effects.  */
	  return omit_one_operand (integer_type_node,
				   integer_zero_node, s2);
	}

      /* If the second argument is "", return __builtin_strlen(s1):
	 with an empty reject-set, the span is the whole string.  */
      if (p2 && *p2 == '\0')
	{
	  tree fn = implicit_built_in_decls[BUILT_IN_STRLEN];

	  /* If the replacement _DECL isn't initialized, don't do the
	     transformation.  */
	  if (!fn)
	    return NULL_TREE;

	  return build_call_expr (fn, 1, s1);
	}
      return NULL_TREE;
    }
}
11466
/* Fold a call to the fputs builtin.  ARG0 and ARG1 are the arguments
   to the call.  IGNORE is true if the value returned
   by the builtin will be ignored.  UNLOCKED is true if this is
   actually a call to fputs_unlocked.  If LEN is non-NULL, it represents
   the known length of the string.  Return NULL_TREE if no simplification
   was possible.  */
11473
11474 tree
11475 fold_builtin_fputs (tree arg0, tree arg1, bool ignore, bool unlocked, tree len)
11476 {
11477 /* If we're using an unlocked function, assume the other unlocked
11478 functions exist explicitly. */
11479 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
11480 : implicit_built_in_decls[BUILT_IN_FPUTC];
11481 tree const fn_fwrite = unlocked ? built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
11482 : implicit_built_in_decls[BUILT_IN_FWRITE];
11483
11484 /* If the return value is used, don't do the transformation. */
11485 if (!ignore)
11486 return NULL_TREE;
11487
11488 /* Verify the arguments in the original call. */
11489 if (!validate_arg (arg0, POINTER_TYPE)
11490 || !validate_arg (arg1, POINTER_TYPE))
11491 return NULL_TREE;
11492
11493 if (! len)
11494 len = c_strlen (arg0, 0);
11495
11496 /* Get the length of the string passed to fputs. If the length
11497 can't be determined, punt. */
11498 if (!len
11499 || TREE_CODE (len) != INTEGER_CST)
11500 return NULL_TREE;
11501
11502 switch (compare_tree_int (len, 1))
11503 {
11504 case -1: /* length is 0, delete the call entirely . */
11505 return omit_one_operand (integer_type_node, integer_zero_node, arg1);;
11506
11507 case 0: /* length is 1, call fputc. */
11508 {
11509 const char *p = c_getstr (arg0);
11510
11511 if (p != NULL)
11512 {
11513 if (fn_fputc)
11514 return build_call_expr (fn_fputc, 2,
11515 build_int_cst (NULL_TREE, p[0]), arg1);
11516 else
11517 return NULL_TREE;
11518 }
11519 }
11520 /* FALLTHROUGH */
11521 case 1: /* length is greater than 1, call fwrite. */
11522 {
11523 /* If optimizing for size keep fputs. */
11524 if (optimize_function_for_size_p (cfun))
11525 return NULL_TREE;
11526 /* New argument list transforming fputs(string, stream) to
11527 fwrite(string, 1, len, stream). */
11528 if (fn_fwrite)
11529 return build_call_expr (fn_fwrite, 4, arg0, size_one_node, len, arg1);
11530 else
11531 return NULL_TREE;
11532 }
11533 default:
11534 gcc_unreachable ();
11535 }
11536 return NULL_TREE;
11537 }
11538
11539 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
11540 produced. False otherwise. This is done so that we don't output the error
11541 or warning twice or three times. */
11542
bool
fold_builtin_next_arg (tree exp, bool va_start_p)
{
  tree fntype = TREE_TYPE (current_function_decl);
  int nargs = call_expr_nargs (exp);
  tree arg;

  /* va_start is only meaningful in a varargs function: the argument
     type list must be absent or not end in void.  */
  if (TYPE_ARG_TYPES (fntype) == 0
      || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
	  == void_type_node))
    {
      error ("%<va_start%> used in function with fixed args");
      return true;
    }

  if (va_start_p)
    {
      if (va_start_p && (nargs != 2))
	{
	  error ("wrong number of arguments to function %<va_start%>");
	  return true;
	}
      /* The second operand of va_start is the last named parameter.  */
      arg = CALL_EXPR_ARG (exp, 1);
    }
  /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
     when we checked the arguments and if needed issued a warning.  */
  else
    {
      if (nargs == 0)
	{
	  /* Evidently an out of date version of <stdarg.h>; can't validate
	     va_start's second argument, but can still work as intended.  */
	  warning (0, "%<__builtin_next_arg%> called without an argument");
	  return true;
	}
      else if (nargs > 1)
	{
	  error ("wrong number of arguments to function %<__builtin_next_arg%>");
	  return true;
	}
      arg = CALL_EXPR_ARG (exp, 0);
    }

  /* We destructively modify the call to be __builtin_va_start (ap, 0)
     or __builtin_next_arg (0) the first time we see it, after checking
     the arguments and if needed issuing a warning.  */
  if (!integer_zerop (arg))
    {
      tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));

      /* Strip off all nops for the sake of the comparison.  This
	 is not quite the same as STRIP_NOPS.  It does more.
	 We must also strip off INDIRECT_EXPR for C++ reference
	 parameters.  */
      while (CONVERT_EXPR_P (arg)
	     || TREE_CODE (arg) == INDIRECT_REF)
	arg = TREE_OPERAND (arg, 0);
      if (arg != last_parm)
	{
	  /* FIXME: The tree optimizers can sometimes substitute a
	     different parameter here even though the user named the
	     last one.  We only warn; NOTE(review): the argument itself
	     is not corrected below, so wrong code may still result in
	     that case -- confirm intent.  */
	  warning (0, "second parameter of %<va_start%> not last named argument");
	}

      /* Undefined by C99 7.15.1.4p4 (va_start):
	 "If the parameter parmN is declared with the register storage
	 class, with a function or array type, or with a type that is
	 not compatible with the type that results after application of
	 the default argument promotions, the behavior is undefined."
      */
      else if (DECL_REGISTER (arg))
	warning (0, "undefined behaviour when second parameter of "
		 "%<va_start%> is declared with %<register%> storage");

      /* We want to verify the second parameter just once before the tree
	 optimizers are run and then avoid keeping it in the tree,
	 as otherwise we could warn even for correct code like:
	 void foo (int i, ...)
	 { va_list ap; i++; va_start (ap, i); va_end (ap); }  */
      if (va_start_p)
	CALL_EXPR_ARG (exp, 1) = integer_zero_node;
      else
	CALL_EXPR_ARG (exp, 0) = integer_zero_node;
    }
  return false;
}
11632
11633
11634 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
11635 ORIG may be null if this is a 2-argument call. We don't attempt to
11636 simplify calls with more than 3 arguments.
11637
11638 Return NULL_TREE if no simplification was possible, otherwise return the
11639 simplified form of the call as a tree. If IGNORED is true, it means that
11640 the caller does not use the returned value of the function. */
11641
static tree
fold_builtin_sprintf (tree dest, tree fmt, tree orig, int ignored)
{
  tree call, retval;
  const char *fmt_str = NULL;

  /* Verify the required arguments in the original call.  We deal with two
     types of sprintf() calls: 'sprintf (str, fmt)' and
     'sprintf (dest, "%s", orig)'.  */
  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;
  if (orig && !validate_arg (orig, POINTER_TYPE))
    return NULL_TREE;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return NULL_TREE;

  call = NULL_TREE;
  retval = NULL_TREE;

  /* Needed for target_percent / target_percent_s below.  */
  if (!init_target_chars ())
    return NULL_TREE;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];

      if (!fn)
	return NULL_TREE;

      /* Don't optimize sprintf (buf, "abc", ptr++).  */
      if (orig)
	return NULL_TREE;

      /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
	 'format' is known to contain no % formats.  */
      call = build_call_expr (fn, 2, dest, fmt);
      /* sprintf returns the number of characters written, which here
	 is simply the length of the constant format string.  */
      if (!ignored)
	retval = build_int_cst (NULL_TREE, strlen (fmt_str));
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn;
      fn = implicit_built_in_decls[BUILT_IN_STRCPY];

      if (!fn)
	return NULL_TREE;

      /* Don't crash on sprintf (str1, "%s").  */
      if (!orig)
	return NULL_TREE;

      /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2).  */
      if (!ignored)
	{
	  /* The return value is strlen (ORIG); fold only when that is
	     a compile-time constant.  */
	  retval = c_strlen (orig, 1);
	  if (!retval || TREE_CODE (retval) != INTEGER_CST)
	    return NULL_TREE;
	}
      call = build_call_expr (fn, 2, dest, orig);
    }

  if (call && retval)
    {
      /* Pair the strcpy call with the computed return value, converted
	 to sprintf's declared return type.  */
      retval = fold_convert
	(TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
	 retval);
      return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
    }
  else
    return call;
}
11720
11721 /* Expand a call EXP to __builtin_object_size. */
11722
11723 rtx
11724 expand_builtin_object_size (tree exp)
11725 {
11726 tree ost;
11727 int object_size_type;
11728 tree fndecl = get_callee_fndecl (exp);
11729
11730 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11731 {
11732 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11733 exp, fndecl);
11734 expand_builtin_trap ();
11735 return const0_rtx;
11736 }
11737
11738 ost = CALL_EXPR_ARG (exp, 1);
11739 STRIP_NOPS (ost);
11740
11741 if (TREE_CODE (ost) != INTEGER_CST
11742 || tree_int_cst_sgn (ost) < 0
11743 || compare_tree_int (ost, 3) > 0)
11744 {
11745 error ("%Klast argument of %D is not integer constant between 0 and 3",
11746 exp, fndecl);
11747 expand_builtin_trap ();
11748 return const0_rtx;
11749 }
11750
11751 object_size_type = tree_low_cst (ost, 0);
11752
11753 return object_size_type < 2 ? constm1_rtx : const0_rtx;
11754 }
11755
11756 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11757 FCODE is the BUILT_IN_* to use.
11758 Return NULL_RTX if we failed; the caller should emit a normal call,
11759 otherwise try to get the result in TARGET, if convenient (and in
11760 mode MODE if that's convenient). */
11761
static rtx
expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
			   enum built_in_function fcode)
{
  tree dest, src, len, size;

  /* The second argument is a fill value for memset_chk, a source
     pointer for the copy/move variants.  */
  if (!validate_arglist (exp,
			 POINTER_TYPE,
			 fcode == BUILT_IN_MEMSET_CHK
			 ? INTEGER_TYPE : POINTER_TYPE,
			 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dest = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);
  len = CALL_EXPR_ARG (exp, 2);
  size = CALL_EXPR_ARG (exp, 3);

  /* SIZE (the object-size bound) must be a known constant to reason
     about the check at all.  */
  if (! host_integerp (size, 1))
    return NULL_RTX;

  if (host_integerp (len, 1) || integer_all_onesp (size))
    {
      tree fn;

      /* A constant LEN larger than the known object size is a
	 guaranteed overflow: warn and leave the checking call alone.  */
      if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
	{
	  warning (0, "%Kcall to %D will always overflow destination buffer",
		   exp, get_callee_fndecl (exp));
	  return NULL_RTX;
	}

      fn = NULL_TREE;
      /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
	 mem{cpy,pcpy,move,set} is available.  */
      switch (fcode)
	{
	case BUILT_IN_MEMCPY_CHK:
	  fn = built_in_decls[BUILT_IN_MEMCPY];
	  break;
	case BUILT_IN_MEMPCPY_CHK:
	  fn = built_in_decls[BUILT_IN_MEMPCPY];
	  break;
	case BUILT_IN_MEMMOVE_CHK:
	  fn = built_in_decls[BUILT_IN_MEMMOVE];
	  break;
	case BUILT_IN_MEMSET_CHK:
	  fn = built_in_decls[BUILT_IN_MEMSET];
	  break;
	default:
	  break;
	}

      if (! fn)
	return NULL_RTX;

      /* Build the unchecked call, then expand it; folding may have
	 produced COMPOUND_EXPRs whose left operands must still be
	 evaluated for side effects.  */
      fn = build_call_expr (fn, 3, dest, src, len);
      STRIP_TYPE_NOPS (fn);
      while (TREE_CODE (fn) == COMPOUND_EXPR)
	{
	  expand_expr (TREE_OPERAND (fn, 0), const0_rtx, VOIDmode,
		       EXPAND_NORMAL);
	  fn = TREE_OPERAND (fn, 1);
	}
      /* Propagate the tail-call flag from the original call.  */
      if (TREE_CODE (fn) == CALL_EXPR)
	CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_expr (fn, target, mode, EXPAND_NORMAL);
    }
  else if (fcode == BUILT_IN_MEMSET_CHK)
    return NULL_RTX;
  else
    {
      unsigned int dest_align
	= get_pointer_alignment (dest, BIGGEST_ALIGNMENT);

      /* If DEST is not a pointer type, call the normal function.  */
      if (dest_align == 0)
	return NULL_RTX;

      /* If SRC and DEST are the same (and not volatile), do nothing.  */
      if (operand_equal_p (src, dest, 0))
	{
	  tree expr;

	  if (fcode != BUILT_IN_MEMPCPY_CHK)
	    {
	      /* Evaluate and ignore LEN in case it has side-effects.  */
	      expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
	      return expand_expr (dest, target, mode, EXPAND_NORMAL);
	    }

	  /* mempcpy returns DEST + LEN rather than DEST.  */
	  expr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
	  return expand_expr (expr, target, mode, EXPAND_NORMAL);
	}

      /* __memmove_chk special case.  */
      if (fcode == BUILT_IN_MEMMOVE_CHK)
	{
	  unsigned int src_align
	    = get_pointer_alignment (src, BIGGEST_ALIGNMENT);

	  if (src_align == 0)
	    return NULL_RTX;

	  /* If src is categorized for a readonly section we can use
	     normal __memcpy_chk.  */
	  if (readonly_data_expr (src))
	    {
	      tree fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
	      if (!fn)
		return NULL_RTX;
	      fn = build_call_expr (fn, 4, dest, src, len, size);
	      STRIP_TYPE_NOPS (fn);
	      while (TREE_CODE (fn) == COMPOUND_EXPR)
		{
		  expand_expr (TREE_OPERAND (fn, 0), const0_rtx, VOIDmode,
			       EXPAND_NORMAL);
		  fn = TREE_OPERAND (fn, 1);
		}
	      if (TREE_CODE (fn) == CALL_EXPR)
		CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
	      return expand_expr (fn, target, mode, EXPAND_NORMAL);
	    }
	}
      return NULL_RTX;
    }
}
11889
/* Emit warning if a buffer overflow is detected at compile time.
   EXP is the call expression; FCODE identifies which _chk builtin it
   calls, which determines where the length and object-size arguments
   sit in the argument list.  */

static void
maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
{
  int is_strlen = 0;
  tree len, size;

  /* Pick out the length-like argument and the object-size argument;
     their positions differ between the _chk variants.  */
  switch (fcode)
    {
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
    /* For __strcat_chk the warning will be emitted only if overflowing
       by at least strlen (dest) + 1 bytes.  */
    case BUILT_IN_STRCAT_CHK:
      len = CALL_EXPR_ARG (exp, 1);
      size = CALL_EXPR_ARG (exp, 2);
      is_strlen = 1;
      break;
    case BUILT_IN_STRNCAT_CHK:
    case BUILT_IN_STRNCPY_CHK:
      len = CALL_EXPR_ARG (exp, 2);
      size = CALL_EXPR_ARG (exp, 3);
      break;
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      len = CALL_EXPR_ARG (exp, 1);
      size = CALL_EXPR_ARG (exp, 3);
      break;
    default:
      gcc_unreachable ();
    }

  if (!len || !size)
    return;

  /* SIZE of all-ones means the object size is unknown; nothing can be
     proved then.  */
  if (! host_integerp (size, 1) || integer_all_onesp (size))
    return;

  if (is_strlen)
    {
      /* Here LEN is actually the source string; warn only when its
	 constant length is known and is not below SIZE.  */
      len = c_strlen (len, 1);
      if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
	return;
    }
  else if (fcode == BUILT_IN_STRNCAT_CHK)
    {
      tree src = CALL_EXPR_ARG (exp, 1);
      if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
	return;
      src = c_strlen (src, 1);
      if (! src || ! host_integerp (src, 1))
	{
	  /* LEN >= SIZE but the source length is unknown, so overflow
	     is possible rather than certain — weaker diagnostic.  */
	  warning (0, "%Kcall to %D might overflow destination buffer",
		   exp, get_callee_fndecl (exp));
	  return;
	}
      else if (tree_int_cst_lt (src, size))
	return;
    }
  else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
    return;

  warning (0, "%Kcall to %D will always overflow destination buffer",
	   exp, get_callee_fndecl (exp));
}
11956
/* Emit warning if a buffer overflow is detected at compile time
   in __sprintf_chk/__vsprintf_chk calls.  EXP is the call; FCODE is
   either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK.  */

static void
maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
{
  tree dest, size, len, fmt, flag;
  const char *fmt_str;
  int nargs = call_expr_nargs (exp);

  /* Verify the required arguments in the original call:
     (dest, flag, size, fmt, ...).  */

  if (nargs < 4)
    return;
  dest = CALL_EXPR_ARG (exp, 0);
  flag = CALL_EXPR_ARG (exp, 1);
  size = CALL_EXPR_ARG (exp, 2);
  fmt = CALL_EXPR_ARG (exp, 3);

  /* SIZE of all-ones means the destination size is unknown.  */
  if (! host_integerp (size, 1) || integer_all_onesp (size))
    return;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return;

  if (!init_target_chars ())
    return;

  /* If the format doesn't contain % args or %%, we know its size.  */
  if (strchr (fmt_str, target_percent) == 0)
    len = build_int_cstu (size_type_node, strlen (fmt_str));
  /* If the format is "%s" and first ... argument is a string literal,
     we know it too.  */
  else if (fcode == BUILT_IN_SPRINTF_CHK
	   && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree arg;

      if (nargs < 5)
	return;
      arg = CALL_EXPR_ARG (exp, 4);
      if (! POINTER_TYPE_P (TREE_TYPE (arg)))
	return;

      len = c_strlen (arg, 1);
      if (!len || ! host_integerp (len, 1))
	return;
    }
  else
    return;

  /* Overflow is certain when the known output length (excluding the
     terminating NUL) is not smaller than the destination size.  */
  if (! tree_int_cst_lt (len, size))
    {
      warning (0, "%Kcall to %D will always overflow destination buffer",
	       exp, get_callee_fndecl (exp));
    }
}
12016
12017 /* Emit warning if a free is called with address of a variable. */
12018
12019 static void
12020 maybe_emit_free_warning (tree exp)
12021 {
12022 tree arg = CALL_EXPR_ARG (exp, 0);
12023
12024 STRIP_NOPS (arg);
12025 if (TREE_CODE (arg) != ADDR_EXPR)
12026 return;
12027
12028 arg = get_base_address (TREE_OPERAND (arg, 0));
12029 if (arg == NULL || INDIRECT_REF_P (arg))
12030 return;
12031
12032 if (SSA_VAR_P (arg))
12033 warning (0, "%Kattempt to free a non-heap object %qD", exp, arg);
12034 else
12035 warning (0, "%Kattempt to free a non-heap object", exp);
12036 }
12037
/* Fold a call to __builtin_object_size with arguments PTR and OST,
   if possible.  Return the folded size as a size_t constant, or
   NULL_TREE to keep the call for later passes.  */

tree
fold_builtin_object_size (tree ptr, tree ost)
{
  tree ret = NULL_TREE;
  int object_size_type;

  if (!validate_arg (ptr, POINTER_TYPE)
      || !validate_arg (ost, INTEGER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (ost);

  /* The second argument must be a constant in the range 0..3
     selecting the object-size type.  */
  if (TREE_CODE (ost) != INTEGER_CST
      || tree_int_cst_sgn (ost) < 0
      || compare_tree_int (ost, 3) > 0)
    return NULL_TREE;

  object_size_type = tree_low_cst (ost, 0);

  /* __builtin_object_size doesn't evaluate side-effects in its arguments;
     if there are any side-effects, it returns (size_t) -1 for types 0 and 1
     and (size_t) 0 for types 2 and 3.  */
  if (TREE_SIDE_EFFECTS (ptr))
    return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);

  if (TREE_CODE (ptr) == ADDR_EXPR)
    ret = build_int_cstu (size_type_node,
			  compute_builtin_object_size (ptr, object_size_type));

  else if (TREE_CODE (ptr) == SSA_NAME)
    {
      unsigned HOST_WIDE_INT bytes;

      /* If object size is not known yet, delay folding until
	 later.  Maybe subsequent passes will help determining
	 it.  */
      bytes = compute_builtin_object_size (ptr, object_size_type);
      /* -1 (types 0/1) or 0 (types 2/3) are the "unknown" sentinels.  */
      if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2
					     ? -1 : 0))
	ret = build_int_cstu (size_type_node, bytes);
    }

  if (ret)
    {
      unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (ret);
      HOST_WIDE_INT high = TREE_INT_CST_HIGH (ret);
      /* Drop the folding when the value doesn't fit in size_t.  */
      if (fit_double_type (low, high, &low, &high, TREE_TYPE (ret)))
	ret = NULL_TREE;
    }

  return ret;
}
12093
/* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   DEST, SRC, LEN, and SIZE are the arguments to the call.
   IGNORE is true, if return value can be ignored.  FCODE is the BUILT_IN_*
   code of the builtin.  If MAXLEN is not NULL, it is maximum length
   passed as third argument.  */

tree
fold_builtin_memory_chk (tree fndecl,
			 tree dest, tree src, tree len, tree size,
			 tree maxlen, bool ignore,
			 enum built_in_function fcode)
{
  tree fn;

  /* For __memset_chk the second argument is the fill value, an int.  */
  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (src,
			(fcode == BUILT_IN_MEMSET_CHK
			 ? INTEGER_TYPE : POINTER_TYPE))
      || !validate_arg (len, INTEGER_TYPE)
      || !validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;

  /* If SRC and DEST are the same (and not volatile), return DEST
     (resp. DEST+LEN for __mempcpy_chk).  */
  if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
    {
      if (fcode != BUILT_IN_MEMPCPY_CHK)
	return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
      else
	{
	  tree temp = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
	  return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), temp);
	}
    }

  /* Give up unless the object size is a known constant.  */
  if (! host_integerp (size, 1))
    return NULL_TREE;

  /* SIZE of all-ones means "size unknown"; the check is then a no-op
     and can always be dropped.  */
  if (! integer_all_onesp (size))
    {
      if (! host_integerp (len, 1))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
	    {
	      if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
		{
		  /* (void) __mempcpy_chk () can be optimized into
		     (void) __memcpy_chk ().  */
		  fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
		  if (!fn)
		    return NULL_TREE;

		  return build_call_expr (fn, 4, dest, src, len, size);
		}
	      return NULL_TREE;
	    }
	}
      else
	maxlen = len;

      /* Possible overflow: keep the checking variant.  */
      if (tree_int_cst_lt (size, maxlen))
	return NULL_TREE;
    }

  fn = NULL_TREE;
  /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
     mem{cpy,pcpy,move,set} is available.  */
  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
      fn = built_in_decls[BUILT_IN_MEMCPY];
      break;
    case BUILT_IN_MEMPCPY_CHK:
      fn = built_in_decls[BUILT_IN_MEMPCPY];
      break;
    case BUILT_IN_MEMMOVE_CHK:
      fn = built_in_decls[BUILT_IN_MEMMOVE];
      break;
    case BUILT_IN_MEMSET_CHK:
      fn = built_in_decls[BUILT_IN_MEMSET];
      break;
    default:
      break;
    }

  if (!fn)
    return NULL_TREE;

  return build_call_expr (fn, 3, dest, src, len);
}
12187
/* Fold a call to the __st[rp]cpy_chk builtin.
   DEST, SRC, and SIZE are the arguments to the call.
   IGNORE is true if return value can be ignored.  FCODE is the BUILT_IN_*
   code of the builtin.  If MAXLEN is not NULL, it is maximum length of
   strings passed as second argument.  */

tree
fold_builtin_stxcpy_chk (tree fndecl, tree dest, tree src, tree size,
			 tree maxlen, bool ignore,
			 enum built_in_function fcode)
{
  tree len, fn;

  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (src, POINTER_TYPE)
      || !validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
    return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);

  /* Give up unless the object size is a known constant.  */
  if (! host_integerp (size, 1))
    return NULL_TREE;

  /* SIZE of all-ones means "size unknown"; the check can then always
     be dropped.  */
  if (! integer_all_onesp (size))
    {
      len = c_strlen (src, 1);
      if (! len || ! host_integerp (len, 1))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
	    {
	      if (fcode == BUILT_IN_STPCPY_CHK)
		{
		  if (! ignore)
		    return NULL_TREE;

		  /* If return value of __stpcpy_chk is ignored,
		     optimize into __strcpy_chk.  */
		  fn = built_in_decls[BUILT_IN_STRCPY_CHK];
		  if (!fn)
		    return NULL_TREE;

		  return build_call_expr (fn, 3, dest, src, size);
		}

	      if (! len || TREE_SIDE_EFFECTS (len))
		return NULL_TREE;

	      /* If c_strlen returned something, but not a constant,
		 transform __strcpy_chk into __memcpy_chk.  */
	      fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
	      if (!fn)
		return NULL_TREE;

	      /* Copy LEN + 1 bytes to include the terminating NUL.  */
	      len = size_binop (PLUS_EXPR, len, ssize_int (1));
	      return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
				   build_call_expr (fn, 4,
						    dest, src, len, size));
	    }
	}
      else
	maxlen = len;

      /* The copy (string length + NUL) must provably fit in SIZE,
	 i.e. MAXLEN < SIZE.  */
      if (! tree_int_cst_lt (maxlen, size))
	return NULL_TREE;
    }

  /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available.  */
  fn = built_in_decls[fcode == BUILT_IN_STPCPY_CHK
		      ? BUILT_IN_STPCPY : BUILT_IN_STRCPY];
  if (!fn)
    return NULL_TREE;

  return build_call_expr (fn, 2, dest, src);
}
12267
12268 /* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
12269 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12270 length passed as third argument. */
12271
12272 tree
12273 fold_builtin_strncpy_chk (tree dest, tree src, tree len, tree size,
12274 tree maxlen)
12275 {
12276 tree fn;
12277
12278 if (!validate_arg (dest, POINTER_TYPE)
12279 || !validate_arg (src, POINTER_TYPE)
12280 || !validate_arg (len, INTEGER_TYPE)
12281 || !validate_arg (size, INTEGER_TYPE))
12282 return NULL_TREE;
12283
12284 if (! host_integerp (size, 1))
12285 return NULL_TREE;
12286
12287 if (! integer_all_onesp (size))
12288 {
12289 if (! host_integerp (len, 1))
12290 {
12291 /* If LEN is not constant, try MAXLEN too.
12292 For MAXLEN only allow optimizing into non-_ocs function
12293 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12294 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12295 return NULL_TREE;
12296 }
12297 else
12298 maxlen = len;
12299
12300 if (tree_int_cst_lt (size, maxlen))
12301 return NULL_TREE;
12302 }
12303
12304 /* If __builtin_strncpy_chk is used, assume strncpy is available. */
12305 fn = built_in_decls[BUILT_IN_STRNCPY];
12306 if (!fn)
12307 return NULL_TREE;
12308
12309 return build_call_expr (fn, 3, dest, src, len);
12310 }
12311
12312 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12313 are the arguments to the call. */
12314
12315 static tree
12316 fold_builtin_strcat_chk (tree fndecl, tree dest, tree src, tree size)
12317 {
12318 tree fn;
12319 const char *p;
12320
12321 if (!validate_arg (dest, POINTER_TYPE)
12322 || !validate_arg (src, POINTER_TYPE)
12323 || !validate_arg (size, INTEGER_TYPE))
12324 return NULL_TREE;
12325
12326 p = c_getstr (src);
12327 /* If the SRC parameter is "", return DEST. */
12328 if (p && *p == '\0')
12329 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12330
12331 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
12332 return NULL_TREE;
12333
12334 /* If __builtin_strcat_chk is used, assume strcat is available. */
12335 fn = built_in_decls[BUILT_IN_STRCAT];
12336 if (!fn)
12337 return NULL_TREE;
12338
12339 return build_call_expr (fn, 2, dest, src);
12340 }
12341
12342 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12343 LEN, and SIZE. */
12344
12345 static tree
12346 fold_builtin_strncat_chk (tree fndecl,
12347 tree dest, tree src, tree len, tree size)
12348 {
12349 tree fn;
12350 const char *p;
12351
12352 if (!validate_arg (dest, POINTER_TYPE)
12353 || !validate_arg (src, POINTER_TYPE)
12354 || !validate_arg (size, INTEGER_TYPE)
12355 || !validate_arg (size, INTEGER_TYPE))
12356 return NULL_TREE;
12357
12358 p = c_getstr (src);
12359 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
12360 if (p && *p == '\0')
12361 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12362 else if (integer_zerop (len))
12363 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12364
12365 if (! host_integerp (size, 1))
12366 return NULL_TREE;
12367
12368 if (! integer_all_onesp (size))
12369 {
12370 tree src_len = c_strlen (src, 1);
12371 if (src_len
12372 && host_integerp (src_len, 1)
12373 && host_integerp (len, 1)
12374 && ! tree_int_cst_lt (len, src_len))
12375 {
12376 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
12377 fn = built_in_decls[BUILT_IN_STRCAT_CHK];
12378 if (!fn)
12379 return NULL_TREE;
12380
12381 return build_call_expr (fn, 3, dest, src, size);
12382 }
12383 return NULL_TREE;
12384 }
12385
12386 /* If __builtin_strncat_chk is used, assume strncat is available. */
12387 fn = built_in_decls[BUILT_IN_STRNCAT];
12388 if (!fn)
12389 return NULL_TREE;
12390
12391 return build_call_expr (fn, 3, dest, src, len);
12392 }
12393
/* Fold a call EXP to __{,v}sprintf_chk.  Return NULL_TREE if
   a normal call should be emitted rather than expanding the function
   inline.  FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK.  */

static tree
fold_builtin_sprintf_chk (tree exp, enum built_in_function fcode)
{
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;
  int nargs = call_expr_nargs (exp);

  /* Verify the required arguments in the original call:
     (dest, flag, size, fmt, ...).  */
  if (nargs < 4)
    return NULL_TREE;
  dest = CALL_EXPR_ARG (exp, 0);
  if (!validate_arg (dest, POINTER_TYPE))
    return NULL_TREE;
  flag = CALL_EXPR_ARG (exp, 1);
  if (!validate_arg (flag, INTEGER_TYPE))
    return NULL_TREE;
  size = CALL_EXPR_ARG (exp, 2);
  if (!validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;
  fmt = CALL_EXPR_ARG (exp, 3);
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  /* Give up unless the object size is a known constant.  */
  if (! host_integerp (size, 1))
    return NULL_TREE;

  len = NULL_TREE;

  if (!init_target_chars ())
    return NULL_TREE;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str != NULL)
    {
      /* If the format doesn't contain % args or %%, we know the size.  */
      if (strchr (fmt_str, target_percent) == 0)
	{
	  /* For sprintf, extra arguments after a %-free format would
	     be suspicious; only take the length when there are none.  */
	  if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
	    len = build_int_cstu (size_type_node, strlen (fmt_str));
	}
      /* If the format is "%s" and first ... argument is a string literal,
	 we know the size too.  */
      else if (fcode == BUILT_IN_SPRINTF_CHK
	       && strcmp (fmt_str, target_percent_s) == 0)
	{
	  tree arg;

	  if (nargs == 5)
	    {
	      arg = CALL_EXPR_ARG (exp, 4);
	      if (validate_arg (arg, POINTER_TYPE))
		{
		  len = c_strlen (arg, 1);
		  if (! len || ! host_integerp (len, 1))
		    len = NULL_TREE;
		}
	    }
	}
    }

  /* Unless SIZE is all-ones (size unknown), the known output length
     must provably fit.  */
  if (! integer_all_onesp (size))
    {
      if (! len || ! tree_int_cst_lt (len, size))
	return NULL_TREE;
    }

  /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      if (fmt_str == NULL)
	return NULL_TREE;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return NULL_TREE;
    }

  /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available.  */
  fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
		      ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
  if (!fn)
    return NULL_TREE;

  /* Rebuild as sprintf (dest, fmt, ...), dropping FLAG and SIZE.  */
  return rewrite_call_expr (exp, 4, fn, 2, dest, fmt);
}
12484
/* Fold a call EXP to {,v}snprintf.  Return NULL_TREE if
   a normal call should be emitted rather than expanding the function
   inline.  FCODE is either BUILT_IN_SNPRINTF_CHK or
   BUILT_IN_VSNPRINTF_CHK.  If MAXLEN is not NULL, it is maximum length
   passed as second argument.  */

tree
fold_builtin_snprintf_chk (tree exp, tree maxlen,
			   enum built_in_function fcode)
{
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;

  /* Verify the required arguments in the original call:
     (dest, len, flag, size, fmt, ...).  */
  if (call_expr_nargs (exp) < 5)
    return NULL_TREE;
  dest = CALL_EXPR_ARG (exp, 0);
  if (!validate_arg (dest, POINTER_TYPE))
    return NULL_TREE;
  len = CALL_EXPR_ARG (exp, 1);
  if (!validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;
  flag = CALL_EXPR_ARG (exp, 2);
  if (!validate_arg (flag, INTEGER_TYPE))
    return NULL_TREE;
  size = CALL_EXPR_ARG (exp, 3);
  if (!validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;
  fmt = CALL_EXPR_ARG (exp, 4);
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  /* Give up unless the object size is a known constant.  */
  if (! host_integerp (size, 1))
    return NULL_TREE;

  /* SIZE of all-ones means "size unknown"; the check can then always
     be dropped.  */
  if (! integer_all_onesp (size))
    {
      if (! host_integerp (len, 1))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
	    return NULL_TREE;
	}
      else
	maxlen = len;

      /* Keep the checking variant if the buffer might be too small.  */
      if (tree_int_cst_lt (size, maxlen))
	return NULL_TREE;
    }

  if (!init_target_chars ())
    return NULL_TREE;

  /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      fmt_str = c_getstr (fmt);
      if (fmt_str == NULL)
	return NULL_TREE;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return NULL_TREE;
    }

  /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
     available.  */
  fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
		      ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
  if (!fn)
    return NULL_TREE;

  /* Rebuild as snprintf (dest, len, fmt, ...), dropping FLAG and SIZE.  */
  return rewrite_call_expr (exp, 5, fn, 3, dest, len, fmt);
}
12561
/* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
   FMT and ARG are the arguments to the call; we don't fold cases with
   more than 2 arguments, and ARG may be null if this is a 1-argument case.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.  FCODE is the BUILT_IN_*
   code of the function to be simplified.  */

static tree
fold_builtin_printf (tree fndecl, tree fmt, tree arg, bool ignore,
		     enum built_in_function fcode)
{
  tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (! ignore)
    return NULL_TREE;

  /* Verify the required arguments in the original call.  */
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return NULL_TREE;

  if (fcode == BUILT_IN_PRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_putchar = built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED];
      fn_puts = built_in_decls[BUILT_IN_PUTS_UNLOCKED];
    }
  else
    {
      fn_putchar = implicit_built_in_decls[BUILT_IN_PUTCHAR];
      fn_puts = implicit_built_in_decls[BUILT_IN_PUTS];
    }

  if (!init_target_chars ())
    return NULL_TREE;

  /* Handle a format that is exactly "%s" or that contains no '%'.  */
  if (strcmp (fmt_str, target_percent_s) == 0
      || strchr (fmt_str, target_percent) == NULL)
    {
      const char *str;

      if (strcmp (fmt_str, target_percent_s) == 0)
	{
	  /* va_list variants hide their arguments; can't see ARG.  */
	  if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
	    return NULL_TREE;

	  if (!arg || !validate_arg (arg, POINTER_TYPE))
	    return NULL_TREE;

	  /* printf ("%s", literal): the literal is what gets printed.  */
	  str = c_getstr (arg);
	  if (str == NULL)
	    return NULL_TREE;
	}
      else
	{
	  /* The format specifier doesn't contain any '%' characters.  */
	  if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
	      && arg)
	    return NULL_TREE;
	  str = fmt_str;
	}

      /* If the string was "", printf does nothing.  */
      if (str[0] == '\0')
	return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);

      /* If the string has length of 1, call putchar.  */
      if (str[1] == '\0')
	{
	  /* Given printf("c"), (where c is any one character,)
	     convert "c"[0] to an int and pass that to the replacement
	     function.  */
	  newarg = build_int_cst (NULL_TREE, str[0]);
	  if (fn_putchar)
	    call = build_call_expr (fn_putchar, 1, newarg);
	}
      else
	{
	  /* If the string was "string\n", call puts("string").  */
	  size_t len = strlen (str);
	  if ((unsigned char)str[len - 1] == target_newline)
	    {
	      /* Create a NUL-terminated string that's one char shorter
		 than the original, stripping off the trailing '\n'.  */
	      char *newstr = XALLOCAVEC (char, len);
	      memcpy (newstr, str, len - 1);
	      newstr[len - 1] = 0;

	      newarg = build_string_literal (len, newstr);
	      if (fn_puts)
		call = build_call_expr (fn_puts, 1, newarg);
	    }
	  else
	    /* We'd like to arrange to call fputs(string,stdout) here,
	       but we need stdout and don't have a way to get it yet.  */
	    return NULL_TREE;
	}
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
    return NULL_TREE;

  /* If the format specifier was "%s\n", call __builtin_puts(arg).  */
  else if (strcmp (fmt_str, target_percent_s_newline) == 0)
    {
      if (!arg || !validate_arg (arg, POINTER_TYPE))
	return NULL_TREE;
      if (fn_puts)
	call = build_call_expr (fn_puts, 1, arg);
    }

  /* If the format specifier was "%c", call __builtin_putchar(arg).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg || !validate_arg (arg, INTEGER_TYPE))
	return NULL_TREE;
      if (fn_putchar)
	call = build_call_expr (fn_putchar, 1, arg);
    }

  if (!call)
    return NULL_TREE;

  /* putchar/puts return a different type than printf; convert so the
     resulting tree keeps the original call's type.  */
  return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), call);
}
12696
/* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
   FP, FMT, and ARG are the arguments to the call.  We don't fold calls with
   more than 3 arguments, and ARG may be null in the 2-argument case.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.  FCODE is the BUILT_IN_*
   code of the function to be simplified.  */

static tree
fold_builtin_fprintf (tree fndecl, tree fp, tree fmt, tree arg, bool ignore,
		      enum built_in_function fcode)
{
  tree fn_fputc, fn_fputs, call = NULL_TREE;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (! ignore)
    return NULL_TREE;

  /* Verify the required arguments in the original call.  */
  if (!validate_arg (fp, POINTER_TYPE))
    return NULL_TREE;
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return NULL_TREE;

  if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_fputc = built_in_decls[BUILT_IN_FPUTC_UNLOCKED];
      fn_fputs = built_in_decls[BUILT_IN_FPUTS_UNLOCKED];
    }
  else
    {
      fn_fputc = implicit_built_in_decls[BUILT_IN_FPUTC];
      fn_fputs = implicit_built_in_decls[BUILT_IN_FPUTS];
    }

  if (!init_target_chars ())
    return NULL_TREE;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      /* Trailing arguments with a %-free format are only plausible
	 for the va_list variants (where they're hidden).  */
      if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
	  && arg)
	return NULL_TREE;

      /* If the format specifier was "", fprintf does nothing.  */
      if (fmt_str[0] == '\0')
	{
	  /* If FP has side-effects, just wait until gimplification is
	     done.  */
	  if (TREE_SIDE_EFFECTS (fp))
	    return NULL_TREE;

	  return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
	}

      /* When "string" doesn't contain %, replace all cases of
	 fprintf (fp, string) with fputs (string, fp).  The fputs
	 builtin will take care of special cases like length == 1.  */
      if (fn_fputs)
	call = build_call_expr (fn_fputs, 2, fmt, fp);
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
    return NULL_TREE;

  /* If the format specifier was "%s", call __builtin_fputs (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_s) == 0)
    {
      if (!arg || !validate_arg (arg, POINTER_TYPE))
	return NULL_TREE;
      if (fn_fputs)
	call = build_call_expr (fn_fputs, 2, arg, fp);
    }

  /* If the format specifier was "%c", call __builtin_fputc (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg || !validate_arg (arg, INTEGER_TYPE))
	return NULL_TREE;
      if (fn_fputc)
	call = build_call_expr (fn_fputc, 2, arg, fp);
    }

  if (!call)
    return NULL_TREE;
  /* fputs/fputc return a different type; convert to fprintf's.  */
  return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), call);
}
12794
12795 /* Initialize format string characters in the target charset. */
12796
12797 static bool
12798 init_target_chars (void)
12799 {
12800 static bool init;
12801 if (!init)
12802 {
12803 target_newline = lang_hooks.to_target_charset ('\n');
12804 target_percent = lang_hooks.to_target_charset ('%');
12805 target_c = lang_hooks.to_target_charset ('c');
12806 target_s = lang_hooks.to_target_charset ('s');
12807 if (target_newline == 0 || target_percent == 0 || target_c == 0
12808 || target_s == 0)
12809 return false;
12810
12811 target_percent_c[0] = target_percent;
12812 target_percent_c[1] = target_c;
12813 target_percent_c[2] = '\0';
12814
12815 target_percent_s[0] = target_percent;
12816 target_percent_s[1] = target_s;
12817 target_percent_s[2] = '\0';
12818
12819 target_percent_s_newline[0] = target_percent;
12820 target_percent_s_newline[1] = target_s;
12821 target_percent_s_newline[2] = target_newline;
12822 target_percent_s_newline[3] = '\0';
12823
12824 init = true;
12825 }
12826 return true;
12827 }
12828
/* Helper function for do_mpfr_arg*().  Ensure M is a normal number
   and no overflow/underflow occurred.  INEXACT is true if M was not
   exactly calculated.  TYPE is the tree type for the result.  This
   function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail.  */

static tree
do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
      && (!flag_rounding_math || !inexact))
    {
      REAL_VALUE_TYPE rr;

      real_from_mpfr (&rr, m, type, GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
	 check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
	 but the mpfr_t is not, then we underflowed in the
	 conversion.  */
      if (real_isfinite (&rr)
	  && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
	{
	  REAL_VALUE_TYPE rmode;

	  real_convert (&rmode, TYPE_MODE (type), &rr);
	  /* Proceed iff the specified mode can hold the value.  */
	  if (real_identical (&rmode, &rr))
	    return build_real (type, rmode);
	}
    }
  return NULL_TREE;
}
12865
12866 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
12867 FUNC on it and return the resulting value as a tree with type TYPE.
12868 If MIN and/or MAX are not NULL, then the supplied ARG must be
12869 within those bounds. If INCLUSIVE is true, then MIN/MAX are
12870 acceptable values, otherwise they are not. The mpfr precision is
12871 set to the precision of TYPE. We assume that function FUNC returns
12872 zero if the result could be calculated exactly within the requested
12873 precision. */
12874
12875 static tree
12876 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
12877 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
12878 bool inclusive)
12879 {
12880 tree result = NULL_TREE;
12881
12882 STRIP_NOPS (arg);
12883
12884 /* To proceed, MPFR must exactly represent the target floating point
12885 format, which only happens when the target base equals two. */
12886 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12887 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
12888 {
12889 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12890
12891 if (real_isfinite (ra)
12892 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
12893 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
12894 {
12895 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12896 const int prec = fmt->p;
12897 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12898 int inexact;
12899 mpfr_t m;
12900
12901 mpfr_init2 (m, prec);
12902 mpfr_from_real (m, ra, GMP_RNDN);
12903 mpfr_clear_flags ();
12904 inexact = func (m, m, rnd);
12905 result = do_mpfr_ckconv (m, type, inexact);
12906 mpfr_clear (m);
12907 }
12908 }
12909
12910 return result;
12911 }
12912
12913 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
12914 FUNC on it and return the resulting value as a tree with type TYPE.
12915 The mpfr precision is set to the precision of TYPE. We assume that
12916 function FUNC returns zero if the result could be calculated
12917 exactly within the requested precision. */
12918
12919 static tree
12920 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
12921 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12922 {
12923 tree result = NULL_TREE;
12924
12925 STRIP_NOPS (arg1);
12926 STRIP_NOPS (arg2);
12927
12928 /* To proceed, MPFR must exactly represent the target floating point
12929 format, which only happens when the target base equals two. */
12930 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12931 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12932 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12933 {
12934 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12935 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12936
12937 if (real_isfinite (ra1) && real_isfinite (ra2))
12938 {
12939 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12940 const int prec = fmt->p;
12941 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12942 int inexact;
12943 mpfr_t m1, m2;
12944
12945 mpfr_inits2 (prec, m1, m2, NULL);
12946 mpfr_from_real (m1, ra1, GMP_RNDN);
12947 mpfr_from_real (m2, ra2, GMP_RNDN);
12948 mpfr_clear_flags ();
12949 inexact = func (m1, m1, m2, rnd);
12950 result = do_mpfr_ckconv (m1, type, inexact);
12951 mpfr_clears (m1, m2, NULL);
12952 }
12953 }
12954
12955 return result;
12956 }
12957
12958 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
12959 FUNC on it and return the resulting value as a tree with type TYPE.
12960 The mpfr precision is set to the precision of TYPE. We assume that
12961 function FUNC returns zero if the result could be calculated
12962 exactly within the requested precision. */
12963
12964 static tree
12965 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
12966 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12967 {
12968 tree result = NULL_TREE;
12969
12970 STRIP_NOPS (arg1);
12971 STRIP_NOPS (arg2);
12972 STRIP_NOPS (arg3);
12973
12974 /* To proceed, MPFR must exactly represent the target floating point
12975 format, which only happens when the target base equals two. */
12976 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12977 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12978 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
12979 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
12980 {
12981 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12982 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12983 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
12984
12985 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
12986 {
12987 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12988 const int prec = fmt->p;
12989 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12990 int inexact;
12991 mpfr_t m1, m2, m3;
12992
12993 mpfr_inits2 (prec, m1, m2, m3, NULL);
12994 mpfr_from_real (m1, ra1, GMP_RNDN);
12995 mpfr_from_real (m2, ra2, GMP_RNDN);
12996 mpfr_from_real (m3, ra3, GMP_RNDN);
12997 mpfr_clear_flags ();
12998 inexact = func (m1, m1, m2, m3, rnd);
12999 result = do_mpfr_ckconv (m1, type, inexact);
13000 mpfr_clears (m1, m2, m3, NULL);
13001 }
13002 }
13003
13004 return result;
13005 }
13006
/* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
   the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
   If ARG_SINP and ARG_COSP are NULL then the result is returned
   as a complex value.
   The type is taken from the type of ARG and is used for setting the
   precision of the calculation and results.
   Returns NULL_TREE when folding is not possible (non-constant ARG,
   non-binary target format, inexact results, or pointer arguments of
   the wrong type).  */

static tree
do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
{
  tree const type = TREE_TYPE (arg);
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST
      && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);

      if (real_isfinite (ra))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  tree result_s, result_c;
	  int inexact;
	  mpfr_t m, ms, mc;

	  /* Compute sine and cosine in one MPFR call at TYPE's
	     precision; each result is then independently validated by
	     do_mpfr_ckconv.  */
	  mpfr_inits2 (prec, m, ms, mc, NULL);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  mpfr_clear_flags ();
	  inexact = mpfr_sin_cos (ms, mc, m, rnd);
	  result_s = do_mpfr_ckconv (ms, type, inexact);
	  result_c = do_mpfr_ckconv (mc, type, inexact);
	  mpfr_clears (m, ms, mc, NULL);
	  if (result_s && result_c)
	    {
	      /* If we are to return in a complex value do so.  Note
		 the argument order: real part = cos, imaginary part =
		 sin (cexpi-style ordering).  */
	      if (!arg_sinp && !arg_cosp)
		return build_complex (build_complex_type (type),
				      result_c, result_s);

	      /* Dereference the sin/cos pointer arguments.  */
	      arg_sinp = build_fold_indirect_ref (arg_sinp);
	      arg_cosp = build_fold_indirect_ref (arg_cosp);
	      /* Proceed if valid pointer type were passed in.  */
	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
		  && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
		{
		  /* Set the values.  Both assignments are marked with
		     TREE_SIDE_EFFECTS so later folding cannot drop
		     them.  */
		  result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
					  result_s);
		  TREE_SIDE_EFFECTS (result_s) = 1;
		  result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
					  result_c);
		  TREE_SIDE_EFFECTS (result_c) = 1;
		  /* Combine the assignments into a compound expr.  */
		  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						    result_s, result_c));
		}
	    }
	}
    }
  return result;
}
13076
13077 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
13078 two-argument mpfr order N Bessel function FUNC on them and return
13079 the resulting value as a tree with type TYPE. The mpfr precision
13080 is set to the precision of TYPE. We assume that function FUNC
13081 returns zero if the result could be calculated exactly within the
13082 requested precision. */
13083 static tree
13084 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
13085 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
13086 const REAL_VALUE_TYPE *min, bool inclusive)
13087 {
13088 tree result = NULL_TREE;
13089
13090 STRIP_NOPS (arg1);
13091 STRIP_NOPS (arg2);
13092
13093 /* To proceed, MPFR must exactly represent the target floating point
13094 format, which only happens when the target base equals two. */
13095 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13096 && host_integerp (arg1, 0)
13097 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13098 {
13099 const HOST_WIDE_INT n = tree_low_cst(arg1, 0);
13100 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
13101
13102 if (n == (long)n
13103 && real_isfinite (ra)
13104 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
13105 {
13106 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13107 const int prec = fmt->p;
13108 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13109 int inexact;
13110 mpfr_t m;
13111
13112 mpfr_init2 (m, prec);
13113 mpfr_from_real (m, ra, GMP_RNDN);
13114 mpfr_clear_flags ();
13115 inexact = func (m, n, m, rnd);
13116 result = do_mpfr_ckconv (m, type, inexact);
13117 mpfr_clear (m);
13118 }
13119 }
13120
13121 return result;
13122 }
13123
/* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
   the pointer *(ARG_QUO) and return the result.  The type is taken
   from the type of ARG0 and is used for setting the precision of the
   calculation and results.
   On success the returned tree is a COMPOUND_EXPR that first stores
   the (possibly reduced) quotient into *ARG_QUO and then yields the
   remainder; returns NULL_TREE when folding is not possible.  */

static tree
do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
{
  tree const type = TREE_TYPE (arg0);
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
    {
      const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
      const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);

      if (real_isfinite (ra0) && real_isfinite (ra1))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  tree result_rem;
	  long integer_quo;
	  mpfr_t m0, m1;

	  /* Compute remainder and quotient at TYPE's precision.  */
	  mpfr_inits2 (prec, m0, m1, NULL);
	  mpfr_from_real (m0, ra0, GMP_RNDN);
	  mpfr_from_real (m1, ra1, GMP_RNDN);
	  mpfr_clear_flags ();
	  mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
	  /* Remquo is independent of the rounding mode, so pass
	     inexact=0 to do_mpfr_ckconv().  */
	  result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
	  mpfr_clears (m0, m1, NULL);
	  if (result_rem)
	    {
	      /* MPFR calculates quo in the host's long so it may
		 return more bits in quo than the target int can hold
		 if sizeof(host long) > sizeof(target int).  This can
		 happen even for native compilers in LP64 mode.  In
		 these cases, modulo the quo value with the largest
		 number that the target int can hold while leaving one
		 bit for the sign.  */
	      if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
		integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));

	      /* Dereference the quo pointer argument.  */
	      arg_quo = build_fold_indirect_ref (arg_quo);
	      /* Proceed iff a valid pointer type was passed in.  */
	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
	        {
		  /* Set the value.  The assignment is marked with
		     TREE_SIDE_EFFECTS so later folding cannot drop
		     it.  */
		  tree result_quo = fold_build2 (MODIFY_EXPR,
						 TREE_TYPE (arg_quo), arg_quo,
						 build_int_cst (NULL, integer_quo));
		  TREE_SIDE_EFFECTS (result_quo) = 1;
		  /* Combine the quo assignment with the rem.  */
		  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						    result_quo, result_rem));
		}
	    }
	}
    }
  return result;
}
13196
/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
   resulting value as a tree with type TYPE.  The mpfr precision is
   set to the precision of TYPE.  We assume that this mpfr function
   returns zero if the result could be calculated exactly within the
   requested precision.  In addition, the integer pointer represented
   by ARG_SG will be dereferenced and set to the appropriate signgam
   (-1,1) value.
   On success the returned tree is a COMPOUND_EXPR that first stores
   the sign into *ARG_SG and then yields the lgamma value; returns
   NULL_TREE when folding is not possible.  */

static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
	 negative integer.  */
      if (real_isfinite (ra)
	  && ra->cl != rvc_zero
	  && !(real_isneg(ra) && real_isinteger(ra, TYPE_MODE (type))))
        {
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  int inexact, sg;
	  mpfr_t m;
	  tree result_lg;

	  /* Compute lgamma and the sign of gamma at TYPE's
	     precision.  */
	  mpfr_init2 (m, prec);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  mpfr_clear_flags ();
	  inexact = mpfr_lgamma (m, &sg, m, rnd);
	  result_lg = do_mpfr_ckconv (m, type, inexact);
	  mpfr_clear (m);
	  if (result_lg)
	    {
	      tree result_sg;

	      /* Dereference the arg_sg pointer argument.  */
	      arg_sg = build_fold_indirect_ref (arg_sg);
	      /* Assign the signgam value into *arg_sg.  The
		 assignment is marked with TREE_SIDE_EFFECTS so later
		 folding cannot drop it.  */
	      result_sg = fold_build2 (MODIFY_EXPR,
				       TREE_TYPE (arg_sg), arg_sg,
				       build_int_cst (NULL, sg));
	      TREE_SIDE_EFFECTS (result_sg) = 1;
	      /* Combine the signgam assignment with the lgamma result.  */
	      result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						result_sg, result_lg));
	    }
	}
    }

  return result;
}
13261
13262 /* FIXME tuples.
13263 The functions below provide an alternate interface for folding
13264 builtin function calls presented as GIMPLE_CALL statements rather
13265 than as CALL_EXPRs. The folded result is still expressed as a
13266 tree. There is too much code duplication in the handling of
13267 varargs functions, and a more intrusive re-factoring would permit
13268 better sharing of code between the tree and statement-based
13269 versions of these functions. */
13270
13271 /* Construct a new CALL_EXPR using the tail of the argument list of STMT
13272 along with N new arguments specified as the "..." parameters. SKIP
13273 is the number of arguments in STMT to be omitted. This function is used
13274 to do varargs-to-varargs transformations. */
13275
13276 static tree
13277 gimple_rewrite_call_expr (gimple stmt, int skip, tree fndecl, int n, ...)
13278 {
13279 int oldnargs = gimple_call_num_args (stmt);
13280 int nargs = oldnargs - skip + n;
13281 tree fntype = TREE_TYPE (fndecl);
13282 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
13283 tree *buffer;
13284 int i, j;
13285 va_list ap;
13286
13287 buffer = XALLOCAVEC (tree, nargs);
13288 va_start (ap, n);
13289 for (i = 0; i < n; i++)
13290 buffer[i] = va_arg (ap, tree);
13291 va_end (ap);
13292 for (j = skip; j < oldnargs; j++, i++)
13293 buffer[i] = gimple_call_arg (stmt, j);
13294
13295 return fold (build_call_array (TREE_TYPE (fntype), fn, nargs, buffer));
13296 }
13297
/* Fold a call STMT to __{,v}sprintf_chk.  Return NULL_TREE if
   a normal call should be emitted rather than expanding the function
   inline.  FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK.
   The call is rewritten to plain {,v}sprintf when the destination
   object size (argument 2) is provably large enough, or unknown
   ((size_t) -1), and the format string is safe.  */

static tree
gimple_fold_builtin_sprintf_chk (gimple stmt, enum built_in_function fcode)
{
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;
  int nargs = gimple_call_num_args (stmt);

  /* Verify the required arguments in the original call:
     (dest, flag, size, fmt, ...).  */
  if (nargs < 4)
    return NULL_TREE;
  dest = gimple_call_arg (stmt, 0);
  if (!validate_arg (dest, POINTER_TYPE))
    return NULL_TREE;
  flag = gimple_call_arg (stmt, 1);
  if (!validate_arg (flag, INTEGER_TYPE))
    return NULL_TREE;
  size = gimple_call_arg (stmt, 2);
  if (!validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;
  fmt = gimple_call_arg (stmt, 3);
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  /* The object size must be a compile-time constant to reason about.  */
  if (! host_integerp (size, 1))
    return NULL_TREE;

  len = NULL_TREE;

  if (!init_target_chars ())
    return NULL_TREE;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str != NULL)
    {
      /* If the format doesn't contain % args or %%, we know the size.  */
      if (strchr (fmt_str, target_percent) == 0)
	{
	  /* For sprintf_chk extra arguments with a %-free format would
	     be ignored, so only fold when there are none (nargs == 4);
	     vsprintf_chk has no such issue.  */
	  if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
	    len = build_int_cstu (size_type_node, strlen (fmt_str));
	}
      /* If the format is "%s" and first ... argument is a string literal,
	 we know the size too.  */
      else if (fcode == BUILT_IN_SPRINTF_CHK
	       && strcmp (fmt_str, target_percent_s) == 0)
	{
	  tree arg;

	  if (nargs == 5)
	    {
	      arg = gimple_call_arg (stmt, 4);
	      if (validate_arg (arg, POINTER_TYPE))
		{
		  len = c_strlen (arg, 1);
		  if (! len || ! host_integerp (len, 1))
		    len = NULL_TREE;
		}
	    }
	}
    }

  /* Unless the object size is "unknown" (all ones), the output length
     must be known and strictly smaller than the object size (LEN does
     not count the terminating NUL).  */
  if (! integer_all_onesp (size))
    {
      if (! len || ! tree_int_cst_lt (len, size))
	return NULL_TREE;
    }

  /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      if (fmt_str == NULL)
	return NULL_TREE;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return NULL_TREE;
    }

  /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available.  */
  fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
		      ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
  if (!fn)
    return NULL_TREE;

  /* Drop the flag and size arguments: {,v}sprintf (dest, fmt, ...).  */
  return gimple_rewrite_call_expr (stmt, 4, fn, 2, dest, fmt);
}
13388
/* Fold a call STMT to {,v}snprintf.  Return NULL_TREE if
   a normal call should be emitted rather than expanding the function
   inline.  FCODE is either BUILT_IN_SNPRINTF_CHK or
   BUILT_IN_VSNPRINTF_CHK.  If MAXLEN is not NULL, it is maximum length
   passed as second argument.
   The call is rewritten to plain {,v}snprintf when the destination
   object size (argument 3) is provably >= the length limit, or
   unknown ((size_t) -1), and the format string is safe.  */

tree
gimple_fold_builtin_snprintf_chk (gimple stmt, tree maxlen,
                                  enum built_in_function fcode)
{
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;

  /* Verify the required arguments in the original call:
     (dest, len, flag, size, fmt, ...).  */
  if (gimple_call_num_args (stmt) < 5)
    return NULL_TREE;
  dest = gimple_call_arg (stmt, 0);
  if (!validate_arg (dest, POINTER_TYPE))
    return NULL_TREE;
  len = gimple_call_arg (stmt, 1);
  if (!validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;
  flag = gimple_call_arg (stmt, 2);
  if (!validate_arg (flag, INTEGER_TYPE))
    return NULL_TREE;
  size = gimple_call_arg (stmt, 3);
  if (!validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;
  fmt = gimple_call_arg (stmt, 4);
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  /* The object size must be a compile-time constant to reason about.  */
  if (! host_integerp (size, 1))
    return NULL_TREE;

  /* Unless the object size is "unknown" (all ones), require a constant
     length bound no larger than the object size.  */
  if (! integer_all_onesp (size))
    {
      if (! host_integerp (len, 1))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
	    return NULL_TREE;
	}
      else
	maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
	return NULL_TREE;
    }

  if (!init_target_chars ())
    return NULL_TREE;

  /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      fmt_str = c_getstr (fmt);
      if (fmt_str == NULL)
	return NULL_TREE;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return NULL_TREE;
    }

  /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
     available.  */
  fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
		      ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
  if (!fn)
    return NULL_TREE;

  /* Drop the flag and size arguments: {,v}snprintf (dest, len, fmt, ...).  */
  return gimple_rewrite_call_expr (stmt, 5, fn, 3, dest, len, fmt);
}
13465
13466 /* Builtins with folding operations that operate on "..." arguments
13467 need special handling; we need to store the arguments in a convenient
13468 data structure before attempting any folding. Fortunately there are
13469 only a few builtins that fall into this category. FNDECL is the
13470 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
13471 result of the function call is ignored. */
13472
13473 static tree
13474 gimple_fold_builtin_varargs (tree fndecl, gimple stmt, bool ignore ATTRIBUTE_UNUSED)
13475 {
13476 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
13477 tree ret = NULL_TREE;
13478
13479 switch (fcode)
13480 {
13481 case BUILT_IN_SPRINTF_CHK:
13482 case BUILT_IN_VSPRINTF_CHK:
13483 ret = gimple_fold_builtin_sprintf_chk (stmt, fcode);
13484 break;
13485
13486 case BUILT_IN_SNPRINTF_CHK:
13487 case BUILT_IN_VSNPRINTF_CHK:
13488 ret = gimple_fold_builtin_snprintf_chk (stmt, NULL_TREE, fcode);
13489
13490 default:
13491 break;
13492 }
13493 if (ret)
13494 {
13495 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
13496 TREE_NO_WARNING (ret) = 1;
13497 return ret;
13498 }
13499 return NULL_TREE;
13500 }
13501
/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.
   STMT is a GIMPLE_CALL; IGNORE is true if the call's value is unused.
   Returns the folded replacement tree, or NULL_TREE if the call could
   not be folded.  */

tree
fold_call_stmt (gimple stmt, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = gimple_call_fndecl (stmt);
  /* Only fold direct calls to builtins; calls whose last argument is
     __builtin_va_arg_pack () must be left alone for later expansion.  */
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      && !gimple_call_va_arg_pack_p (stmt))
    {
      int nargs = gimple_call_num_args (stmt);

      /* FIXME: Don't use a list in this interface.  */
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	{
	  /* Machine-dependent builtins go through the target hook,
	     which still takes a TREE_LIST of arguments (built in
	     reverse so the list ends up in call order).  */
	  tree arglist = NULL_TREE;
	  int i;
	  for (i = nargs - 1; i >= 0; i--)
	    arglist = tree_cons (NULL_TREE, gimple_call_arg (stmt, i), arglist);
	  return targetm.fold_builtin (fndecl, arglist, ignore);
	}
      else
	{
	  /* Normal builtins: try the fixed-arity folder first, then
	     the varargs folder as a fallback.  */
	  if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
	    {
	      tree args[MAX_ARGS_TO_FOLD_BUILTIN];
	      int i;
	      for (i = 0; i < nargs; i++)
		args[i] = gimple_call_arg (stmt, i);
	      ret = fold_builtin_n (fndecl, args, nargs, ignore);
	    }
	  if (!ret)
	    ret = gimple_fold_builtin_varargs (fndecl, stmt, ignore);
	  if (ret)
	    {
	      /* Propagate location information from original call to
		 expansion of builtin.  Otherwise things like
		 maybe_emit_chk_warning, that operate on the expansion
		 of a builtin, will use the wrong location information.  */
	      if (gimple_has_location (stmt))
		{
		  tree realret = ret;
		  /* Look through the NOP_EXPR wrapper added by
		     gimple_fold_builtin_varargs to reach the
		     expression that can carry a location.  */
		  if (TREE_CODE (ret) == NOP_EXPR)
		    realret = TREE_OPERAND (ret, 0);
		  if (CAN_HAVE_LOCATION_P (realret)
		      && !EXPR_HAS_LOCATION (realret))
		    SET_EXPR_LOCATION (realret, gimple_location (stmt));
		  return realret;
		}
	      return ret;
	    }
	}
    }
  return NULL_TREE;
}