/* From gcc.git: gcc/builtins.c
   (checkout near: "re PR preprocessor/36674 (#include location is offset
   by one row in errors from preprocessor)").  */
1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
4 Free Software Foundation, Inc.
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "real.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "gimple.h"
31 #include "flags.h"
32 #include "regs.h"
33 #include "hard-reg-set.h"
34 #include "except.h"
35 #include "function.h"
36 #include "insn-config.h"
37 #include "expr.h"
38 #include "optabs.h"
39 #include "libfuncs.h"
40 #include "recog.h"
41 #include "output.h"
42 #include "typeclass.h"
43 #include "toplev.h"
44 #include "predict.h"
45 #include "tm_p.h"
46 #include "target.h"
47 #include "langhooks.h"
48 #include "basic-block.h"
49 #include "tree-mudflap.h"
50 #include "tree-flow.h"
51 #include "value-prof.h"
52 #include "diagnostic.h"
53
54 #ifndef SLOW_UNALIGNED_ACCESS
55 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
56 #endif
57
58 #ifndef PAD_VARARGS_DOWN
59 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
60 #endif
61
/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[4]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

/* Stringified enumerator name of every builtin, generated by expanding
   builtins.def with a DEF_BUILTIN that keeps only the stringized code.  */
#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN

/* Setup an array of _DECL trees, make sure each element is
   initialized to NULL_TREE.  */
tree built_in_decls[(int) END_BUILTINS];
/* Declarations used when constructing the builtin implicitly in the compiler.
   It may be NULL_TREE when this is invalid (for instance runtime is not
   required to implement the function call in all cases).  */
tree implicit_built_in_decls[(int) END_BUILTINS];
80
81 static const char *c_getstr (tree);
82 static rtx c_readstr (const char *, enum machine_mode);
83 static int target_char_cast (tree, char *);
84 static rtx get_memory_rtx (tree, tree);
85 static int apply_args_size (void);
86 static int apply_result_size (void);
87 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
88 static rtx result_vector (int, rtx);
89 #endif
90 static void expand_builtin_update_setjmp_buf (rtx);
91 static void expand_builtin_prefetch (tree);
92 static rtx expand_builtin_apply_args (void);
93 static rtx expand_builtin_apply_args_1 (void);
94 static rtx expand_builtin_apply (rtx, rtx, rtx);
95 static void expand_builtin_return (rtx);
96 static enum type_class type_to_class (tree);
97 static rtx expand_builtin_classify_type (tree);
98 static void expand_errno_check (tree, rtx);
99 static rtx expand_builtin_mathfn (tree, rtx, rtx);
100 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
101 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
102 static rtx expand_builtin_interclass_mathfn (tree, rtx, rtx);
103 static rtx expand_builtin_sincos (tree);
104 static rtx expand_builtin_cexpi (tree, rtx, rtx);
105 static rtx expand_builtin_int_roundingfn (tree, rtx);
106 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
107 static rtx expand_builtin_args_info (tree);
108 static rtx expand_builtin_next_arg (void);
109 static rtx expand_builtin_va_start (tree);
110 static rtx expand_builtin_va_end (tree);
111 static rtx expand_builtin_va_copy (tree);
112 static rtx expand_builtin_memchr (tree, rtx, enum machine_mode);
113 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
114 static rtx expand_builtin_strcmp (tree, rtx, enum machine_mode);
115 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
116 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
117 static rtx expand_builtin_strcat (tree, tree, rtx, enum machine_mode);
118 static rtx expand_builtin_strncat (tree, rtx, enum machine_mode);
119 static rtx expand_builtin_strspn (tree, rtx, enum machine_mode);
120 static rtx expand_builtin_strcspn (tree, rtx, enum machine_mode);
121 static rtx expand_builtin_memcpy (tree, rtx, enum machine_mode);
122 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
123 static rtx expand_builtin_mempcpy_args (tree, tree, tree, tree, rtx,
124 enum machine_mode, int);
125 static rtx expand_builtin_memmove (tree, rtx, enum machine_mode, int);
126 static rtx expand_builtin_memmove_args (tree, tree, tree, tree, rtx,
127 enum machine_mode, int);
128 static rtx expand_builtin_bcopy (tree, int);
129 static rtx expand_builtin_strcpy (tree, tree, rtx, enum machine_mode);
130 static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx, enum machine_mode);
131 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
132 static rtx expand_builtin_strncpy (tree, rtx, enum machine_mode);
133 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
134 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
135 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
136 static rtx expand_builtin_bzero (tree);
137 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
138 static rtx expand_builtin_strstr (tree, rtx, enum machine_mode);
139 static rtx expand_builtin_strpbrk (tree, rtx, enum machine_mode);
140 static rtx expand_builtin_strchr (tree, rtx, enum machine_mode);
141 static rtx expand_builtin_strrchr (tree, rtx, enum machine_mode);
142 static rtx expand_builtin_alloca (tree, rtx);
143 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
144 static rtx expand_builtin_frame_address (tree, tree);
145 static rtx expand_builtin_fputs (tree, rtx, bool);
146 static rtx expand_builtin_printf (tree, rtx, enum machine_mode, bool);
147 static rtx expand_builtin_fprintf (tree, rtx, enum machine_mode, bool);
148 static rtx expand_builtin_sprintf (tree, rtx, enum machine_mode);
149 static tree stabilize_va_list (tree, int);
150 static rtx expand_builtin_expect (tree, rtx);
151 static tree fold_builtin_constant_p (tree);
152 static tree fold_builtin_expect (tree, tree);
153 static tree fold_builtin_classify_type (tree);
154 static tree fold_builtin_strlen (tree);
155 static tree fold_builtin_inf (tree, int);
156 static tree fold_builtin_nan (tree, tree, int);
157 static tree rewrite_call_expr (tree, int, tree, int, ...);
158 static bool validate_arg (const_tree, enum tree_code code);
159 static bool integer_valued_real_p (tree);
160 static tree fold_trunc_transparent_mathfn (tree, tree);
161 static bool readonly_data_expr (tree);
162 static rtx expand_builtin_fabs (tree, rtx, rtx);
163 static rtx expand_builtin_signbit (tree, rtx);
164 static tree fold_builtin_sqrt (tree, tree);
165 static tree fold_builtin_cbrt (tree, tree);
166 static tree fold_builtin_pow (tree, tree, tree, tree);
167 static tree fold_builtin_powi (tree, tree, tree, tree);
168 static tree fold_builtin_cos (tree, tree, tree);
169 static tree fold_builtin_cosh (tree, tree, tree);
170 static tree fold_builtin_tan (tree, tree);
171 static tree fold_builtin_trunc (tree, tree);
172 static tree fold_builtin_floor (tree, tree);
173 static tree fold_builtin_ceil (tree, tree);
174 static tree fold_builtin_round (tree, tree);
175 static tree fold_builtin_int_roundingfn (tree, tree);
176 static tree fold_builtin_bitop (tree, tree);
177 static tree fold_builtin_memory_op (tree, tree, tree, tree, bool, int);
178 static tree fold_builtin_strchr (tree, tree, tree);
179 static tree fold_builtin_memchr (tree, tree, tree, tree);
180 static tree fold_builtin_memcmp (tree, tree, tree);
181 static tree fold_builtin_strcmp (tree, tree);
182 static tree fold_builtin_strncmp (tree, tree, tree);
183 static tree fold_builtin_signbit (tree, tree);
184 static tree fold_builtin_copysign (tree, tree, tree, tree);
185 static tree fold_builtin_isascii (tree);
186 static tree fold_builtin_toascii (tree);
187 static tree fold_builtin_isdigit (tree);
188 static tree fold_builtin_fabs (tree, tree);
189 static tree fold_builtin_abs (tree, tree);
190 static tree fold_builtin_unordered_cmp (tree, tree, tree, enum tree_code,
191 enum tree_code);
192 static tree fold_builtin_n (tree, tree *, int, bool);
193 static tree fold_builtin_0 (tree, bool);
194 static tree fold_builtin_1 (tree, tree, bool);
195 static tree fold_builtin_2 (tree, tree, tree, bool);
196 static tree fold_builtin_3 (tree, tree, tree, tree, bool);
197 static tree fold_builtin_4 (tree, tree, tree, tree, tree, bool);
198 static tree fold_builtin_varargs (tree, tree, bool);
199
200 static tree fold_builtin_strpbrk (tree, tree, tree);
201 static tree fold_builtin_strstr (tree, tree, tree);
202 static tree fold_builtin_strrchr (tree, tree, tree);
203 static tree fold_builtin_strcat (tree, tree);
204 static tree fold_builtin_strncat (tree, tree, tree);
205 static tree fold_builtin_strspn (tree, tree);
206 static tree fold_builtin_strcspn (tree, tree);
207 static tree fold_builtin_sprintf (tree, tree, tree, int);
208
209 static rtx expand_builtin_object_size (tree);
210 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
211 enum built_in_function);
212 static void maybe_emit_chk_warning (tree, enum built_in_function);
213 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
214 static void maybe_emit_free_warning (tree);
215 static tree fold_builtin_object_size (tree, tree);
216 static tree fold_builtin_strcat_chk (tree, tree, tree, tree);
217 static tree fold_builtin_strncat_chk (tree, tree, tree, tree, tree);
218 static tree fold_builtin_sprintf_chk (tree, enum built_in_function);
219 static tree fold_builtin_printf (tree, tree, tree, bool, enum built_in_function);
220 static tree fold_builtin_fprintf (tree, tree, tree, tree, bool,
221 enum built_in_function);
222 static bool init_target_chars (void);
223
224 static unsigned HOST_WIDE_INT target_newline;
225 static unsigned HOST_WIDE_INT target_percent;
226 static unsigned HOST_WIDE_INT target_c;
227 static unsigned HOST_WIDE_INT target_s;
228 static char target_percent_c[3];
229 static char target_percent_s[3];
230 static char target_percent_s_newline[4];
231 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
232 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
233 static tree do_mpfr_arg2 (tree, tree, tree,
234 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
235 static tree do_mpfr_arg3 (tree, tree, tree, tree,
236 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
237 static tree do_mpfr_sincos (tree, tree, tree);
238 static tree do_mpfr_bessel_n (tree, tree, tree,
239 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
240 const REAL_VALUE_TYPE *, bool);
241 static tree do_mpfr_remquo (tree, tree, tree);
242 static tree do_mpfr_lgamma_r (tree, tree, tree);
243
/* Return true if NAME begins with one of the reserved builtin
   prefixes ("__builtin_" or "__sync_").  */

bool
is_builtin_name (const char *name)
{
  static const char *const prefixes[] = { "__builtin_", "__sync_" };
  size_t k;

  for (k = 0; k < sizeof prefixes / sizeof prefixes[0]; k++)
    if (strncmp (name, prefixes[k], strlen (prefixes[k])) == 0)
      return true;

  return false;
}
253
254 /* Return true if NODE should be considered for inline expansion regardless
255 of the optimization level. This means whenever a function is invoked with
256 its "internal" name, which normally contains the prefix "__builtin". */
257
258 static bool
259 called_as_built_in (tree node)
260 {
261 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
262 we want the name used to call the function, not the name it
263 will have. */
264 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
265 return is_builtin_name (name);
266 }
267
268 /* Return the alignment in bits of EXP, an object.
269 Don't return more than MAX_ALIGN no matter what, ALIGN is the inital
270 guessed alignment e.g. from type alignment. */
271
int
get_object_alignment (tree exp, unsigned int align, unsigned int max_align)
{
  unsigned int inner;

  /* INNER accumulates the alignment guaranteed by the offset structure
     of a component reference; start from the most optimistic value.  */
  inner = max_align;
  if (handled_component_p (exp))
    {
      HOST_WIDE_INT bitsize, bitpos;
      tree offset;
      enum machine_mode mode;
      int unsignedp, volatilep;

      exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
				 &mode, &unsignedp, &volatilep, true);
      /* A constant bit position only guarantees alignment up to its
	 lowest set bit; BITPOS & -BITPOS isolates that bit.  */
      if (bitpos)
	inner = MIN (inner, (unsigned) (bitpos & -bitpos));
      /* Walk a (possibly nested PLUS_EXPR) variable offset, narrowing
	 INNER by what each term can guarantee.  */
      while (offset)
	{
	  tree next_offset;

	  if (TREE_CODE (offset) == PLUS_EXPR)
	    {
	      next_offset = TREE_OPERAND (offset, 0);
	      offset = TREE_OPERAND (offset, 1);
	    }
	  else
	    next_offset = NULL;
	  if (host_integerp (offset, 1))
	    {
	      /* Any overflow in calculating offset_bits won't change
		 the alignment.  */
	      unsigned offset_bits
		= ((unsigned) tree_low_cst (offset, 1) * BITS_PER_UNIT);

	      if (offset_bits)
		inner = MIN (inner, (offset_bits & -offset_bits));
	    }
	  else if (TREE_CODE (offset) == MULT_EXPR
		   && host_integerp (TREE_OPERAND (offset, 1), 1))
	    {
	      /* Any overflow in calculating offset_factor won't change
		 the alignment.  */
	      unsigned offset_factor
		= ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
		   * BITS_PER_UNIT);

	      if (offset_factor)
		inner = MIN (inner, (offset_factor & -offset_factor));
	    }
	  else
	    {
	      /* A fully variable offset term guarantees only byte
		 alignment; no point looking at further terms.  */
	      inner = MIN (inner, BITS_PER_UNIT);
	      break;
	    }
	  offset = next_offset;
	}
    }
  /* Combine INNER with whatever the base object itself guarantees.  */
  if (DECL_P (exp))
    align = MIN (inner, DECL_ALIGN (exp));
#ifdef CONSTANT_ALIGNMENT
  else if (CONSTANT_CLASS_P (exp))
    align = MIN (inner, (unsigned)CONSTANT_ALIGNMENT (exp, align));
#endif
  else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR
	   || TREE_CODE (exp) == INDIRECT_REF)
    align = MIN (TYPE_ALIGN (TREE_TYPE (exp)), inner);
  else
    align = MIN (align, inner);
  return MIN (align, max_align);
}
343
344 /* Return the alignment in bits of EXP, a pointer valued expression.
345 But don't return more than MAX_ALIGN no matter what.
346 The alignment returned is, by default, the alignment of the thing that
347 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
348
349 Otherwise, look at the expression to see if we can do better, i.e., if the
350 expression is actually pointing at an object whose alignment is tighter. */
351
int
get_pointer_alignment (tree exp, unsigned int max_align)
{
  unsigned int align, inner;

  /* We rely on TER to compute accurate alignment information.  */
  if (!(optimize && flag_tree_ter))
    return 0;

  if (!POINTER_TYPE_P (TREE_TYPE (exp)))
    return 0;

  /* Start from the declared alignment of the pointed-to type.  */
  align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
  align = MIN (align, max_align);

  /* Peel conversions and pointer arithmetic, refining ALIGN as we go.  */
  while (1)
    {
      switch (TREE_CODE (exp))
	{
	CASE_CONVERT:
	  exp = TREE_OPERAND (exp, 0);
	  if (! POINTER_TYPE_P (TREE_TYPE (exp)))
	    return align;

	  inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
	  align = MIN (inner, max_align);
	  break;

	case POINTER_PLUS_EXPR:
	  /* If sum of pointer + int, restrict our maximum alignment to that
	     imposed by the integer.  If not, we can't do any better than
	     ALIGN.  */
	  if (! host_integerp (TREE_OPERAND (exp, 1), 1))
	    return align;

	  /* Halve MAX_ALIGN until the constant addend is a multiple of
	     it, since the addend caps the provable alignment.  */
	  while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
		  & (max_align / BITS_PER_UNIT - 1))
		 != 0)
	    max_align >>= 1;

	  exp = TREE_OPERAND (exp, 0);
	  break;

	case ADDR_EXPR:
	  /* See what we are pointing at and look at its alignment.  */
	  return get_object_alignment (TREE_OPERAND (exp, 0), align, max_align);

	default:
	  return align;
	}
    }
}
404
405 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
406 way, because it could contain a zero byte in the middle.
407 TREE_STRING_LENGTH is the size of the character array, not the string.
408
409 ONLY_VALUE should be nonzero if the result is not going to be emitted
410 into the instruction stream and zero if it is going to be expanded.
411 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
412 is returned, otherwise NULL, since
413 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
414 evaluate the side-effects.
415
416 The value returned is of type `ssizetype'.
417
418 Unfortunately, string_constant can't access the values of const char
419 arrays with initializers, so neither can we do so here. */
420
tree
c_strlen (tree src, int only_value)
{
  tree offset_node;
  HOST_WIDE_INT offset;
  int max;
  const char *ptr;

  STRIP_NOPS (src);
  /* For a conditional, the length is known only when both arms agree.
     Recursing would drop side effects of the condition, hence the
     ONLY_VALUE / TREE_SIDE_EFFECTS guard.  */
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  /* For (e1, e2) the length is that of e2, again guarding side effects.  */
  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  src = string_constant (src, &offset_node);
  if (src == 0)
    return NULL_TREE;

  /* MAX is the array size minus the implicit trailing NUL.  */
  max = TREE_STRING_LENGTH (src) - 1;
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      int i;

      for (i = 0; i < max; i++)
	if (ptr[i] == 0)
	  return NULL_TREE;

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  This would perhaps not be valid if we were dealing
	 with named arrays in addition to literal string constants.  */

      return size_diffop (size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (offset_node == 0)
    offset = 0;
  else if (! host_integerp (offset_node, 0))
    offset = -1;
  else
    offset = tree_low_cst (offset_node, 0);

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (! TREE_NO_WARNING (src))
	{
	  warning (0, "offset outside bounds of constant string");
	  TREE_NO_WARNING (src) = 1;
	}
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return ssize_int (strlen (ptr + offset));
}
503
504 /* Return a char pointer for a C string if it is a string constant
505 or sum of string constant and integer constant. */
506
507 static const char *
508 c_getstr (tree src)
509 {
510 tree offset_node;
511
512 src = string_constant (src, &offset_node);
513 if (src == 0)
514 return 0;
515
516 if (offset_node == 0)
517 return TREE_STRING_POINTER (src);
518 else if (!host_integerp (offset_node, 1)
519 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
520 return 0;
521
522 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
523 }
524
525 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
526 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
527
static rtx
c_readstr (const char *str, enum machine_mode mode)
{
  HOST_WIDE_INT c[2];
  HOST_WIDE_INT ch;
  unsigned int i, j;

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);

  c[0] = 0;
  c[1] = 0;
  /* CH doubles as a "still inside the string" flag: once a NUL byte is
     read it stays zero, so the remaining positions are zero-filled.  */
  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      /* Compute J, the bit position of host byte I inside the target
	 value, honoring byte order within words and word order within
	 multiword modes.  */
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;
      /* NOTE(review): the c[] access below requires J strictly less than
	 2 * HOST_BITS_PER_WIDE_INT; "<=" here looks one too lax, though
	 J can only be a byte index times BITS_PER_UNIT within the mode,
	 so equality should be unreachable — confirm.  */
      gcc_assert (j <= 2 * HOST_BITS_PER_WIDE_INT);

      if (ch)
	ch = (unsigned char) str[i];
      c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }
  return immed_double_const (c[0], c[1], mode);
}
557
558 /* Cast a target constant CST to target CHAR and if that value fits into
559 host char type, return zero and put that value into variable pointed to by
560 P. */
561
562 static int
563 target_char_cast (tree cst, char *p)
564 {
565 unsigned HOST_WIDE_INT val, hostval;
566
567 if (!host_integerp (cst, 1)
568 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
569 return 1;
570
571 val = tree_low_cst (cst, 1);
572 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
573 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
574
575 hostval = val;
576 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
577 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
578
579 if (val != hostval)
580 return 1;
581
582 *p = hostval;
583 return 0;
584 }
585
586 /* Similar to save_expr, but assumes that arbitrary code is not executed
587 in between the multiple evaluations. In particular, we assume that a
588 non-addressable local variable will not be modified. */
589
590 static tree
591 builtin_save_expr (tree exp)
592 {
593 if (TREE_ADDRESSABLE (exp) == 0
594 && (TREE_CODE (exp) == PARM_DECL
595 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp))))
596 return exp;
597
598 return save_expr (exp);
599 }
600
601 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
602 times to get the address of either a higher stack frame, or a return
603 address located within it (depending on FNDECL_CODE). */
604
static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;

#ifdef INITIAL_FRAME_ADDRESS_RTX
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
#else
  rtx tem;

  /* For a zero count with __builtin_return_address, we don't care what
     frame address we return, because target-specific definitions will
     override us.  Therefore frame pointer elimination is OK, and using
     the soft frame pointer is OK.

     For a nonzero count, or a zero count with __builtin_frame_address,
     we require a stable offset from the current frame pointer to the
     previous one, so we must use the hard frame pointer, and
     we must disable frame pointer elimination.  */
  if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    tem = frame_pointer_rtx;
  else
    {
      tem = hard_frame_pointer_rtx;

      /* Tell reload not to eliminate the frame pointer.  */
      crtl->accesses_prior_frames = 1;
    }
#endif

  /* Some machines need special handling before we can access
     arbitrary frames.  For example, on the SPARC, we must first flush
     all register windows to the stack.  */
#ifdef SETUP_FRAME_ADDRESSES
  if (count > 0)
    SETUP_FRAME_ADDRESSES ();
#endif

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
  if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;
#endif

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
#ifdef DYNAMIC_CHAIN_ADDRESS
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
      /* Load the saved frame pointer of the previous frame into a
	 register, following the chain one level.  */
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
#ifdef FRAME_ADDR_RTX
    return FRAME_ADDR_RTX (tem);
#else
    return tem;
#endif

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  /* Default: the return address lives one word past the frame address.  */
  tem = memory_address (Pmode,
			plus_constant (tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
684
685 /* Alias set used for setjmp buffer. */
686 static alias_set_type setjmp_alias_set = -1;
687
688 /* Construct the leading half of a __builtin_setjmp call. Control will
689 return to RECEIVER_LABEL. This is also called directly by the SJLJ
690 exception handling code. */
691
692 void
693 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
694 {
695 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
696 rtx stack_save;
697 rtx mem;
698
699 if (setjmp_alias_set == -1)
700 setjmp_alias_set = new_alias_set ();
701
702 buf_addr = convert_memory_address (Pmode, buf_addr);
703
704 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
705
706 /* We store the frame pointer and the address of receiver_label in
707 the buffer and use the rest of it for the stack save area, which
708 is machine-dependent. */
709
710 mem = gen_rtx_MEM (Pmode, buf_addr);
711 set_mem_alias_set (mem, setjmp_alias_set);
712 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
713
714 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
715 set_mem_alias_set (mem, setjmp_alias_set);
716
717 emit_move_insn (validize_mem (mem),
718 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
719
720 stack_save = gen_rtx_MEM (sa_mode,
721 plus_constant (buf_addr,
722 2 * GET_MODE_SIZE (Pmode)));
723 set_mem_alias_set (stack_save, setjmp_alias_set);
724 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
725
726 /* If there is further processing to do, do it. */
727 #ifdef HAVE_builtin_setjmp_setup
728 if (HAVE_builtin_setjmp_setup)
729 emit_insn (gen_builtin_setjmp_setup (buf_addr));
730 #endif
731
732 /* Tell optimize_save_area_alloca that extra work is going to
733 need to go on during alloca. */
734 cfun->calls_setjmp = 1;
735
736 /* We have a nonlocal label. */
737 cfun->has_nonlocal_label = 1;
738 }
739
740 /* Construct the trailing part of a __builtin_setjmp call. This is
741 also called directly by the SJLJ exception handling code. */
742
void
expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
{
  /* Clobber the FP when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  emit_clobber (static_chain_rtx);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
#ifdef HAVE_nonlocal_goto
  if (! HAVE_nonlocal_goto)
#endif
    {
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
      /* This might change the hard frame pointer in ways that aren't
	 apparent to early optimization passes, so force a clobber.  */
      emit_clobber (hard_frame_pointer_rtx);
    }

#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      /* If the argument pointer is eliminable to the (hard) frame
	 pointer on this target, no explicit restore is needed; only
	 restore it when no such elimination entry exists.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }
#endif

  /* Let the target emit its own receiver code if it has one; otherwise
     fall back to the nonlocal-goto receiver, otherwise do nothing.  */
#ifdef HAVE_builtin_setjmp_receiver
  if (HAVE_builtin_setjmp_receiver)
    emit_insn (gen_builtin_setjmp_receiver (receiver_label));
  else
#endif
#ifdef HAVE_nonlocal_goto_receiver
  if (HAVE_nonlocal_goto_receiver)
    emit_insn (gen_nonlocal_goto_receiver ());
  else
#endif
    { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}
806
807 /* __builtin_longjmp is passed a pointer to an array of five words (not
808 all will be used on all machines). It operates similarly to the C
809 library function of the same name, but is more efficient. Much of
810 the code below is copied from the handling of non-local gotos. */
811
static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack, insn, last;
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to current
     function */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We used to store value in static_chain_rtx, but that fails if pointers
     are smaller than integers.  We instead require that the user must pass
     a second argument of 1, because that is what builtin_setjmp will
     return.  This also makes EH slightly more efficient, since we are no
     longer copying around a value that we don't care about.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
#ifdef HAVE_builtin_longjmp
  if (HAVE_builtin_longjmp)
    emit_insn (gen_builtin_longjmp (buf_addr));
  else
#endif
    {
      /* The buffer layout mirrors expand_builtin_setjmp_setup:
	 word 0 = frame pointer, word 1 = label, words 2.. = stack area.  */
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
#ifdef HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
      else
#endif
	{
	  lab = copy_to_reg (lab);

	  /* Clobber all memory and the frame before restoring state, so
	     nothing is moved across the restore by the optimizers.  */
	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}
897
/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  Emits a jump and returns const0_rtx;
   control never falls through at run time.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp, insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  /* The save area holds the frame pointer in its first word and the
     stack pointer in the following save-area-mode slot.  */
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

#ifdef HAVE_nonlocal_goto
  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (HAVE_nonlocal_goto)
    emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
#endif
    {
      r_label = copy_to_reg (r_label);

      /* Clobber all memory and the frame pointer so nothing is kept
	 live in registers across the frame switch below.  */
      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.
	 This sets the actual hard register used for the frame pointer
	 to the location of the function's incoming static chain info.
	 The non-local goto handler will then adjust it to contain the
	 proper value and reload the argument pointer, if needed.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp, NULL_RTX);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}
982
/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to correspond to the current
   stack pointer.  */

static void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  enum machine_mode sa_mode = Pmode;
  rtx stack_save;

  /* Determine the mode used for the saved stack pointer: the mode of
     operand 0 of save_stack_nonlocal if the target provides that
     pattern, but STACK_SAVEAREA_MODE overrides it when defined.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    sa_mode = insn_data[(int) CODE_FOR_save_stack_nonlocal].operand[0].mode;
#endif
#ifdef STACK_SAVEAREA_MODE
  sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
#endif

  /* The stack pointer slot lives two pointer-sized words into the
     setjmp buffer, after the frame pointer and the label.  */
  stack_save
    = gen_rtx_MEM (sa_mode,
		   memory_address
		   (sa_mode,
		    plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));

#ifdef HAVE_setjmp
  if (HAVE_setjmp)
    emit_insn (gen_setjmp ());
#endif

  emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
}
1016
1017 /* Expand a call to __builtin_prefetch. For a target that does not support
1018 data prefetch, evaluate the memory address argument in case it has side
1019 effects. */
1020
1021 static void
1022 expand_builtin_prefetch (tree exp)
1023 {
1024 tree arg0, arg1, arg2;
1025 int nargs;
1026 rtx op0, op1, op2;
1027
1028 if (!validate_arglist (exp, POINTER_TYPE, 0))
1029 return;
1030
1031 arg0 = CALL_EXPR_ARG (exp, 0);
1032
1033 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1034 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1035 locality). */
1036 nargs = call_expr_nargs (exp);
1037 if (nargs > 1)
1038 arg1 = CALL_EXPR_ARG (exp, 1);
1039 else
1040 arg1 = integer_zero_node;
1041 if (nargs > 2)
1042 arg2 = CALL_EXPR_ARG (exp, 2);
1043 else
1044 arg2 = build_int_cst (NULL_TREE, 3);
1045
1046 /* Argument 0 is an address. */
1047 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1048
1049 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1050 if (TREE_CODE (arg1) != INTEGER_CST)
1051 {
1052 error ("second argument to %<__builtin_prefetch%> must be a constant");
1053 arg1 = integer_zero_node;
1054 }
1055 op1 = expand_normal (arg1);
1056 /* Argument 1 must be either zero or one. */
1057 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1058 {
1059 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1060 " using zero");
1061 op1 = const0_rtx;
1062 }
1063
1064 /* Argument 2 (locality) must be a compile-time constant int. */
1065 if (TREE_CODE (arg2) != INTEGER_CST)
1066 {
1067 error ("third argument to %<__builtin_prefetch%> must be a constant");
1068 arg2 = integer_zero_node;
1069 }
1070 op2 = expand_normal (arg2);
1071 /* Argument 2 must be 0, 1, 2, or 3. */
1072 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1073 {
1074 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1075 op2 = const0_rtx;
1076 }
1077
1078 #ifdef HAVE_prefetch
1079 if (HAVE_prefetch)
1080 {
1081 if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
1082 (op0,
1083 insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
1084 || (GET_MODE (op0) != Pmode))
1085 {
1086 op0 = convert_memory_address (Pmode, op0);
1087 op0 = force_reg (Pmode, op0);
1088 }
1089 emit_insn (gen_prefetch (op0, op1, op2));
1090 }
1091 #endif
1092
1093 /* Don't do anything with direct references to volatile memory, but
1094 generate code to handle other side effects. */
1095 if (!MEM_P (op0) && side_effects_p (op0))
1096 emit_insn (op0);
1097 }
1098
/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;
  HOST_WIDE_INT off;

  /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
     from its expression, for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  /* Expand the original (possibly SAVE_EXPR-wrapped) address and wrap
     it in a BLKmode MEM.  */
  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     If it is an ADDR_EXPR, use the operand.  Otherwise, dereference it if
     we can.  First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* &var + const: remember the constant offset and use &var's operand.  */
  off = 0;
  if (TREE_CODE (exp) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
      && host_integerp (TREE_OPERAND (exp, 1), 0)
      && (off = tree_low_cst (TREE_OPERAND (exp, 1), 0)) > 0)
    exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  else if (TREE_CODE (exp) == ADDR_EXPR)
    exp = TREE_OPERAND (exp, 0);
  else if (POINTER_TYPE_P (TREE_TYPE (exp)))
    exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
  else
    exp = NULL;

  /* Honor attributes derived from exp, except for the alias set
     (as builtin stringops may alias with anything) and the size
     (as stringops may access multiple array elements).  */
  if (exp)
    {
      set_mem_attributes (mem, exp, 0);

      if (off)
	mem = adjust_automodify_address_nv (mem, BLKmode, NULL, off);

      /* Allow the string and memory builtins to overflow from one
	 field into another, see http://gcc.gnu.org/PR23561.
	 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
	 memory accessed by the string or memory builtin will fit
	 within the field.  */
      if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
	{
	  tree mem_expr = MEM_EXPR (mem);
	  HOST_WIDE_INT offset = -1, length = -1;
	  tree inner = exp;

	  /* Strip array indexing and conversions to reach the
	     innermost COMPONENT_REF.  */
	  while (TREE_CODE (inner) == ARRAY_REF
		 || CONVERT_EXPR_P (inner)
		 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
		 || TREE_CODE (inner) == SAVE_EXPR)
	    inner = TREE_OPERAND (inner, 0);

	  gcc_assert (TREE_CODE (inner) == COMPONENT_REF);

	  if (MEM_OFFSET (mem)
	      && GET_CODE (MEM_OFFSET (mem)) == CONST_INT)
	    offset = INTVAL (MEM_OFFSET (mem));

	  if (offset >= 0 && len && host_integerp (len, 0))
	    length = tree_low_cst (len, 0);

	  /* Walk outward through the COMPONENT_REF chain, checking at
	     each level whether [offset, offset+length) fits inside the
	     field; -1 in OFFSET/LENGTH means "unknown".  */
	  while (TREE_CODE (inner) == COMPONENT_REF)
	    {
	      tree field = TREE_OPERAND (inner, 1);
	      gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
	      gcc_assert (field == TREE_OPERAND (mem_expr, 1));

	      /* Bitfields are generally not byte-addressable.  */
	      gcc_assert (!DECL_BIT_FIELD (field)
			  || ((tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
			       % BITS_PER_UNIT) == 0
			      && host_integerp (DECL_SIZE (field), 0)
			      && (TREE_INT_CST_LOW (DECL_SIZE (field))
				  % BITS_PER_UNIT) == 0));

	      /* If we can prove that the memory starting at XEXP (mem, 0) and
		 ending at XEXP (mem, 0) + LENGTH will fit into this field, we
		 can keep the COMPONENT_REF in MEM_EXPR.  But be careful with
		 fields without DECL_SIZE_UNIT like flexible array members.  */
	      if (length >= 0
		  && DECL_SIZE_UNIT (field)
		  && host_integerp (DECL_SIZE_UNIT (field), 0))
		{
		  HOST_WIDE_INT size
		    = TREE_INT_CST_LOW (DECL_SIZE_UNIT (field));
		  if (offset <= size
		      && length <= size
		      && offset + length <= size)
		    break;
		}

	      if (offset >= 0
		  && host_integerp (DECL_FIELD_OFFSET (field), 0))
		offset += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
			  + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
			    / BITS_PER_UNIT;
	      else
		{
		  offset = -1;
		  length = -1;
		}

	      mem_expr = TREE_OPERAND (mem_expr, 0);
	      inner = TREE_OPERAND (inner, 0);
	    }

	  if (mem_expr == NULL)
	    offset = -1;
	  if (mem_expr != MEM_EXPR (mem))
	    {
	      set_mem_expr (mem, mem_expr);
	      set_mem_offset (mem, offset >= 0 ? GEN_INT (offset) : NULL_RTX);
	    }
	}
      /* Clear alias set and size; see the block comment above.  */
      set_mem_alias_set (mem, 0);
      set_mem_size (mem, NULL_RTX);
    }

  return mem;
}
1234 \f
/* Built-in functions to perform an untyped call and return.  */

/* For each register that may be used for calling a function, this
   gives a mode used to copy the register's value.  VOIDmode indicates
   the register is not used for calling a function.  If the machine
   has register windows, this gives only the outbound registers.
   INCOMING_REGNO gives the corresponding inbound register.
   Filled in lazily by apply_args_size.  */
static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];

/* For each register that may be used for returning values, this gives
   a mode used to copy the register's value.  VOIDmode indicates the
   register is not used for returning values.  If the machine has
   register windows, this gives only the outbound registers.
   INCOMING_REGNO gives the corresponding inbound register.
   Filled in lazily by apply_result_size.  */
static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];

/* For each register that may be used for calling a function, this
   gives the offset of that register into the block returned by
   __builtin_apply_args.  0 indicates that the register is not
   used for calling a function.  Filled in lazily by apply_args_size.  */
static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
1256
1257 /* Return the size required for the block returned by __builtin_apply_args,
1258 and initialize apply_args_mode. */
1259
1260 static int
1261 apply_args_size (void)
1262 {
1263 static int size = -1;
1264 int align;
1265 unsigned int regno;
1266 enum machine_mode mode;
1267
1268 /* The values computed by this function never change. */
1269 if (size < 0)
1270 {
1271 /* The first value is the incoming arg-pointer. */
1272 size = GET_MODE_SIZE (Pmode);
1273
1274 /* The second value is the structure value address unless this is
1275 passed as an "invisible" first argument. */
1276 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1277 size += GET_MODE_SIZE (Pmode);
1278
1279 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1280 if (FUNCTION_ARG_REGNO_P (regno))
1281 {
1282 mode = reg_raw_mode[regno];
1283
1284 gcc_assert (mode != VOIDmode);
1285
1286 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1287 if (size % align != 0)
1288 size = CEIL (size, align) * align;
1289 apply_args_reg_offset[regno] = size;
1290 size += GET_MODE_SIZE (mode);
1291 apply_args_mode[regno] = mode;
1292 }
1293 else
1294 {
1295 apply_args_mode[regno] = VOIDmode;
1296 apply_args_reg_offset[regno] = 0;
1297 }
1298 }
1299 return size;
1300 }
1301
1302 /* Return the size required for the block returned by __builtin_apply,
1303 and initialize apply_result_mode. */
1304
1305 static int
1306 apply_result_size (void)
1307 {
1308 static int size = -1;
1309 int align, regno;
1310 enum machine_mode mode;
1311
1312 /* The values computed by this function never change. */
1313 if (size < 0)
1314 {
1315 size = 0;
1316
1317 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1318 if (FUNCTION_VALUE_REGNO_P (regno))
1319 {
1320 mode = reg_raw_mode[regno];
1321
1322 gcc_assert (mode != VOIDmode);
1323
1324 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1325 if (size % align != 0)
1326 size = CEIL (size, align) * align;
1327 size += GET_MODE_SIZE (mode);
1328 apply_result_mode[regno] = mode;
1329 }
1330 else
1331 apply_result_mode[regno] = VOIDmode;
1332
1333 /* Allow targets that use untyped_call and untyped_return to override
1334 the size so that machine-specific information can be stored here. */
1335 #ifdef APPLY_RESULT_SIZE
1336 size = APPLY_RESULT_SIZE;
1337 #endif
1338 }
1339 return size;
1340 }
1341
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  Returns a PARALLEL of SETs, one per live
   return register recorded in apply_result_mode.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  enum machine_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	/* Keep each slot aligned to its mode, mirroring the layout
	   computed by apply_result_size.  */
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	/* When restoring, the destination is the inbound register.  */
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (VOIDmode, mem, reg)
			    : gen_rtx_SET (VOIDmode, reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
#endif /* HAVE_untyped_call or HAVE_untyped_return */
1372
/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  Returns the
   address of the stack block holding the saved state.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  enum machine_mode mode;
  rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.
     The layout here must match apply_args_size.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
#ifdef STACK_GROWS_DOWNWARD
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  tem
    = force_operand (plus_constant (tem, crtl->args.pretend_args_size),
		     NULL_RTX);
#endif
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
		      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}
1433
/* __builtin_apply_args returns block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;
    rtx seq;

    /* Build the save sequence in isolation so it can be relocated.  */
    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
	&& REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}
1478
/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.
   FUNCTION is the function's address, ARGUMENTS points to the block
   built by __builtin_apply_args, and ARGSIZE is the number of bytes of
   arguments to push.  Returns the address of the saved-result block.  */

static rtx
expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
{
  int size, align, regno;
  enum machine_mode mode;
  rtx incoming_args, result, reg, dest, src, call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;
  rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);

  arguments = convert_memory_address (Pmode, arguments);

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
#ifndef STACK_GROWS_DOWNWARD
  incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
				       incoming_args, 0, OPTAB_LIB_WIDEN);
#endif

  /* Push a new argument block and copy the arguments.  Do not allow
     the (potential) memcpy call below to interfere with our stack
     manipulations.  */
  do_pending_stack_adjust ();
  NO_DEFER_POP;

  /* Save the stack with nonlocal if available.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
  else
#endif
    emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);

  /* Allocate a block of memory onto the stack and copy the memory
     arguments to the outgoing arguments address.  */
  allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);

  /* Set DRAP flag to true, even though allocate_dynamic_stack_space
     may have already set current_function_calls_alloca to true.
     current_function_calls_alloca won't be set if argsize is zero,
     so we have to guarantee need_drap is true here.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  dest = virtual_outgoing_args_rtx;
#ifndef STACK_GROWS_DOWNWARD
  if (GET_CODE (argsize) == CONST_INT)
    dest = plus_constant (dest, -INTVAL (argsize));
  else
    dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
#endif
  dest = gen_rtx_MEM (BLKmode, dest);
  set_mem_align (dest, PARM_BOUNDARY);
  src = gen_rtx_MEM (BLKmode, incoming_args);
  set_mem_align (src, PARM_BOUNDARY);
  emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);

  /* Refer to the argument block.  apply_args_size () is called for
     its side effect of initializing apply_args_mode.  */
  apply_args_size ();
  arguments = gen_rtx_MEM (BLKmode, arguments);
  set_mem_align (arguments, PARM_BOUNDARY);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, regno);
	emit_move_insn (reg, adjust_address (arguments, mode, size));
	use_reg (&call_fusage, reg);
	size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value, adjust_address (arguments, Pmode, size));
      emit_move_insn (struct_value, value);
      if (REG_P (struct_value))
	use_reg (&call_fusage, struct_value);
      size += GET_MODE_SIZE (Pmode);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (function, NULL, &call_fusage, 0, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
#ifdef HAVE_untyped_call
  if (HAVE_untyped_call)
    emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
				      result, result_vector (1, result)));
  else
#endif
#ifdef HAVE_call_value
  if (HAVE_call_value)
    {
      rtx valreg = 0;

      /* Locate the unique return register.  It is not possible to
	 express a call that sets more than one return register using
	 call_value; use untyped_call for that.  In fact, untyped_call
	 only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if ((mode = apply_result_mode[regno]) != VOIDmode)
	  {
	    gcc_assert (!valreg); /* HAVE_untyped_call required.  */

	    valreg = gen_rtx_REG (mode, regno);
	  }

      emit_call_insn (GEN_CALL_VALUE (valreg,
				      gen_rtx_MEM (FUNCTION_MODE, function),
				      const0_rtx, NULL_RTX, const0_rtx));

      emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
    }
  else
#endif
    gcc_unreachable ();

  /* Find the CALL insn we just emitted, and attach the register usage
     information.  */
  call_insn = last_call_insn ();
  add_function_usage_to (call_insn, call_fusage);

  /* Restore the stack.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
  else
#endif
    emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);

  OK_DEFER_POP;

  /* Return the address of the result block.  */
  result = copy_addr_to_reg (XEXP (result, 0));
  return convert_memory_address (ptr_mode, result);
}
1642
/* Perform an untyped return.  RESULT is the address of the block of
   return-register values saved by expand_builtin_apply.  */

static void
expand_builtin_return (rtx result)
{
  int size, align, regno;
  enum machine_mode mode;
  rtx reg;
  rtx call_fusage = 0;

  result = convert_memory_address (Pmode, result);

  /* Called for its side effect of initializing apply_result_mode.  */
  apply_result_size ();
  result = gen_rtx_MEM (BLKmode, result);

#ifdef HAVE_untyped_return
  if (HAVE_untyped_return)
    {
      emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
      emit_barrier ();
      return;
    }
#endif

  /* Restore the return value and note that each value is used.
     The layout must match apply_result_size.  */
  size = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
	emit_move_insn (reg, adjust_address (result, mode, size));

	/* Collect the USEs in a separate sequence so they can all be
	   emitted just before the return below.  */
	push_to_sequence (call_fusage);
	emit_use (reg);
	call_fusage = get_insns ();
	end_sequence ();
	size += GET_MODE_SIZE (mode);
      }

  /* Put the USE insns before the return.  */
  emit_insn (call_fusage);

  /* Return whatever values was restored by jumping directly to the end
     of the function.  */
  expand_naked_return ();
}
1692
1693 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1694
1695 static enum type_class
1696 type_to_class (tree type)
1697 {
1698 switch (TREE_CODE (type))
1699 {
1700 case VOID_TYPE: return void_type_class;
1701 case INTEGER_TYPE: return integer_type_class;
1702 case ENUMERAL_TYPE: return enumeral_type_class;
1703 case BOOLEAN_TYPE: return boolean_type_class;
1704 case POINTER_TYPE: return pointer_type_class;
1705 case REFERENCE_TYPE: return reference_type_class;
1706 case OFFSET_TYPE: return offset_type_class;
1707 case REAL_TYPE: return real_type_class;
1708 case COMPLEX_TYPE: return complex_type_class;
1709 case FUNCTION_TYPE: return function_type_class;
1710 case METHOD_TYPE: return method_type_class;
1711 case RECORD_TYPE: return record_type_class;
1712 case UNION_TYPE:
1713 case QUAL_UNION_TYPE: return union_type_class;
1714 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1715 ? string_type_class : array_type_class);
1716 case LANG_TYPE: return lang_type_class;
1717 default: return no_type_class;
1718 }
1719 }
1720
1721 /* Expand a call EXP to __builtin_classify_type. */
1722
1723 static rtx
1724 expand_builtin_classify_type (tree exp)
1725 {
1726 if (call_expr_nargs (exp))
1727 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1728 return GEN_INT (no_type_class);
1729 }
1730
/* This helper macro, meant to be used in mathfn_built_in below,
   determines which among a set of three builtin math functions is
   appropriate for a given type mode.  The `F' and `L' cases are
   automatically generated from the `double' case.  It expands to the
   three case labels and assigns fcode/fcodef/fcodel in the enclosing
   scope.  */
#define CASE_MATHFN(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
  fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
  fcodel = BUILT_IN_MATHFN##L ; break;
/* Similar to above, but appends _R after any F/L suffix, for the
   reentrant variants (e.g. lgamma_r/lgammaf_r/lgammal_r).  */
#define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
  fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
  fcodel = BUILT_IN_MATHFN##L_R ; break;
1744
/* Return mathematic function equivalent to FN but operating directly
   on TYPE, if available.  If IMPLICIT is true find the function in
   implicit_built_in_decls[], otherwise use built_in_decls[].  If we
   can't do the conversion, return zero.  */

static tree
mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit)
{
  tree const *const fn_arr
    = implicit ? implicit_built_in_decls : built_in_decls;
  enum built_in_function fcode, fcodef, fcodel;

  /* Each CASE_MATHFN expansion maps the double/float/long double
     variants of one math builtin to fcode/fcodef/fcodel.  */
  switch (fn)
    {
    CASE_MATHFN (BUILT_IN_ACOS)
    CASE_MATHFN (BUILT_IN_ACOSH)
    CASE_MATHFN (BUILT_IN_ASIN)
    CASE_MATHFN (BUILT_IN_ASINH)
    CASE_MATHFN (BUILT_IN_ATAN)
    CASE_MATHFN (BUILT_IN_ATAN2)
    CASE_MATHFN (BUILT_IN_ATANH)
    CASE_MATHFN (BUILT_IN_CBRT)
    CASE_MATHFN (BUILT_IN_CEIL)
    CASE_MATHFN (BUILT_IN_CEXPI)
    CASE_MATHFN (BUILT_IN_COPYSIGN)
    CASE_MATHFN (BUILT_IN_COS)
    CASE_MATHFN (BUILT_IN_COSH)
    CASE_MATHFN (BUILT_IN_DREM)
    CASE_MATHFN (BUILT_IN_ERF)
    CASE_MATHFN (BUILT_IN_ERFC)
    CASE_MATHFN (BUILT_IN_EXP)
    CASE_MATHFN (BUILT_IN_EXP10)
    CASE_MATHFN (BUILT_IN_EXP2)
    CASE_MATHFN (BUILT_IN_EXPM1)
    CASE_MATHFN (BUILT_IN_FABS)
    CASE_MATHFN (BUILT_IN_FDIM)
    CASE_MATHFN (BUILT_IN_FLOOR)
    CASE_MATHFN (BUILT_IN_FMA)
    CASE_MATHFN (BUILT_IN_FMAX)
    CASE_MATHFN (BUILT_IN_FMIN)
    CASE_MATHFN (BUILT_IN_FMOD)
    CASE_MATHFN (BUILT_IN_FREXP)
    CASE_MATHFN (BUILT_IN_GAMMA)
    CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
    CASE_MATHFN (BUILT_IN_HUGE_VAL)
    CASE_MATHFN (BUILT_IN_HYPOT)
    CASE_MATHFN (BUILT_IN_ILOGB)
    CASE_MATHFN (BUILT_IN_INF)
    CASE_MATHFN (BUILT_IN_ISINF)
    CASE_MATHFN (BUILT_IN_J0)
    CASE_MATHFN (BUILT_IN_J1)
    CASE_MATHFN (BUILT_IN_JN)
    CASE_MATHFN (BUILT_IN_LCEIL)
    CASE_MATHFN (BUILT_IN_LDEXP)
    CASE_MATHFN (BUILT_IN_LFLOOR)
    CASE_MATHFN (BUILT_IN_LGAMMA)
    CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
    CASE_MATHFN (BUILT_IN_LLCEIL)
    CASE_MATHFN (BUILT_IN_LLFLOOR)
    CASE_MATHFN (BUILT_IN_LLRINT)
    CASE_MATHFN (BUILT_IN_LLROUND)
    CASE_MATHFN (BUILT_IN_LOG)
    CASE_MATHFN (BUILT_IN_LOG10)
    CASE_MATHFN (BUILT_IN_LOG1P)
    CASE_MATHFN (BUILT_IN_LOG2)
    CASE_MATHFN (BUILT_IN_LOGB)
    CASE_MATHFN (BUILT_IN_LRINT)
    CASE_MATHFN (BUILT_IN_LROUND)
    CASE_MATHFN (BUILT_IN_MODF)
    CASE_MATHFN (BUILT_IN_NAN)
    CASE_MATHFN (BUILT_IN_NANS)
    CASE_MATHFN (BUILT_IN_NEARBYINT)
    CASE_MATHFN (BUILT_IN_NEXTAFTER)
    CASE_MATHFN (BUILT_IN_NEXTTOWARD)
    CASE_MATHFN (BUILT_IN_POW)
    CASE_MATHFN (BUILT_IN_POWI)
    CASE_MATHFN (BUILT_IN_POW10)
    CASE_MATHFN (BUILT_IN_REMAINDER)
    CASE_MATHFN (BUILT_IN_REMQUO)
    CASE_MATHFN (BUILT_IN_RINT)
    CASE_MATHFN (BUILT_IN_ROUND)
    CASE_MATHFN (BUILT_IN_SCALB)
    CASE_MATHFN (BUILT_IN_SCALBLN)
    CASE_MATHFN (BUILT_IN_SCALBN)
    CASE_MATHFN (BUILT_IN_SIGNBIT)
    CASE_MATHFN (BUILT_IN_SIGNIFICAND)
    CASE_MATHFN (BUILT_IN_SIN)
    CASE_MATHFN (BUILT_IN_SINCOS)
    CASE_MATHFN (BUILT_IN_SINH)
    CASE_MATHFN (BUILT_IN_SQRT)
    CASE_MATHFN (BUILT_IN_TAN)
    CASE_MATHFN (BUILT_IN_TANH)
    CASE_MATHFN (BUILT_IN_TGAMMA)
    CASE_MATHFN (BUILT_IN_TRUNC)
    CASE_MATHFN (BUILT_IN_Y0)
    CASE_MATHFN (BUILT_IN_Y1)
    CASE_MATHFN (BUILT_IN_YN)

    default:
      return NULL_TREE;
    }

  /* Pick the variant matching TYPE; only the three standard float
     types are supported.  */
  if (TYPE_MAIN_VARIANT (type) == double_type_node)
    return fn_arr[fcode];
  else if (TYPE_MAIN_VARIANT (type) == float_type_node)
    return fn_arr[fcodef];
  else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
    return fn_arr[fcodel];
  else
    return NULL_TREE;
}
1856
1857 /* Like mathfn_built_in_1(), but always use the implicit array. */
1858
1859 tree
1860 mathfn_built_in (tree type, enum built_in_function fn)
1861 {
1862 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1863 }
1864
/* If errno must be maintained, expand the RTL to check if the result,
   TARGET, of a built-in function call, EXP, is NaN, and if so set
   errno to EDOM.  */

static void
expand_errno_check (tree exp, rtx target)
{
  rtx lab = gen_label_rtx ();

  /* Test the result; if it is NaN, set errno=EDOM because
     the argument was not in the domain.  A self-comparison with EQ
     succeeds for every value except NaN, so LAB is reached (skipping
     the errno store / library call) exactly when the result is not a
     NaN.  */
  emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target),
			   0, lab);

#ifdef TARGET_EDOM
  /* If this built-in doesn't throw an exception, set errno directly.  */
  if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
    {
#ifdef GEN_ERRNO_RTX
      rtx errno_rtx = GEN_ERRNO_RTX;
#else
      rtx errno_rtx
	  = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
#endif
      emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
      emit_label (lab);
      return;
    }
#endif

  /* Make sure the library call isn't expanded as a tail call.  */
  CALL_EXPR_TAILCALL (exp) = 0;

  /* We can't set errno=EDOM directly; let the library call do it.
     Pop the arguments right away in case the call gets deleted.  */
  NO_DEFER_POP;
  expand_call (exp, target, 0);
  OK_DEFER_POP;
  emit_label (lab);
}
1905
1906 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1907 Return NULL_RTX if a normal call should be emitted rather than expanding
1908 the function in-line. EXP is the expression that is a call to the builtin
1909 function; if convenient, the result should be placed in TARGET.
1910 SUBTARGET may be used as the target for computing one of EXP's operands. */
1911
1912 static rtx
1913 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1914 {
1915 optab builtin_optab;
1916 rtx op0, insns, before_call;
1917 tree fndecl = get_callee_fndecl (exp);
1918 enum machine_mode mode;
1919 bool errno_set = false;
1920 tree arg;
1921
1922 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
1923 return NULL_RTX;
1924
1925 arg = CALL_EXPR_ARG (exp, 0);
1926
1927 switch (DECL_FUNCTION_CODE (fndecl))
1928 {
1929 CASE_FLT_FN (BUILT_IN_SQRT):
1930 errno_set = ! tree_expr_nonnegative_p (arg);
1931 builtin_optab = sqrt_optab;
1932 break;
1933 CASE_FLT_FN (BUILT_IN_EXP):
1934 errno_set = true; builtin_optab = exp_optab; break;
1935 CASE_FLT_FN (BUILT_IN_EXP10):
1936 CASE_FLT_FN (BUILT_IN_POW10):
1937 errno_set = true; builtin_optab = exp10_optab; break;
1938 CASE_FLT_FN (BUILT_IN_EXP2):
1939 errno_set = true; builtin_optab = exp2_optab; break;
1940 CASE_FLT_FN (BUILT_IN_EXPM1):
1941 errno_set = true; builtin_optab = expm1_optab; break;
1942 CASE_FLT_FN (BUILT_IN_LOGB):
1943 errno_set = true; builtin_optab = logb_optab; break;
1944 CASE_FLT_FN (BUILT_IN_LOG):
1945 errno_set = true; builtin_optab = log_optab; break;
1946 CASE_FLT_FN (BUILT_IN_LOG10):
1947 errno_set = true; builtin_optab = log10_optab; break;
1948 CASE_FLT_FN (BUILT_IN_LOG2):
1949 errno_set = true; builtin_optab = log2_optab; break;
1950 CASE_FLT_FN (BUILT_IN_LOG1P):
1951 errno_set = true; builtin_optab = log1p_optab; break;
1952 CASE_FLT_FN (BUILT_IN_ASIN):
1953 builtin_optab = asin_optab; break;
1954 CASE_FLT_FN (BUILT_IN_ACOS):
1955 builtin_optab = acos_optab; break;
1956 CASE_FLT_FN (BUILT_IN_TAN):
1957 builtin_optab = tan_optab; break;
1958 CASE_FLT_FN (BUILT_IN_ATAN):
1959 builtin_optab = atan_optab; break;
1960 CASE_FLT_FN (BUILT_IN_FLOOR):
1961 builtin_optab = floor_optab; break;
1962 CASE_FLT_FN (BUILT_IN_CEIL):
1963 builtin_optab = ceil_optab; break;
1964 CASE_FLT_FN (BUILT_IN_TRUNC):
1965 builtin_optab = btrunc_optab; break;
1966 CASE_FLT_FN (BUILT_IN_ROUND):
1967 builtin_optab = round_optab; break;
1968 CASE_FLT_FN (BUILT_IN_NEARBYINT):
1969 builtin_optab = nearbyint_optab;
1970 if (flag_trapping_math)
1971 break;
1972 /* Else fallthrough and expand as rint. */
1973 CASE_FLT_FN (BUILT_IN_RINT):
1974 builtin_optab = rint_optab; break;
1975 default:
1976 gcc_unreachable ();
1977 }
1978
1979 /* Make a suitable register to place result in. */
1980 mode = TYPE_MODE (TREE_TYPE (exp));
1981
1982 if (! flag_errno_math || ! HONOR_NANS (mode))
1983 errno_set = false;
1984
1985 /* Before working hard, check whether the instruction is available. */
1986 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
1987 {
1988 target = gen_reg_rtx (mode);
1989
1990 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
1991 need to expand the argument again. This way, we will not perform
1992 side-effects more the once. */
1993 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
1994
1995 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
1996
1997 start_sequence ();
1998
1999 /* Compute into TARGET.
2000 Set TARGET to wherever the result comes back. */
2001 target = expand_unop (mode, builtin_optab, op0, target, 0);
2002
2003 if (target != 0)
2004 {
2005 if (errno_set)
2006 expand_errno_check (exp, target);
2007
2008 /* Output the entire sequence. */
2009 insns = get_insns ();
2010 end_sequence ();
2011 emit_insn (insns);
2012 return target;
2013 }
2014
2015 /* If we were unable to expand via the builtin, stop the sequence
2016 (without outputting the insns) and call to the library function
2017 with the stabilized argument list. */
2018 end_sequence ();
2019 }
2020
2021 before_call = get_last_insn ();
2022
2023 return expand_call (exp, target, target == const0_rtx);
2024 }
2025
/* Expand a call to the builtin binary math functions (pow and atan2).
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, op1, insns;
  int op1_type = REAL_TYPE;
  tree fndecl = get_callee_fndecl (exp);
  tree arg0, arg1;
  enum machine_mode mode;
  bool errno_set = true;

  /* ldexp/scalbn/scalbln take an integer second argument; all other
     builtins handled here take two reals.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
    CASE_FLT_FN (BUILT_IN_LDEXP):
      op1_type = INTEGER_TYPE;
      /* Fall through.  */
    default:
      break;
    }

  if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);

  /* Map the builtin to the optab implementing it.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_POW):
      builtin_optab = pow_optab; break;
    CASE_FLT_FN (BUILT_IN_ATAN2):
      builtin_optab = atan2_optab; break;
    CASE_FLT_FN (BUILT_IN_SCALB):
      /* scalb is only expandable when the radix of MODE is 2.  */
      if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
	return 0;
      builtin_optab = scalb_optab; break;
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
      if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
	return 0;
      /* Fall through... */
    CASE_FLT_FN (BUILT_IN_LDEXP):
      builtin_optab = ldexp_optab; break;
    CASE_FLT_FN (BUILT_IN_FMOD):
      builtin_optab = fmod_optab; break;
    CASE_FLT_FN (BUILT_IN_REMAINDER):
    CASE_FLT_FN (BUILT_IN_DREM):
      builtin_optab = remainder_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
    return NULL_RTX;

  target = gen_reg_rtx (mode);

  /* If errno is ignored, or NaNs cannot occur in MODE, the errno
     check would be dead code.  */
  if (! flag_errno_math || ! HONOR_NANS (mode))
    errno_set = false;

  /* Always stabilize the argument list.  */
  CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
  CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);

  op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
  op1 = expand_normal (arg1);

  start_sequence ();

  /* Compute into TARGET.
     Set TARGET to wherever the result comes back.  */
  target = expand_binop (mode, builtin_optab, op0, op1,
			 target, 0, OPTAB_DIRECT);

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call to the library function
     with the stabilized argument list.  */
  if (target == 0)
    {
      end_sequence ();
      return expand_call (exp, target, target == const0_rtx);
    }

  if (errno_set)
    expand_errno_check (exp, target);

  /* Output the entire sequence.  */
  insns = get_insns ();
  end_sequence ();
  emit_insn (insns);

  return target;
}
2131
/* Expand a call to the builtin sin and cos math functions.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, insns;
  tree fndecl = get_callee_fndecl (exp);
  enum machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  /* Both sin and cos are first tried through the combined sincos
     optab.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_COS):
      builtin_optab = sincos_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Check if sincos insn is available, otherwise fallback
     to sin or cos insn.  */
  if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_FLT_FN (BUILT_IN_SIN):
	builtin_optab = sin_optab; break;
      CASE_FLT_FN (BUILT_IN_COS):
	builtin_optab = cos_optab; break;
      default:
	gcc_unreachable ();
      }

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
    {
      target = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      /* Compute into TARGET.
	 Set TARGET to wherever the result comes back.  */
      if (builtin_optab == sincos_optab)
	{
	  int result;

	  /* The sincos insn produces both values; pass TARGET in the
	     output slot this builtin wants and 0 for the other, which
	     is discarded.  */
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    CASE_FLT_FN (BUILT_IN_SIN):
	      result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_COS):
	      result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
	      break;
	    default:
	      gcc_unreachable ();
	    }
	  gcc_assert (result);
	}
      else
	{
	  target = expand_unop (mode, builtin_optab, op0, target, 0);
	}

      if (target != 0)
	{
	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return target;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call to the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  target = expand_call (exp, target, target == const0_rtx);

  return target;
}
2235
/* Expand a call to one of the builtin math functions that operate on
   floating point argument and output an integer result (ilogb, isinf,
   isnan, etc).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_interclass_mathfn (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab = 0;
  enum insn_code icode = CODE_FOR_nothing;
  rtx op0;
  tree fndecl = get_callee_fndecl (exp);
  enum machine_mode mode;
  bool errno_set = false;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  /* Map the builtin to an optab where one exists; the rest are
     lowered by the generic code further below.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_ILOGB):
      errno_set = true; builtin_optab = ilogb_optab; break;
    CASE_FLT_FN (BUILT_IN_ISINF):
      builtin_optab = isinf_optab; break;
    case BUILT_IN_ISNORMAL:
    case BUILT_IN_ISFINITE:
    CASE_FLT_FN (BUILT_IN_FINITE):
      /* These builtins have no optabs (yet).  */
      break;
    default:
      gcc_unreachable ();
    }

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math && errno_set)
    return NULL_RTX;

  /* Optab mode depends on the mode of the input argument.  */
  mode = TYPE_MODE (TREE_TYPE (arg));

  if (builtin_optab)
    icode = optab_handler (builtin_optab, mode)->insn_code;

  /* Before working hard, check whether the instruction is available.  */
  if (icode != CODE_FOR_nothing)
    {
      /* Make a suitable register to place result in.  */
      if (!target
	  || GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
	target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

      gcc_assert (insn_data[icode].operand[0].predicate
		  (target, GET_MODE (target)));

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      if (mode != GET_MODE (op0))
	op0 = convert_to_mode (mode, op0, 0);

      /* Compute into TARGET.
	 Set TARGET to wherever the result comes back.  */
      emit_unop_insn (icode, target, op0, UNKNOWN);
      return target;
    }

  /* If there is no optab, try generic code.  Each lowering below
     rewrites the classification builtin in terms of the unordered
     comparison builtins, then expands the resulting expression.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
      tree result;

    CASE_FLT_FN (BUILT_IN_ISINF):
      {
	/* isinf(x) -> isgreater(fabs(x),DBL_MAX).  */
	tree const isgr_fn = built_in_decls[BUILT_IN_ISGREATER];
	tree const type = TREE_TYPE (arg);
	REAL_VALUE_TYPE r;
	char buf[128];

	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
	real_from_string (&r, buf);
	result = build_call_expr (isgr_fn, 2,
				  fold_build1 (ABS_EXPR, type, arg),
				  build_real (type, r));
	return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
      }
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_ISFINITE:
      {
	/* isfinite(x) -> islessequal(fabs(x),DBL_MAX).  */
	tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
	tree const type = TREE_TYPE (arg);
	REAL_VALUE_TYPE r;
	char buf[128];

	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
	real_from_string (&r, buf);
	result = build_call_expr (isle_fn, 2,
				  fold_build1 (ABS_EXPR, type, arg),
				  build_real (type, r));
	return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
      }
    case BUILT_IN_ISNORMAL:
      {
	/* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
	   islessequal(fabs(x),DBL_MAX).  */
	tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
	tree const isge_fn = built_in_decls[BUILT_IN_ISGREATEREQUAL];
	tree const type = TREE_TYPE (arg);
	REAL_VALUE_TYPE rmax, rmin;
	char buf[128];

	/* rmin is the smallest normalized value, 2^(emin-1).  */
	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
	real_from_string (&rmax, buf);
	sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
	real_from_string (&rmin, buf);
	arg = builtin_save_expr (fold_build1 (ABS_EXPR, type, arg));
	result = build_call_expr (isle_fn, 2, arg,
				  build_real (type, rmax));
	result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
			      build_call_expr (isge_fn, 2, arg,
					       build_real (type, rmin)));
	return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
      }
    default:
      break;
    }

  target = expand_call (exp, target, target == const0_rtx);

  return target;
}
2378
2379 /* Expand a call to the builtin sincos math function.
2380 Return NULL_RTX if a normal call should be emitted rather than expanding the
2381 function in-line. EXP is the expression that is a call to the builtin
2382 function. */
2383
2384 static rtx
2385 expand_builtin_sincos (tree exp)
2386 {
2387 rtx op0, op1, op2, target1, target2;
2388 enum machine_mode mode;
2389 tree arg, sinp, cosp;
2390 int result;
2391
2392 if (!validate_arglist (exp, REAL_TYPE,
2393 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2394 return NULL_RTX;
2395
2396 arg = CALL_EXPR_ARG (exp, 0);
2397 sinp = CALL_EXPR_ARG (exp, 1);
2398 cosp = CALL_EXPR_ARG (exp, 2);
2399
2400 /* Make a suitable register to place result in. */
2401 mode = TYPE_MODE (TREE_TYPE (arg));
2402
2403 /* Check if sincos insn is available, otherwise emit the call. */
2404 if (optab_handler (sincos_optab, mode)->insn_code == CODE_FOR_nothing)
2405 return NULL_RTX;
2406
2407 target1 = gen_reg_rtx (mode);
2408 target2 = gen_reg_rtx (mode);
2409
2410 op0 = expand_normal (arg);
2411 op1 = expand_normal (build_fold_indirect_ref (sinp));
2412 op2 = expand_normal (build_fold_indirect_ref (cosp));
2413
2414 /* Compute into target1 and target2.
2415 Set TARGET to wherever the result comes back. */
2416 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2417 gcc_assert (result);
2418
2419 /* Move target1 and target2 to the memory locations indicated
2420 by op1 and op2. */
2421 emit_move_insn (op1, target1);
2422 emit_move_insn (op2, target2);
2423
2424 return const0_rtx;
2425 }
2426
/* Expand a call to the internal cexpi builtin to the sincos math function.
   EXP is the expression that is a call to the builtin function; if convenient,
   the result should be placed in TARGET.  SUBTARGET may be used as the target
   for computing one of EXP's operands.  */

static rtx
expand_builtin_cexpi (tree exp, rtx target, rtx subtarget)
{
  tree fndecl = get_callee_fndecl (exp);
  tree arg, type;
  enum machine_mode mode;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Try expanding via a sincos optab, fall back to emitting a libcall
     to sincos or cexp.  We are sure we have sincos or cexp because cexpi
     is only generated from sincos, cexp or if we have either of them.  */
  if (optab_handler (sincos_optab, mode)->insn_code != CODE_FOR_nothing)
    {
      op1 = gen_reg_rtx (mode);
      op2 = gen_reg_rtx (mode);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      /* Compute into op1 and op2.  The operand order matches the one
	 used by expand_builtin_sincos above, so op1 receives the sin
	 result and op2 the cos result.  */
      expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
    }
  else if (TARGET_HAS_SINCOS)
    {
      tree call, fn = NULL_TREE;
      tree top1, top2;
      rtx op1a, op2a;

      /* Pick the sincos variant matching this cexpi's precision.  */
      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	fn = built_in_decls[BUILT_IN_SINCOSF];
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	fn = built_in_decls[BUILT_IN_SINCOS];
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	fn = built_in_decls[BUILT_IN_SINCOSL];
      else
	gcc_unreachable ();

      /* Create two stack temporaries and hand their addresses to the
	 sincos libcall.  */
      op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
      op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
      op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
      op2a = copy_to_mode_reg (Pmode, XEXP (op2, 0));
      top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
      top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);

      /* Make sure not to fold the sincos call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
				      call, 3, arg, top1, top2));
    }
  else
    {
      tree call, fn = NULL_TREE, narg;
      tree ctype = build_complex_type (type);

      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	fn = built_in_decls[BUILT_IN_CEXPF];
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	fn = built_in_decls[BUILT_IN_CEXP];
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	fn = built_in_decls[BUILT_IN_CEXPL];
      else
	gcc_unreachable ();

      /* If we don't have a decl for cexp create one.  This is the
	 friendliest fallback if the user calls __builtin_cexpi
	 without full target C99 function support.  */
      if (fn == NULL_TREE)
	{
	  tree fntype;
	  const char *name = NULL;

	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	    name = "cexpf";
	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	    name = "cexp";
	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	    name = "cexpl";

	  fntype = build_function_type_list (ctype, ctype, NULL_TREE);
	  fn = build_fn_decl (name, fntype);
	}

      /* cexpi(x) is lowered as cexp(0 + x*i).  */
      narg = fold_build2 (COMPLEX_EXPR, ctype,
			  build_real (type, dconst0), arg);

      /* Make sure not to fold the cexp call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      return expand_expr (build_call_nary (ctype, call, 1, narg),
			  target, VOIDmode, EXPAND_NORMAL);
    }

  /* Now build the proper return type.  */
  return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
			      make_tree (TREE_TYPE (arg), op2),
			      make_tree (TREE_TYPE (arg), op1)),
		      target, VOIDmode, EXPAND_NORMAL);
}
2535
/* Expand a call to one of the builtin rounding functions gcc defines
   as an extension (lfloor and lceil).  As these are gcc extensions we
   do not need to worry about setting errno to EDOM.
   If expanding via optab fails, lower expression to (int)(floor(x)).
   EXP is the expression that is a call to the builtin function;
   if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0, insns, tmp;
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fallback_fn;
  tree fallback_fndecl;
  enum machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    gcc_unreachable ();

  arg = CALL_EXPR_ARG (exp, 0);

  /* Select the conversion optab, and the math builtin to use if no
     direct insn is available.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_LCEIL):
    CASE_FLT_FN (BUILT_IN_LLCEIL):
      builtin_optab = lceil_optab;
      fallback_fn = BUILT_IN_CEIL;
      break;

    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      builtin_optab = lfloor_optab;
      fallback_fn = BUILT_IN_FLOOR;
      break;

    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  target = gen_reg_rtx (mode);

  /* Wrap the computation of the argument in a SAVE_EXPR, as we may
     need to expand the argument again.  This way, we will not perform
     side-effects more than once.  */
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

  op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

  start_sequence ();

  /* Compute into TARGET.  */
  if (expand_sfix_optab (target, op0, builtin_optab))
    {
      /* Output the entire sequence.  */
      insns = get_insns ();
      end_sequence ();
      emit_insn (insns);
      return target;
    }

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns).  */
  end_sequence ();

  /* Fall back to floating point rounding optab.  */
  fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);

  /* For non-C99 targets we may end up without a fallback fndecl here
     if the user called __builtin_lfloor directly.  In this case emit
     a call to the floor/ceil variants nevertheless.  This should result
     in the best user experience for not full C99 targets.  */
  if (fallback_fndecl == NULL_TREE)
    {
      tree fntype;
      const char *name = NULL;

      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_LCEIL:
	case BUILT_IN_LLCEIL:
	  name = "ceil";
	  break;
	case BUILT_IN_LCEILF:
	case BUILT_IN_LLCEILF:
	  name = "ceilf";
	  break;
	case BUILT_IN_LCEILL:
	case BUILT_IN_LLCEILL:
	  name = "ceill";
	  break;
	case BUILT_IN_LFLOOR:
	case BUILT_IN_LLFLOOR:
	  name = "floor";
	  break;
	case BUILT_IN_LFLOORF:
	case BUILT_IN_LLFLOORF:
	  name = "floorf";
	  break;
	case BUILT_IN_LFLOORL:
	case BUILT_IN_LLFLOORL:
	  name = "floorl";
	  break;
	default:
	  gcc_unreachable ();
	}

      fntype = build_function_type_list (TREE_TYPE (arg),
					 TREE_TYPE (arg), NULL_TREE);
      fallback_fndecl = build_fn_decl (name, fntype);
    }

  exp = build_call_expr (fallback_fndecl, 1, arg);

  tmp = expand_normal (exp);

  /* Truncate the result of floating point optab to integer
     via expand_fix ().  */
  target = gen_reg_rtx (mode);
  expand_fix (target, tmp, 0);

  return target;
}
2663
/* Expand a call to one of the builtin math functions doing integer
   conversion (lrint).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn_2 (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0, insns;
  tree fndecl = get_callee_fndecl (exp);
  tree arg;
  enum machine_mode mode;

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math)
    return NULL_RTX;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    gcc_unreachable ();

  arg = CALL_EXPR_ARG (exp, 0);

  /* Select the conversion optab implementing the builtin.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_LLRINT):
      builtin_optab = lrint_optab; break;
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_LLROUND):
      builtin_optab = lround_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  target = gen_reg_rtx (mode);

  /* Wrap the computation of the argument in a SAVE_EXPR, as we may
     need to expand the argument again.  This way, we will not perform
     side-effects more than once.  */
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

  op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

  start_sequence ();

  if (expand_sfix_optab (target, op0, builtin_optab))
    {
      /* Output the entire sequence.  */
      insns = get_insns ();
      end_sequence ();
      emit_insn (insns);
      return target;
    }

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call to the library function
     with the stabilized argument list.  */
  end_sequence ();

  target = expand_call (exp, target, target == const0_rtx);

  return target;
}
2732
2733 /* To evaluate powi(x,n), the floating point value x raised to the
2734 constant integer exponent n, we use a hybrid algorithm that
2735 combines the "window method" with look-up tables. For an
2736 introduction to exponentiation algorithms and "addition chains",
2737 see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
2738 "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
2739 3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
2740 Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998. */
2741
2742 /* Provide a default value for POWI_MAX_MULTS, the maximum number of
2743 multiplications to inline before calling the system library's pow
2744 function. powi(x,n) requires at worst 2*bits(n)-2 multiplications,
2745 so this default never requires calling pow, powf or powl. */
2746
2747 #ifndef POWI_MAX_MULTS
2748 #define POWI_MAX_MULTS (2*HOST_BITS_PER_WIDE_INT-2)
2749 #endif
2750
2751 /* The size of the "optimal power tree" lookup table. All
2752 exponents less than this value are simply looked up in the
2753 powi_table below. This threshold is also used to size the
2754 cache of pseudo registers that hold intermediate results. */
2755 #define POWI_TABLE_SIZE 256
2756
2757 /* The size, in bits of the window, used in the "window method"
2758 exponentiation algorithm. This is equivalent to a radix of
2759 (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method". */
2760 #define POWI_WINDOW_SIZE 3
2761
/* The following table is an efficient representation of an
   "optimal power tree".  For each value, i, the corresponding
   value, j, in the table states that an optimal evaluation
   sequence for calculating pow(x,i) can be found by evaluating
   pow(x,j)*pow(x,i-j).  An optimal power tree for the first
   100 integers is given in Knuth's "Seminumerical algorithms".  */
2768
/* powi_table[i] holds the split point j described above: an optimal
   sequence for x**i multiplies x**j by x**(i-j).  Entry 0 is never
   consulted by a well-formed lookup (powi_lookup_cost/expand_powi_1
   stop at cached entries, and entry 1 is pre-cached).  */
static const unsigned char powi_table[POWI_TABLE_SIZE] =
  {
      0,   1,   1,   2,   2,   3,   3,   4,  /*   0 -   7 */
      4,   6,   5,   6,   6,  10,   7,   9,  /*   8 -  15 */
      8,  16,   9,  16,  10,  12,  11,  13,  /*  16 -  23 */
     12,  17,  13,  18,  14,  24,  15,  26,  /*  24 -  31 */
     16,  17,  17,  19,  18,  33,  19,  26,  /*  32 -  39 */
     20,  25,  21,  40,  22,  27,  23,  44,  /*  40 -  47 */
     24,  32,  25,  34,  26,  29,  27,  44,  /*  48 -  55 */
     28,  31,  29,  34,  30,  60,  31,  36,  /*  56 -  63 */
     32,  64,  33,  34,  34,  46,  35,  37,  /*  64 -  71 */
     36,  65,  37,  50,  38,  48,  39,  69,  /*  72 -  79 */
     40,  49,  41,  43,  42,  51,  43,  58,  /*  80 -  87 */
     44,  64,  45,  47,  46,  59,  47,  76,  /*  88 -  95 */
     48,  65,  49,  66,  50,  67,  51,  66,  /*  96 - 103 */
     52,  70,  53,  74,  54, 104,  55,  74,  /* 104 - 111 */
     56,  64,  57,  69,  58,  78,  59,  68,  /* 112 - 119 */
     60,  61,  61,  80,  62,  75,  63,  68,  /* 120 - 127 */
     64,  65,  65, 128,  66, 129,  67,  90,  /* 128 - 135 */
     68,  73,  69, 131,  70,  94,  71,  88,  /* 136 - 143 */
     72, 128,  73,  98,  74, 132,  75, 121,  /* 144 - 151 */
     76, 102,  77, 124,  78, 132,  79, 106,  /* 152 - 159 */
     80,  97,  81, 160,  82,  99,  83, 134,  /* 160 - 167 */
     84,  86,  85,  95,  86, 160,  87, 100,  /* 168 - 175 */
     88, 113,  89,  98,  90, 107,  91, 122,  /* 176 - 183 */
     92, 111,  93, 102,  94, 126,  95, 150,  /* 184 - 191 */
     96, 128,  97, 130,  98, 133,  99, 195,  /* 192 - 199 */
    100, 128, 101, 123, 102, 164, 103, 138,  /* 200 - 207 */
    104, 145, 105, 146, 106, 109, 107, 149,  /* 208 - 215 */
    108, 200, 109, 146, 110, 170, 111, 157,  /* 216 - 223 */
    112, 128, 113, 130, 114, 182, 115, 132,  /* 224 - 231 */
    116, 200, 117, 132, 118, 158, 119, 206,  /* 232 - 239 */
    120, 240, 121, 162, 122, 147, 123, 152,  /* 240 - 247 */
    124, 166, 125, 214, 126, 138, 127, 153,  /* 248 - 255 */
  };
2804
2805
2806 /* Return the number of multiplications required to calculate
2807 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2808 subroutine of powi_cost. CACHE is an array indicating
2809 which exponents have already been calculated. */
2810
2811 static int
2812 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
2813 {
2814 /* If we've already calculated this exponent, then this evaluation
2815 doesn't require any additional multiplications. */
2816 if (cache[n])
2817 return 0;
2818
2819 cache[n] = true;
2820 return powi_lookup_cost (n - powi_table[n], cache)
2821 + powi_lookup_cost (powi_table[n], cache) + 1;
2822 }
2823
2824 /* Return the number of multiplications required to calculate
2825 powi(x,n) for an arbitrary x, given the exponent N. This
2826 function needs to be kept in sync with expand_powi below. */
2827
2828 static int
2829 powi_cost (HOST_WIDE_INT n)
2830 {
2831 bool cache[POWI_TABLE_SIZE];
2832 unsigned HOST_WIDE_INT digit;
2833 unsigned HOST_WIDE_INT val;
2834 int result;
2835
2836 if (n == 0)
2837 return 0;
2838
2839 /* Ignore the reciprocal when calculating the cost. */
2840 val = (n < 0) ? -n : n;
2841
2842 /* Initialize the exponent cache. */
2843 memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
2844 cache[1] = true;
2845
2846 result = 0;
2847
2848 while (val >= POWI_TABLE_SIZE)
2849 {
2850 if (val & 1)
2851 {
2852 digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2853 result += powi_lookup_cost (digit, cache)
2854 + POWI_WINDOW_SIZE + 1;
2855 val >>= POWI_WINDOW_SIZE;
2856 }
2857 else
2858 {
2859 val >>= 1;
2860 result++;
2861 }
2862 }
2863
2864 return result + powi_lookup_cost (val, cache);
2865 }
2866
2867 /* Recursive subroutine of expand_powi. This function takes the array,
2868 CACHE, of already calculated exponents and an exponent N and returns
2869 an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE. */
2870
2871 static rtx
2872 expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
2873 {
2874 unsigned HOST_WIDE_INT digit;
2875 rtx target, result;
2876 rtx op0, op1;
2877
2878 if (n < POWI_TABLE_SIZE)
2879 {
2880 if (cache[n])
2881 return cache[n];
2882
2883 target = gen_reg_rtx (mode);
2884 cache[n] = target;
2885
2886 op0 = expand_powi_1 (mode, n - powi_table[n], cache);
2887 op1 = expand_powi_1 (mode, powi_table[n], cache);
2888 }
2889 else if (n & 1)
2890 {
2891 target = gen_reg_rtx (mode);
2892 digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
2893 op0 = expand_powi_1 (mode, n - digit, cache);
2894 op1 = expand_powi_1 (mode, digit, cache);
2895 }
2896 else
2897 {
2898 target = gen_reg_rtx (mode);
2899 op0 = expand_powi_1 (mode, n >> 1, cache);
2900 op1 = op0;
2901 }
2902
2903 result = expand_mult (mode, op0, op1, target, 0);
2904 if (result != target)
2905 emit_move_insn (target, result);
2906 return target;
2907 }
2908
2909 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
2910 floating point operand in mode MODE, and N is the exponent. This
2911 function needs to be kept in sync with powi_cost above. */
2912
2913 static rtx
2914 expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
2915 {
2916 unsigned HOST_WIDE_INT val;
2917 rtx cache[POWI_TABLE_SIZE];
2918 rtx result;
2919
2920 if (n == 0)
2921 return CONST1_RTX (mode);
2922
2923 val = (n < 0) ? -n : n;
2924
2925 memset (cache, 0, sizeof (cache));
2926 cache[1] = x;
2927
2928 result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
2929
2930 /* If the original exponent was negative, reciprocate the result. */
2931 if (n < 0)
2932 result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2933 result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2934
2935 return result;
2936 }
2937
2938 /* Expand a call to the pow built-in mathematical function. Return NULL_RTX if
2939 a normal call should be emitted rather than expanding the function
2940 in-line. EXP is the expression that is a call to the builtin
2941 function; if convenient, the result should be placed in TARGET. */
2942
static rtx
expand_builtin_pow (tree exp, rtx target, rtx subtarget)
{
  tree arg0, arg1;
  tree fn, narg0;
  tree type = TREE_TYPE (exp);
  REAL_VALUE_TYPE cint, c, c2;
  HOST_WIDE_INT n;
  rtx op, op2;
  enum machine_mode mode = TYPE_MODE (type);

  /* pow must take exactly two floating point arguments.  */
  if (! validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);

  /* A non-constant exponent cannot use the multiplication tricks
     below; defer to the generic two-operand math expander.  */
  if (TREE_CODE (arg1) != REAL_CST
      || TREE_OVERFLOW (arg1))
    return expand_builtin_mathfn_2 (exp, target, subtarget);

  /* Handle constant exponents.  */

  /* For integer valued exponents we can expand to an optimal multiplication
     sequence using expand_powi.  */
  c = TREE_REAL_CST (arg1);
  n = real_to_integer (&c);
  real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
  /* -1, 0, 1 and 2 are always cheap; larger integer exponents only
     when unsafe math is on, we are optimizing for speed, and the
     multiplication count stays within POWI_MAX_MULTS.  */
  if (real_identical (&c, &cint)
      && ((n >= -1 && n <= 2)
	  || (flag_unsafe_math_optimizations
	      && optimize_insn_for_speed_p ()
	      && powi_cost (n) <= POWI_MAX_MULTS)))
    {
      op = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
      if (n != 1)
	{
	  op = force_reg (mode, op);
	  op = expand_powi (op, mode, n);
	}
      return op;
    }

  /* ARG0 may be expanded more than once below; wrap it so repeated
     expansion is safe.  */
  narg0 = builtin_save_expr (arg0);

  /* If the exponent is not integer valued, check if it is half of an integer.
     In this case we can expand to sqrt (x) * x**(n/2).  */
  fn = mathfn_built_in (type, BUILT_IN_SQRT);
  if (fn != NULL_TREE)
    {
      /* c2 = 2*c; if c2 is an integer N, then pow(x,c) ==
	 sqrt(x) * x**(N/2).  */
      real_arithmetic (&c2, MULT_EXPR, &c, &dconst2);
      n = real_to_integer (&c2);
      real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
      if (real_identical (&c2, &cint)
	  && ((flag_unsafe_math_optimizations
	       && optimize_insn_for_speed_p ()
	       && powi_cost (n/2) <= POWI_MAX_MULTS)
	      || n == 1))
	{
	  tree call_expr = build_call_expr (fn, 1, narg0);
	  /* Use expand_expr in case the newly built call expression
	     was folded to a non-call.  */
	  op = expand_expr (call_expr, subtarget, mode, EXPAND_NORMAL);
	  if (n != 1)
	    {
	      op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
	      op2 = force_reg (mode, op2);
	      op2 = expand_powi (op2, mode, abs (n / 2));
	      op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
					0, OPTAB_LIB_WIDEN);
	      /* If the original exponent was negative, reciprocate the
		 result.  */
	      if (n < 0)
		op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
				   op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
	    }
	  return op;
	}
    }

  /* Try if the exponent is a third of an integer.  In this case
     we can expand to x**(n/3) * cbrt(x)**(n%3).  As cbrt (x) is
     different from pow (x, 1./3.) due to rounding and behavior
     with negative x we need to constrain this transformation to
     unsafe math and positive x or finite math.  */
  fn = mathfn_built_in (type, BUILT_IN_CBRT);
  if (fn != NULL_TREE
      && flag_unsafe_math_optimizations
      && (tree_expr_nonnegative_p (arg0)
	  || !HONOR_NANS (mode)))
    {
      REAL_VALUE_TYPE dconst3;
      real_from_integer (&dconst3, VOIDmode, 3, 0, 0);
      /* c2 = round(3*c); accept only if c2/3 converts back exactly
	 to c in this mode.  */
      real_arithmetic (&c2, MULT_EXPR, &c, &dconst3);
      real_round (&c2, mode, &c2);
      n = real_to_integer (&c2);
      real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
      real_arithmetic (&c2, RDIV_EXPR, &cint, &dconst3);
      real_convert (&c2, mode, &c2);
      if (real_identical (&c2, &c)
	  && ((optimize_insn_for_speed_p ()
	       && powi_cost (n/3) <= POWI_MAX_MULTS)
	      || n == 1))
	{
	  tree call_expr = build_call_expr (fn, 1,narg0);
	  op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
	  /* cbrt(x)**2 is needed when |n| mod 3 == 2.  */
	  if (abs (n) % 3 == 2)
	    op = expand_simple_binop (mode, MULT, op, op, op,
				      0, OPTAB_LIB_WIDEN);
	  if (n != 1)
	    {
	      op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
	      op2 = force_reg (mode, op2);
	      op2 = expand_powi (op2, mode, abs (n / 3));
	      op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
					0, OPTAB_LIB_WIDEN);
	      /* If the original exponent was negative, reciprocate the
		 result.  */
	      if (n < 0)
		op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
				   op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
	    }
	  return op;
	}
    }

  /* Fall back to optab expansion.  */
  return expand_builtin_mathfn_2 (exp, target, subtarget);
}
3072
3073 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
3074 a normal call should be emitted rather than expanding the function
3075 in-line. EXP is the expression that is a call to the builtin
3076 function; if convenient, the result should be placed in TARGET. */
3077
static rtx
expand_builtin_powi (tree exp, rtx target, rtx subtarget)
{
  tree arg0, arg1;
  rtx op0, op1;
  enum machine_mode mode;
  enum machine_mode mode2;

  /* powi takes a floating point base and an integer exponent.  */
  if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Handle constant power.  */

  if (TREE_CODE (arg1) == INTEGER_CST
      && !TREE_OVERFLOW (arg1))
    {
      HOST_WIDE_INT n = TREE_INT_CST_LOW (arg1);

      /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
	 Otherwise, check the number of multiplications required.
	 The TREE_INT_CST_HIGH test ensures the exponent actually fits
	 in a HOST_WIDE_INT.  */
      if ((TREE_INT_CST_HIGH (arg1) == 0
	   || TREE_INT_CST_HIGH (arg1) == -1)
	  && ((n >= -1 && n <= 2)
	      || (optimize_insn_for_speed_p ()
		  && powi_cost (n) <= POWI_MAX_MULTS)))
	{
	  op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
	  op0 = force_reg (mode, op0);
	  return expand_powi (op0, mode, n);
	}
    }

  /* Emit a libcall to libgcc.  */

  /* Mode of the 2nd argument must match that of an int.  */
  mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);

  if (target == NULL_RTX)
    target = gen_reg_rtx (mode);

  /* Coerce both operands into the modes the libcall expects.  */
  op0 = expand_expr (arg0, subtarget, mode, EXPAND_NORMAL);
  if (GET_MODE (op0) != mode)
    op0 = convert_to_mode (mode, op0, 0);
  op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
  if (GET_MODE (op1) != mode2)
    op1 = convert_to_mode (mode2, op1, 0);

  target = emit_library_call_value (optab_libfunc (powi_optab, mode),
				    target, LCT_CONST, mode, 2,
				    op0, mode, op1, mode2);

  return target;
}
3135
3136 /* Expand expression EXP which is a call to the strlen builtin. Return
3137 NULL_RTX if we failed the caller should emit a normal call, otherwise
3138 try to get the result in TARGET, if convenient. */
3139
static rtx
expand_builtin_strlen (tree exp, rtx target,
		       enum machine_mode target_mode)
{
  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      rtx pat;
      tree len;
      tree src = CALL_EXPR_ARG (exp, 0);
      rtx result, src_reg, char_rtx, before_strlen;
      enum machine_mode insn_mode = target_mode, char_mode;
      enum insn_code icode = CODE_FOR_nothing;
      int align;

      /* If the length can be computed at compile-time, return it.  */
      len = c_strlen (src, 0);
      if (len)
	return expand_expr (len, target, target_mode, EXPAND_NORMAL);

      /* If the length can be computed at compile-time and is constant
	 integer, but there are side-effects in src, evaluate
	 src for side-effects, then return len.
	 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
	 can be optimized into: i++; x = 3;  */
      len = c_strlen (src, 1);
      if (len && TREE_CODE (len) == INTEGER_CST)
	{
	  expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
	  return expand_expr (len, target, target_mode, EXPAND_NORMAL);
	}

      align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;

      /* If SRC is not a pointer type, don't do this operation inline.  */
      if (align == 0)
	return NULL_RTX;

      /* Bail out if we can't compute strlen in the right mode.
	 Search successively wider modes for one with a strlen
	 pattern.  */
      while (insn_mode != VOIDmode)
	{
	  icode = optab_handler (strlen_optab, insn_mode)->insn_code;
	  if (icode != CODE_FOR_nothing)
	    break;

	  insn_mode = GET_MODE_WIDER_MODE (insn_mode);
	}
      if (insn_mode == VOIDmode)
	return NULL_RTX;

      /* Make a place to write the result of the instruction.  */
      result = target;
      if (! (result != 0
	     && REG_P (result)
	     && GET_MODE (result) == insn_mode
	     && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	result = gen_reg_rtx (insn_mode);

      /* Make a place to hold the source address.  We will not expand
	 the actual source until we are sure that the expansion will
	 not fail -- there are trees that cannot be expanded twice.  */
      src_reg = gen_reg_rtx (Pmode);

      /* Mark the beginning of the strlen sequence so we can emit the
	 source operand later.  */
      before_strlen = get_last_insn ();

      /* Operand 2 of the strlen pattern is the terminator character;
	 make sure the zero satisfies the pattern's predicate.  */
      char_rtx = const0_rtx;
      char_mode = insn_data[(int) icode].operand[2].mode;
      if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
							    char_mode))
	char_rtx = copy_to_mode_reg (char_mode, char_rtx);

      pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
			     char_rtx, GEN_INT (align));
      if (! pat)
	return NULL_RTX;
      emit_insn (pat);

      /* Now that we are assured of success, expand the source.  */
      start_sequence ();
      pat = expand_expr (src, src_reg, ptr_mode, EXPAND_NORMAL);
      if (pat != src_reg)
	emit_move_insn (src_reg, pat);
      pat = get_insns ();
      end_sequence ();

      /* Place the source-address computation ahead of the strlen
	 insn emitted above.  */
      if (before_strlen)
	emit_insn_after (pat, before_strlen);
      else
	emit_insn_before (pat, get_insns ());

      /* Return the value in the proper mode for this function.  */
      if (GET_MODE (result) == target_mode)
	target = result;
      else if (target != 0)
	convert_move (target, result, 0);
      else
	target = convert_to_mode (target_mode, result, 0);

      return target;
    }
}
3244
3245 /* Expand a call to the strstr builtin. Return NULL_RTX if we failed the
3246 caller should emit a normal call, otherwise try to get the result
3247 in TARGET, if convenient (and in mode MODE if that's convenient). */
3248
3249 static rtx
3250 expand_builtin_strstr (tree exp, rtx target, enum machine_mode mode)
3251 {
3252 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3253 {
3254 tree type = TREE_TYPE (exp);
3255 tree result = fold_builtin_strstr (CALL_EXPR_ARG (exp, 0),
3256 CALL_EXPR_ARG (exp, 1), type);
3257 if (result)
3258 return expand_expr (result, target, mode, EXPAND_NORMAL);
3259 }
3260 return NULL_RTX;
3261 }
3262
3263 /* Expand a call to the strchr builtin. Return NULL_RTX if we failed the
3264 caller should emit a normal call, otherwise try to get the result
3265 in TARGET, if convenient (and in mode MODE if that's convenient). */
3266
3267 static rtx
3268 expand_builtin_strchr (tree exp, rtx target, enum machine_mode mode)
3269 {
3270 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3271 {
3272 tree type = TREE_TYPE (exp);
3273 tree result = fold_builtin_strchr (CALL_EXPR_ARG (exp, 0),
3274 CALL_EXPR_ARG (exp, 1), type);
3275 if (result)
3276 return expand_expr (result, target, mode, EXPAND_NORMAL);
3277
3278 /* FIXME: Should use strchrM optab so that ports can optimize this. */
3279 }
3280 return NULL_RTX;
3281 }
3282
3283 /* Expand a call to the strrchr builtin. Return NULL_RTX if we failed the
3284 caller should emit a normal call, otherwise try to get the result
3285 in TARGET, if convenient (and in mode MODE if that's convenient). */
3286
3287 static rtx
3288 expand_builtin_strrchr (tree exp, rtx target, enum machine_mode mode)
3289 {
3290 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3291 {
3292 tree type = TREE_TYPE (exp);
3293 tree result = fold_builtin_strrchr (CALL_EXPR_ARG (exp, 0),
3294 CALL_EXPR_ARG (exp, 1), type);
3295 if (result)
3296 return expand_expr (result, target, mode, EXPAND_NORMAL);
3297 }
3298 return NULL_RTX;
3299 }
3300
3301 /* Expand a call to the strpbrk builtin. Return NULL_RTX if we failed the
3302 caller should emit a normal call, otherwise try to get the result
3303 in TARGET, if convenient (and in mode MODE if that's convenient). */
3304
3305 static rtx
3306 expand_builtin_strpbrk (tree exp, rtx target, enum machine_mode mode)
3307 {
3308 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3309 {
3310 tree type = TREE_TYPE (exp);
3311 tree result = fold_builtin_strpbrk (CALL_EXPR_ARG (exp, 0),
3312 CALL_EXPR_ARG (exp, 1), type);
3313 if (result)
3314 return expand_expr (result, target, mode, EXPAND_NORMAL);
3315 }
3316 return NULL_RTX;
3317 }
3318
3319 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3320 bytes from constant string DATA + OFFSET and return it as target
3321 constant. */
3322
3323 static rtx
3324 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3325 enum machine_mode mode)
3326 {
3327 const char *str = (const char *) data;
3328
3329 gcc_assert (offset >= 0
3330 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3331 <= strlen (str) + 1));
3332
3333 return c_readstr (str + offset, mode);
3334 }
3335
3336 /* Expand a call EXP to the memcpy builtin.
3337 Return NULL_RTX if we failed, the caller should emit a normal call,
3338 otherwise try to get the result in TARGET, if convenient (and in
3339 mode MODE if that's convenient). */
3340
static rtx
expand_builtin_memcpy (tree exp, rtx target, enum machine_mode mode)
{
  tree fndecl = get_callee_fndecl (exp);

  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      tree len = CALL_EXPR_ARG (exp, 2);
      const char *src_str;
      unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
      unsigned int dest_align
	= get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
      rtx dest_mem, src_mem, dest_addr, len_rtx;
      /* First try folding the call away entirely.  */
      tree result = fold_builtin_memory_op (dest, src, len,
					    TREE_TYPE (TREE_TYPE (fndecl)),
					    false, /*endp=*/0);
      HOST_WIDE_INT expected_size = -1;
      unsigned int expected_align = 0;
      tree_ann_common_t ann;

      if (result)
	{
	  /* Expand side-effect-only operands of a COMPOUND_EXPR
	     chain, then expand the final value.  */
	  while (TREE_CODE (result) == COMPOUND_EXPR)
	    {
	      expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
			   EXPAND_NORMAL);
	      result = TREE_OPERAND (result, 1);
	    }
	  return expand_expr (result, target, mode, EXPAND_NORMAL);
	}

      /* If DEST is not a pointer type, call the normal function.  */
      if (dest_align == 0)
	return NULL_RTX;

      /* If either SRC is not a pointer type, don't do this
	 operation in-line.  */
      if (src_align == 0)
	return NULL_RTX;

      /* Consult value profiling for a hint about the typical size
	 and alignment of this block operation.  */
      ann = tree_common_ann (exp);
      if (ann)
	stringop_block_profile (ann->stmt, &expected_align, &expected_size);

      if (expected_align < dest_align)
	expected_align = dest_align;
      dest_mem = get_memory_rtx (dest, len);
      set_mem_align (dest_mem, dest_align);
      len_rtx = expand_normal (len);
      src_str = c_getstr (src);

      /* If SRC is a string constant and block move would be done
	 by pieces, we can avoid loading the string from memory
	 and only store the computed constants.  */
      if (src_str
	  && GET_CODE (len_rtx) == CONST_INT
	  && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
	  && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
				  CONST_CAST (char *, src_str),
				  dest_align, false))
	{
	  dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
				      builtin_memcpy_read_str,
				      CONST_CAST (char *, src_str),
				      dest_align, false, 0);
	  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}

      src_mem = get_memory_rtx (src, len);
      set_mem_align (src_mem, src_align);

      /* Copy word part most expediently.  */
      dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
					 CALL_EXPR_TAILCALL (exp)
					 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
					 expected_align, expected_size);

      /* memcpy returns DEST; materialize its address if the block
	 move did not hand one back.  */
      if (dest_addr == 0)
	{
	  dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
	  dest_addr = convert_memory_address (ptr_mode, dest_addr);
	}
      return dest_addr;
    }
}
3433
3434 /* Expand a call EXP to the mempcpy builtin.
3435 Return NULL_RTX if we failed; the caller should emit a normal call,
3436 otherwise try to get the result in TARGET, if convenient (and in
3437 mode MODE if that's convenient). If ENDP is 0 return the
3438 destination pointer, if ENDP is 1 return the end pointer ala
3439 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3440 stpcpy. */
3441
3442 static rtx
3443 expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3444 {
3445 if (!validate_arglist (exp,
3446 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3447 return NULL_RTX;
3448 else
3449 {
3450 tree dest = CALL_EXPR_ARG (exp, 0);
3451 tree src = CALL_EXPR_ARG (exp, 1);
3452 tree len = CALL_EXPR_ARG (exp, 2);
3453 return expand_builtin_mempcpy_args (dest, src, len,
3454 TREE_TYPE (exp),
3455 target, mode, /*endp=*/ 1);
3456 }
3457 }
3458
3459 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3460 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3461 so that this can also be called without constructing an actual CALL_EXPR.
3462 TYPE is the return type of the call. The other arguments and return value
3463 are the same as for expand_builtin_mempcpy. */
3464
static rtx
expand_builtin_mempcpy_args (tree dest, tree src, tree len, tree type,
			     rtx target, enum machine_mode mode, int endp)
{
  /* If return value is ignored, transform mempcpy into memcpy.  */
  if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_MEMCPY])
    {
      tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
      tree result = build_call_expr (fn, 3, dest, src, len);

      /* Expand side-effect-only operands of a COMPOUND_EXPR chain,
	 then expand the final value.  */
      while (TREE_CODE (result) == COMPOUND_EXPR)
	{
	  expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
		       EXPAND_NORMAL);
	  result = TREE_OPERAND (result, 1);
	}
      return expand_expr (result, target, mode, EXPAND_NORMAL);
    }
  else
    {
      const char *src_str;
      unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
      unsigned int dest_align
	= get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
      rtx dest_mem, src_mem, len_rtx;
      /* First try folding the call away entirely.  */
      tree result = fold_builtin_memory_op (dest, src, len, type, false, endp);

      if (result)
	{
	  while (TREE_CODE (result) == COMPOUND_EXPR)
	    {
	      expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
			   EXPAND_NORMAL);
	      result = TREE_OPERAND (result, 1);
	    }
	  return expand_expr (result, target, mode, EXPAND_NORMAL);
	}

      /* If either SRC or DEST is not a pointer type, don't do this
	 operation in-line.  */
      if (dest_align == 0 || src_align == 0)
	return NULL_RTX;

      /* If LEN is not constant, call the normal function.  */
      if (! host_integerp (len, 1))
	return NULL_RTX;

      len_rtx = expand_normal (len);
      src_str = c_getstr (src);

      /* If SRC is a string constant and block move would be done
	 by pieces, we can avoid loading the string from memory
	 and only store the computed constants.  */
      if (src_str
	  && GET_CODE (len_rtx) == CONST_INT
	  && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
	  && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
				  CONST_CAST (char *, src_str),
				  dest_align, false))
	{
	  dest_mem = get_memory_rtx (dest, len);
	  set_mem_align (dest_mem, dest_align);
	  dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
				      builtin_memcpy_read_str,
				      CONST_CAST (char *, src_str),
				      dest_align, false, endp);
	  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}

      /* Otherwise fall back to a piecewise move when the constant
	 length and alignment permit one.  */
      if (GET_CODE (len_rtx) == CONST_INT
	  && can_move_by_pieces (INTVAL (len_rtx),
				 MIN (dest_align, src_align)))
	{
	  dest_mem = get_memory_rtx (dest, len);
	  set_mem_align (dest_mem, dest_align);
	  src_mem = get_memory_rtx (src, len);
	  set_mem_align (src_mem, src_align);
	  dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
				     MIN (dest_align, src_align), endp);
	  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}

      return NULL_RTX;
    }
}
3554
3555 /* Expand expression EXP, which is a call to the memmove builtin. Return
3556 NULL_RTX if we failed; the caller should emit a normal call. */
3557
3558 static rtx
3559 expand_builtin_memmove (tree exp, rtx target, enum machine_mode mode, int ignore)
3560 {
3561 if (!validate_arglist (exp,
3562 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3563 return NULL_RTX;
3564 else
3565 {
3566 tree dest = CALL_EXPR_ARG (exp, 0);
3567 tree src = CALL_EXPR_ARG (exp, 1);
3568 tree len = CALL_EXPR_ARG (exp, 2);
3569 return expand_builtin_memmove_args (dest, src, len, TREE_TYPE (exp),
3570 target, mode, ignore);
3571 }
3572 }
3573
3574 /* Helper function to do the actual work for expand_builtin_memmove. The
3575 arguments to the builtin_memmove call DEST, SRC, and LEN are broken out
3576 so that this can also be called without constructing an actual CALL_EXPR.
3577 TYPE is the return type of the call. The other arguments and return value
3578 are the same as for expand_builtin_memmove. */
3579
3580 static rtx
3581 expand_builtin_memmove_args (tree dest, tree src, tree len,
3582 tree type, rtx target, enum machine_mode mode,
3583 int ignore)
3584 {
3585 tree result = fold_builtin_memory_op (dest, src, len, type, ignore, /*endp=*/3);
3586
3587 if (result)
3588 {
3589 STRIP_TYPE_NOPS (result);
3590 while (TREE_CODE (result) == COMPOUND_EXPR)
3591 {
3592 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3593 EXPAND_NORMAL);
3594 result = TREE_OPERAND (result, 1);
3595 }
3596 return expand_expr (result, target, mode, EXPAND_NORMAL);
3597 }
3598
3599 /* Otherwise, call the normal function. */
3600 return NULL_RTX;
3601 }
3602
3603 /* Expand expression EXP, which is a call to the bcopy builtin. Return
3604 NULL_RTX if we failed the caller should emit a normal call. */
3605
3606 static rtx
3607 expand_builtin_bcopy (tree exp, int ignore)
3608 {
3609 tree type = TREE_TYPE (exp);
3610 tree src, dest, size;
3611
3612 if (!validate_arglist (exp,
3613 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3614 return NULL_RTX;
3615
3616 src = CALL_EXPR_ARG (exp, 0);
3617 dest = CALL_EXPR_ARG (exp, 1);
3618 size = CALL_EXPR_ARG (exp, 2);
3619
3620 /* Transform bcopy(ptr x, ptr y, int z) to memmove(ptr y, ptr x, size_t z).
3621 This is done this way so that if it isn't expanded inline, we fall
3622 back to calling bcopy instead of memmove. */
3623 return expand_builtin_memmove_args (dest, src,
3624 fold_convert (sizetype, size),
3625 type, const0_rtx, VOIDmode,
3626 ignore);
3627 }
3628
3629 #ifndef HAVE_movstr
3630 # define HAVE_movstr 0
3631 # define CODE_FOR_movstr CODE_FOR_nothing
3632 #endif
3633
3634 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3635 we failed, the caller should emit a normal call, otherwise try to
3636 get the result in TARGET, if convenient. If ENDP is 0 return the
3637 destination pointer, if ENDP is 1 return the end pointer ala
3638 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3639 stpcpy. */
3640
static rtx
expand_movstr (tree dest, tree src, rtx target, int endp)
{
  rtx end;
  rtx dest_mem;
  rtx src_mem;
  rtx insn;
  const struct insn_data * data;

  /* Nothing to do if the target provides no movstr pattern.  */
  if (!HAVE_movstr)
    return NULL_RTX;

  dest_mem = get_memory_rtx (dest, NULL);
  src_mem = get_memory_rtx (src, NULL);
  if (!endp)
    {
      /* strcpy-style result: return the original destination address;
	 the pattern's end-pointer output goes into a scratch reg.  */
      target = force_reg (Pmode, XEXP (dest_mem, 0));
      dest_mem = replace_equiv_address (dest_mem, target);
      end = gen_reg_rtx (Pmode);
    }
  else
    {
      /* stpcpy/mempcpy-style result: the end pointer itself is the
	 value, so aim the pattern's output at TARGET when usable.  */
      if (target == 0 || target == const0_rtx)
	{
	  end = gen_reg_rtx (Pmode);
	  if (target == 0)
	    target = end;
	}
      else
	end = target;
    }

  data = insn_data + CODE_FOR_movstr;

  /* Honour the mode the pattern declares for its first operand.  */
  if (data->operand[0].mode != VOIDmode)
    end = gen_lowpart (data->operand[0].mode, end);

  insn = data->genfun (end, dest_mem, src_mem);

  gcc_assert (insn);

  emit_insn (insn);

  /* movstr is supposed to set end to the address of the NUL
     terminator.  If the caller requested a mempcpy-like return value,
     adjust it.  */
  if (endp == 1 && target != const0_rtx)
    {
      rtx tem = plus_constant (gen_lowpart (GET_MODE (target), end), 1);
      emit_move_insn (target, force_operand (tem, NULL_RTX));
    }

  return target;
}
3695
3696 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3697 NULL_RTX if we failed the caller should emit a normal call, otherwise
3698 try to get the result in TARGET, if convenient (and in mode MODE if that's
3699 convenient). */
3700
3701 static rtx
3702 expand_builtin_strcpy (tree fndecl, tree exp, rtx target, enum machine_mode mode)
3703 {
3704 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3705 {
3706 tree dest = CALL_EXPR_ARG (exp, 0);
3707 tree src = CALL_EXPR_ARG (exp, 1);
3708 return expand_builtin_strcpy_args (fndecl, dest, src, target, mode);
3709 }
3710 return NULL_RTX;
3711 }
3712
3713 /* Helper function to do the actual work for expand_builtin_strcpy. The
3714 arguments to the builtin_strcpy call DEST and SRC are broken out
3715 so that this can also be called without constructing an actual CALL_EXPR.
3716 The other arguments and return value are the same as for
3717 expand_builtin_strcpy. */
3718
3719 static rtx
3720 expand_builtin_strcpy_args (tree fndecl, tree dest, tree src,
3721 rtx target, enum machine_mode mode)
3722 {
3723 tree result = fold_builtin_strcpy (fndecl, dest, src, 0);
3724 if (result)
3725 return expand_expr (result, target, mode, EXPAND_NORMAL);
3726 return expand_movstr (dest, src, target, /*endp=*/0);
3727
3728 }
3729
/* Expand a call EXP to the stpcpy builtin.
   Return NULL_RTX if we failed the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
{
  tree dst, src;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  /* If return value is ignored, transform stpcpy into strcpy.  */
  if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_STRCPY])
    {
      tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
      tree result = build_call_expr (fn, 2, dst, src);

      /* The built call may be wrapped in COMPOUND_EXPRs; expand the
	 left-hand operands for side effects only and return the value
	 of the final operand.  */
      STRIP_NOPS (result);
      while (TREE_CODE (result) == COMPOUND_EXPR)
	{
	  expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
		       EXPAND_NORMAL);
	  result = TREE_OPERAND (result, 1);
	}
      return expand_expr (result, target, mode, EXPAND_NORMAL);
    }
  else
    {
      tree len, lenp1;
      rtx ret;

      /* Ensure we get an actual string whose length can be evaluated at
         compile-time, not an expression containing a string.  This is
         because the latter will potentially produce pessimized code
         when used to produce the return value.  */
      if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
	return expand_movstr (dst, src, target, /*endp=*/2);

      /* stpcpy returns DST + strlen (SRC), so copy LEN + 1 bytes with
	 a mempcpy-style (endp == 2) expansion.  */
      lenp1 = size_binop (PLUS_EXPR, len, ssize_int (1));
      ret = expand_builtin_mempcpy_args (dst, src, lenp1, TREE_TYPE (exp),
					 target, mode, /*endp=*/2);

      if (ret)
	return ret;

      if (TREE_CODE (len) == INTEGER_CST)
	{
	  rtx len_rtx = expand_normal (len);

	  if (GET_CODE (len_rtx) == CONST_INT)
	    {
	      /* Fall back to a plain strcpy expansion and compute the
		 return value as DST + LEN by hand.  */
	      ret = expand_builtin_strcpy_args (get_callee_fndecl (exp),
						dst, src, target, mode);

	      if (ret)
		{
		  if (! target)
		    {
		      if (mode != VOIDmode)
			target = gen_reg_rtx (mode);
		      else
			target = gen_reg_rtx (GET_MODE (ret));
		    }
		  if (GET_MODE (target) != GET_MODE (ret))
		    ret = gen_lowpart (GET_MODE (target), ret);

		  ret = plus_constant (ret, INTVAL (len_rtx));
		  ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
		  gcc_assert (ret);

		  return target;
		}
	    }
	}

      return expand_movstr (dst, src, target, /*endp=*/2);
    }
}
3813
3814 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3815 bytes from constant string DATA + OFFSET and return it as target
3816 constant. */
3817
3818 rtx
3819 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3820 enum machine_mode mode)
3821 {
3822 const char *str = (const char *) data;
3823
3824 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3825 return const0_rtx;
3826
3827 return c_readstr (str + offset, mode);
3828 }
3829
3830 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3831 NULL_RTX if we failed the caller should emit a normal call. */
3832
3833 static rtx
3834 expand_builtin_strncpy (tree exp, rtx target, enum machine_mode mode)
3835 {
3836 tree fndecl = get_callee_fndecl (exp);
3837
3838 if (validate_arglist (exp,
3839 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3840 {
3841 tree dest = CALL_EXPR_ARG (exp, 0);
3842 tree src = CALL_EXPR_ARG (exp, 1);
3843 tree len = CALL_EXPR_ARG (exp, 2);
3844 tree slen = c_strlen (src, 1);
3845 tree result = fold_builtin_strncpy (fndecl, dest, src, len, slen);
3846
3847 if (result)
3848 {
3849 while (TREE_CODE (result) == COMPOUND_EXPR)
3850 {
3851 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3852 EXPAND_NORMAL);
3853 result = TREE_OPERAND (result, 1);
3854 }
3855 return expand_expr (result, target, mode, EXPAND_NORMAL);
3856 }
3857
3858 /* We must be passed a constant len and src parameter. */
3859 if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
3860 return NULL_RTX;
3861
3862 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
3863
3864 /* We're required to pad with trailing zeros if the requested
3865 len is greater than strlen(s2)+1. In that case try to
3866 use store_by_pieces, if it fails, punt. */
3867 if (tree_int_cst_lt (slen, len))
3868 {
3869 unsigned int dest_align
3870 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3871 const char *p = c_getstr (src);
3872 rtx dest_mem;
3873
3874 if (!p || dest_align == 0 || !host_integerp (len, 1)
3875 || !can_store_by_pieces (tree_low_cst (len, 1),
3876 builtin_strncpy_read_str,
3877 CONST_CAST (char *, p),
3878 dest_align, false))
3879 return NULL_RTX;
3880
3881 dest_mem = get_memory_rtx (dest, len);
3882 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3883 builtin_strncpy_read_str,
3884 CONST_CAST (char *, p), dest_align, false, 0);
3885 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3886 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3887 return dest_mem;
3888 }
3889 }
3890 return NULL_RTX;
3891 }
3892
3893 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3894 bytes from constant string DATA + OFFSET and return it as target
3895 constant. */
3896
3897 rtx
3898 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3899 enum machine_mode mode)
3900 {
3901 const char *c = (const char *) data;
3902 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3903
3904 memset (p, *c, GET_MODE_SIZE (mode));
3905
3906 return c_readstr (p, mode);
3907 }
3908
3909 /* Callback routine for store_by_pieces. Return the RTL of a register
3910 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3911 char value given in the RTL register data. For example, if mode is
3912 4 bytes wide, return the RTL for 0x01010101*data. */
3913
3914 static rtx
3915 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3916 enum machine_mode mode)
3917 {
3918 rtx target, coeff;
3919 size_t size;
3920 char *p;
3921
3922 size = GET_MODE_SIZE (mode);
3923 if (size == 1)
3924 return (rtx) data;
3925
3926 p = XALLOCAVEC (char, size);
3927 memset (p, 1, size);
3928 coeff = c_readstr (p, mode);
3929
3930 target = convert_to_mode (mode, (rtx) data, 1);
3931 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3932 return force_reg (mode, target);
3933 }
3934
3935 /* Expand expression EXP, which is a call to the memset builtin. Return
3936 NULL_RTX if we failed the caller should emit a normal call, otherwise
3937 try to get the result in TARGET, if convenient (and in mode MODE if that's
3938 convenient). */
3939
3940 static rtx
3941 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3942 {
3943 if (!validate_arglist (exp,
3944 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3945 return NULL_RTX;
3946 else
3947 {
3948 tree dest = CALL_EXPR_ARG (exp, 0);
3949 tree val = CALL_EXPR_ARG (exp, 1);
3950 tree len = CALL_EXPR_ARG (exp, 2);
3951 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3952 }
3953 }
3954
/* Helper function to do the actual work for expand_builtin_memset.  The
   arguments to the builtin_memset call DEST, VAL, and LEN are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_memset.  */

static rtx
expand_builtin_memset_args (tree dest, tree val, tree len,
			    rtx target, enum machine_mode mode, tree orig_exp)
{
  tree fndecl, fn;
  enum built_in_function fcode;
  char c;
  unsigned int dest_align;
  rtx dest_mem, dest_addr, len_rtx;
  HOST_WIDE_INT expected_size = -1;
  unsigned int expected_align = 0;
  tree_ann_common_t ann;

  dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);

  /* If DEST is not a pointer type, don't do this operation in-line.  */
  if (dest_align == 0)
    return NULL_RTX;

  /* Use value profiling feedback, when present, to tune the
     expansion.  */
  ann = tree_common_ann (orig_exp);
  if (ann)
    stringop_block_profile (ann->stmt, &expected_align, &expected_size);

  if (expected_align < dest_align)
    expected_align = dest_align;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      /* Evaluate and ignore VAL in case it has side-effects.  */
      expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
      return expand_expr (dest, target, mode, EXPAND_NORMAL);
    }

  /* Stabilize the arguments in case we fail.  */
  dest = builtin_save_expr (dest);
  val = builtin_save_expr (val);
  len = builtin_save_expr (len);

  len_rtx = expand_normal (len);
  dest_mem = get_memory_rtx (dest, len);

  if (TREE_CODE (val) != INTEGER_CST)
    {
      /* The fill byte is not a compile-time constant.  */
      rtx val_rtx;

      val_rtx = expand_normal (val);
      val_rtx = convert_to_mode (TYPE_MODE (unsigned_char_type_node),
				 val_rtx, 0);

      /* Assume that we can memset by pieces if we can store
       * the coefficients by pieces (in the required modes).
       * We can't pass builtin_memset_gen_str as that emits RTL.  */
      c = 1;
      if (host_integerp (len, 1)
	  && can_store_by_pieces (tree_low_cst (len, 1),
				  builtin_memset_read_str, &c, dest_align,
				  true))
	{
	  val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
			       val_rtx);
	  store_by_pieces (dest_mem, tree_low_cst (len, 1),
			   builtin_memset_gen_str, val_rtx, dest_align,
			   true, 0);
	}
      else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
					dest_align, expected_align,
					expected_size))
	goto do_libcall;

      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  if (target_char_cast (val, &c))
    goto do_libcall;

  if (c)
    {
      /* Nonzero constant fill byte: store by pieces if possible,
	 otherwise try the target's setmem pattern.  */
      if (host_integerp (len, 1)
	  && can_store_by_pieces (tree_low_cst (len, 1),
				  builtin_memset_read_str, &c, dest_align,
				  true))
	store_by_pieces (dest_mem, tree_low_cst (len, 1),
			 builtin_memset_read_str, &c, dest_align, true, 0);
      else if (!set_storage_via_setmem (dest_mem, len_rtx, GEN_INT (c),
					dest_align, expected_align,
					expected_size))
	goto do_libcall;

      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  /* Zero fill: use the generic block-clear expander.  */
  set_mem_align (dest_mem, dest_align);
  dest_addr = clear_storage_hints (dest_mem, len_rtx,
				   CALL_EXPR_TAILCALL (orig_exp)
				   ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
				   expected_align, expected_size);

  if (dest_addr == 0)
    {
      dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_addr = convert_memory_address (ptr_mode, dest_addr);
    }

  return dest_addr;

 do_libcall:
  /* Inline expansion failed: emit a library call to memset or bzero,
     matching whichever builtin the original call was, so a bzero call
     never silently turns into memset.  */
  fndecl = get_callee_fndecl (orig_exp);
  fcode = DECL_FUNCTION_CODE (fndecl);
  if (fcode == BUILT_IN_MEMSET)
    fn = build_call_expr (fndecl, 3, dest, val, len);
  else if (fcode == BUILT_IN_BZERO)
    fn = build_call_expr (fndecl, 2, dest, len);
  else
    gcc_unreachable ();
  if (TREE_CODE (fn) == CALL_EXPR)
    CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
  return expand_call (fn, target, target == const0_rtx);
}
4084
4085 /* Expand expression EXP, which is a call to the bzero builtin. Return
4086 NULL_RTX if we failed the caller should emit a normal call. */
4087
4088 static rtx
4089 expand_builtin_bzero (tree exp)
4090 {
4091 tree dest, size;
4092
4093 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4094 return NULL_RTX;
4095
4096 dest = CALL_EXPR_ARG (exp, 0);
4097 size = CALL_EXPR_ARG (exp, 1);
4098
4099 /* New argument list transforming bzero(ptr x, int y) to
4100 memset(ptr x, int 0, size_t y). This is done this way
4101 so that if it isn't expanded inline, we fallback to
4102 calling bzero instead of memset. */
4103
4104 return expand_builtin_memset_args (dest, integer_zero_node,
4105 fold_convert (sizetype, size),
4106 const0_rtx, VOIDmode, exp);
4107 }
4108
4109 /* Expand a call to the memchr builtin. Return NULL_RTX if we failed the
4110 caller should emit a normal call, otherwise try to get the result
4111 in TARGET, if convenient (and in mode MODE if that's convenient). */
4112
4113 static rtx
4114 expand_builtin_memchr (tree exp, rtx target, enum machine_mode mode)
4115 {
4116 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE,
4117 INTEGER_TYPE, VOID_TYPE))
4118 {
4119 tree type = TREE_TYPE (exp);
4120 tree result = fold_builtin_memchr (CALL_EXPR_ARG (exp, 0),
4121 CALL_EXPR_ARG (exp, 1),
4122 CALL_EXPR_ARG (exp, 2), type);
4123 if (result)
4124 return expand_expr (result, target, mode, EXPAND_NORMAL);
4125 }
4126 return NULL_RTX;
4127 }
4128
/* Expand expression EXP, which is a call to the memcmp built-in function.
   Return NULL_RTX if we failed and the
   caller should emit a normal call, otherwise try to get the result in
   TARGET, if convenient (and in mode MODE, if that's convenient).  */

static rtx
expand_builtin_memcmp (tree exp, rtx target, enum machine_mode mode)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      /* First try to fold the call away entirely.  */
      tree result = fold_builtin_memcmp (CALL_EXPR_ARG (exp, 0),
					 CALL_EXPR_ARG (exp, 1),
					 CALL_EXPR_ARG (exp, 2));
      if (result)
	return expand_expr (result, target, mode, EXPAND_NORMAL);
    }

#if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
  {
    rtx arg1_rtx, arg2_rtx, arg3_rtx;
    rtx result;
    rtx insn;
    tree arg1 = CALL_EXPR_ARG (exp, 0);
    tree arg2 = CALL_EXPR_ARG (exp, 1);
    tree len = CALL_EXPR_ARG (exp, 2);

    int arg1_align
      = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
    int arg2_align
      = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
    enum machine_mode insn_mode;

    /* Prefer the cmpmem pattern; fall back to cmpstrn if the target
       only provides that.  */
#ifdef HAVE_cmpmemsi
    if (HAVE_cmpmemsi)
      insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
    else
#endif
#ifdef HAVE_cmpstrnsi
    if (HAVE_cmpstrnsi)
      insn_mode = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
    else
#endif
      return NULL_RTX;

    /* If we don't have POINTER_TYPE, call the function.  */
    if (arg1_align == 0 || arg2_align == 0)
      return NULL_RTX;

    /* Make a place to write the result of the instruction.  */
    result = target;
    if (! (result != 0
	   && REG_P (result) && GET_MODE (result) == insn_mode
	   && REGNO (result) >= FIRST_PSEUDO_REGISTER))
      result = gen_reg_rtx (insn_mode);

    arg1_rtx = get_memory_rtx (arg1, len);
    arg2_rtx = get_memory_rtx (arg2, len);
    arg3_rtx = expand_normal (len);

    /* Set MEM_SIZE as appropriate.  */
    if (GET_CODE (arg3_rtx) == CONST_INT)
      {
	set_mem_size (arg1_rtx, arg3_rtx);
	set_mem_size (arg2_rtx, arg3_rtx);
      }

#ifdef HAVE_cmpmemsi
    if (HAVE_cmpmemsi)
      insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
			   GEN_INT (MIN (arg1_align, arg2_align)));
    else
#endif
#ifdef HAVE_cmpstrnsi
    if (HAVE_cmpstrnsi)
      insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
			    GEN_INT (MIN (arg1_align, arg2_align)));
    else
#endif
      gcc_unreachable ();

    /* The pattern may decline at expansion time; then fall back to a
       direct library call on the already-expanded operands.  */
    if (insn)
      emit_insn (insn);
    else
      emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
			       TYPE_MODE (integer_type_node), 3,
			       XEXP (arg1_rtx, 0), Pmode,
			       XEXP (arg2_rtx, 0), Pmode,
			       convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
						TYPE_UNSIGNED (sizetype)),
			       TYPE_MODE (sizetype));

    /* Return the value in the proper mode for this function.  */
    mode = TYPE_MODE (TREE_TYPE (exp));
    if (GET_MODE (result) == mode)
      return result;
    else if (target != 0)
      {
	convert_move (target, result, 0);
	return target;
      }
    else
      return convert_to_mode (mode, result, 0);
  }
#endif

  return NULL_RTX;
}
4239
/* Expand expression EXP, which is a call to the strcmp builtin.  Return NULL_RTX
   if we failed the caller should emit a normal call, otherwise try to get
   the result in TARGET, if convenient.  */

static rtx
expand_builtin_strcmp (tree exp, rtx target, enum machine_mode mode)
{
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      /* First try to fold the call away entirely.  */
      tree result = fold_builtin_strcmp (CALL_EXPR_ARG (exp, 0),
					 CALL_EXPR_ARG (exp, 1));
      if (result)
	return expand_expr (result, target, mode, EXPAND_NORMAL);
    }

#if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
  if (cmpstr_optab[SImode] != CODE_FOR_nothing
      || cmpstrn_optab[SImode] != CODE_FOR_nothing)
    {
      rtx arg1_rtx, arg2_rtx;
      rtx result, insn = NULL_RTX;
      tree fndecl, fn;
      tree arg1 = CALL_EXPR_ARG (exp, 0);
      tree arg2 = CALL_EXPR_ARG (exp, 1);

      int arg1_align
	= get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
      int arg2_align
	= get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;

      /* If we don't have POINTER_TYPE, call the function.  */
      if (arg1_align == 0 || arg2_align == 0)
	return NULL_RTX;

      /* Stabilize the arguments in case gen_cmpstr(n)si fail.  */
      arg1 = builtin_save_expr (arg1);
      arg2 = builtin_save_expr (arg2);

      arg1_rtx = get_memory_rtx (arg1, NULL);
      arg2_rtx = get_memory_rtx (arg2, NULL);

#ifdef HAVE_cmpstrsi
      /* Try to call cmpstrsi.  */
      if (HAVE_cmpstrsi)
	{
	  enum machine_mode insn_mode
	    = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;

	  /* Make a place to write the result of the instruction.  */
	  result = target;
	  if (! (result != 0
		 && REG_P (result) && GET_MODE (result) == insn_mode
		 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	    result = gen_reg_rtx (insn_mode);

	  insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
			       GEN_INT (MIN (arg1_align, arg2_align)));
	}
#endif
#ifdef HAVE_cmpstrnsi
      /* Try to determine at least one length and call cmpstrnsi.  */
      if (!insn && HAVE_cmpstrnsi)
	{
	  tree len;
	  rtx arg3_rtx;

	  enum machine_mode insn_mode
	    = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
	  tree len1 = c_strlen (arg1, 1);
	  tree len2 = c_strlen (arg2, 1);

	  /* Include the NUL terminator in each known length.  */
	  if (len1)
	    len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
	  if (len2)
	    len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);

	  /* If we don't have a constant length for the first, use the length
	     of the second, if we know it.  We don't require a constant for
	     this case; some cost analysis could be done if both are available
	     but neither is constant.  For now, assume they're equally cheap,
	     unless one has side effects.  If both strings have constant lengths,
	     use the smaller.  */

	  if (!len1)
	    len = len2;
	  else if (!len2)
	    len = len1;
	  else if (TREE_SIDE_EFFECTS (len1))
	    len = len2;
	  else if (TREE_SIDE_EFFECTS (len2))
	    len = len1;
	  else if (TREE_CODE (len1) != INTEGER_CST)
	    len = len2;
	  else if (TREE_CODE (len2) != INTEGER_CST)
	    len = len1;
	  else if (tree_int_cst_lt (len1, len2))
	    len = len1;
	  else
	    len = len2;

	  /* If both arguments have side effects, we cannot optimize.  */
	  if (!len || TREE_SIDE_EFFECTS (len))
	    goto do_libcall;

	  arg3_rtx = expand_normal (len);

	  /* Make a place to write the result of the instruction.  */
	  result = target;
	  if (! (result != 0
		 && REG_P (result) && GET_MODE (result) == insn_mode
		 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	    result = gen_reg_rtx (insn_mode);

	  insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
				GEN_INT (MIN (arg1_align, arg2_align)));
	}
#endif

      if (insn)
	{
	  emit_insn (insn);

	  /* Return the value in the proper mode for this function.  */
	  mode = TYPE_MODE (TREE_TYPE (exp));
	  if (GET_MODE (result) == mode)
	    return result;
	  if (target == 0)
	    return convert_to_mode (mode, result, 0);
	  convert_move (target, result, 0);
	  return target;
	}

      /* Expand the library call ourselves using a stabilized argument
	 list to avoid re-evaluating the function's arguments twice.  */
#ifdef HAVE_cmpstrnsi
    do_libcall:
#endif
      fndecl = get_callee_fndecl (exp);
      fn = build_call_expr (fndecl, 2, arg1, arg2);
      if (TREE_CODE (fn) == CALL_EXPR)
	CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_call (fn, target, target == const0_rtx);
    }
#endif
  return NULL_RTX;
}
4388
/* Expand expression EXP, which is a call to the strncmp builtin.  Return
   NULL_RTX if we failed the caller should emit a normal call, otherwise try to get
   the result in TARGET, if convenient.  */

static rtx
expand_builtin_strncmp (tree exp, rtx target, enum machine_mode mode)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      /* First try to fold the call away entirely.  */
      tree result = fold_builtin_strncmp (CALL_EXPR_ARG (exp, 0),
					  CALL_EXPR_ARG (exp, 1),
					  CALL_EXPR_ARG (exp, 2));
      if (result)
	return expand_expr (result, target, mode, EXPAND_NORMAL);
    }

  /* If c_strlen can determine an expression for one of the string
     lengths, and it doesn't have side effects, then emit cmpstrnsi
     using length MIN(strlen(string)+1, arg3).  */
#ifdef HAVE_cmpstrnsi
  if (HAVE_cmpstrnsi)
    {
      tree len, len1, len2;
      rtx arg1_rtx, arg2_rtx, arg3_rtx;
      rtx result, insn;
      tree fndecl, fn;
      tree arg1 = CALL_EXPR_ARG (exp, 0);
      tree arg2 = CALL_EXPR_ARG (exp, 1);
      tree arg3 = CALL_EXPR_ARG (exp, 2);

      int arg1_align
	= get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
      int arg2_align
	= get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
      enum machine_mode insn_mode
	= insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;

      len1 = c_strlen (arg1, 1);
      len2 = c_strlen (arg2, 1);

      /* Include the NUL terminator in each known length.  */
      if (len1)
	len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
      if (len2)
	len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);

      /* If we don't have a constant length for the first, use the length
	 of the second, if we know it.  We don't require a constant for
	 this case; some cost analysis could be done if both are available
	 but neither is constant.  For now, assume they're equally cheap,
	 unless one has side effects.  If both strings have constant lengths,
	 use the smaller.  */

      if (!len1)
	len = len2;
      else if (!len2)
	len = len1;
      else if (TREE_SIDE_EFFECTS (len1))
	len = len2;
      else if (TREE_SIDE_EFFECTS (len2))
	len = len1;
      else if (TREE_CODE (len1) != INTEGER_CST)
	len = len2;
      else if (TREE_CODE (len2) != INTEGER_CST)
	len = len1;
      else if (tree_int_cst_lt (len1, len2))
	len = len1;
      else
	len = len2;

      /* If both arguments have side effects, we cannot optimize.  */
      if (!len || TREE_SIDE_EFFECTS (len))
	return NULL_RTX;

      /* The actual new length parameter is MIN(len,arg3).  */
      len = fold_build2 (MIN_EXPR, TREE_TYPE (len), len,
			 fold_convert (TREE_TYPE (len), arg3));

      /* If we don't have POINTER_TYPE, call the function.  */
      if (arg1_align == 0 || arg2_align == 0)
	return NULL_RTX;

      /* Make a place to write the result of the instruction.  */
      result = target;
      if (! (result != 0
	     && REG_P (result) && GET_MODE (result) == insn_mode
	     && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	result = gen_reg_rtx (insn_mode);

      /* Stabilize the arguments in case gen_cmpstrnsi fails.  */
      arg1 = builtin_save_expr (arg1);
      arg2 = builtin_save_expr (arg2);
      len = builtin_save_expr (len);

      arg1_rtx = get_memory_rtx (arg1, len);
      arg2_rtx = get_memory_rtx (arg2, len);
      arg3_rtx = expand_normal (len);
      insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
			    GEN_INT (MIN (arg1_align, arg2_align)));
      if (insn)
	{
	  emit_insn (insn);

	  /* Return the value in the proper mode for this function.  */
	  mode = TYPE_MODE (TREE_TYPE (exp));
	  if (GET_MODE (result) == mode)
	    return result;
	  if (target == 0)
	    return convert_to_mode (mode, result, 0);
	  convert_move (target, result, 0);
	  return target;
	}

      /* Expand the library call ourselves using a stabilized argument
	 list to avoid re-evaluating the function's arguments twice.  */
      fndecl = get_callee_fndecl (exp);
      fn = build_call_expr (fndecl, 3, arg1, arg2, len);
      if (TREE_CODE (fn) == CALL_EXPR)
	CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_call (fn, target, target == const0_rtx);
    }
#endif
  return NULL_RTX;
}
4515
/* Expand expression EXP, which is a call to the strcat builtin.
   Return NULL_RTX if we failed the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strcat (tree fndecl, tree exp, rtx target, enum machine_mode mode)
{
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      tree dst = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      const char *p = c_getstr (src);

      /* If the string length is zero, return the dst parameter.  */
      if (p && *p == '\0')
	return expand_expr (dst, target, mode, EXPAND_NORMAL);

      if (optimize_insn_for_speed_p ())
	{
	  /* See if we can store by pieces into (dst + strlen(dst)).  */
	  tree newsrc, newdst,
	    strlen_fn = implicit_built_in_decls[BUILT_IN_STRLEN];
	  rtx insns;

	  /* Stabilize the argument list.  */
	  newsrc = builtin_save_expr (src);
	  dst = builtin_save_expr (dst);

	  /* Collect the RTL in a detached sequence so it can be thrown
	     away if the strcpy expansion below fails.  */
	  start_sequence ();

	  /* Create strlen (dst).  */
	  newdst = build_call_expr (strlen_fn, 1, dst);
	  /* Create (dst p+ strlen (dst)).  */

	  newdst = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dst), dst, newdst);
	  newdst = builtin_save_expr (newdst);

	  if (!expand_builtin_strcpy_args (fndecl, newdst, newsrc, target, mode))
	    {
	      /* Discard the pending sequence and punt to a library
		 call.  */
	      end_sequence (); /* Stop sequence.  */
	      return NULL_RTX;
	    }

	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);

	  return expand_expr (dst, target, mode, EXPAND_NORMAL);
	}

      return NULL_RTX;
    }
}
4572
4573 /* Expand expression EXP, which is a call to the strncat builtin.
4574 Return NULL_RTX if we failed the caller should emit a normal call,
4575 otherwise try to get the result in TARGET, if convenient. */
4576
4577 static rtx
4578 expand_builtin_strncat (tree exp, rtx target, enum machine_mode mode)
4579 {
4580 if (validate_arglist (exp,
4581 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4582 {
4583 tree result = fold_builtin_strncat (CALL_EXPR_ARG (exp, 0),
4584 CALL_EXPR_ARG (exp, 1),
4585 CALL_EXPR_ARG (exp, 2));
4586 if (result)
4587 return expand_expr (result, target, mode, EXPAND_NORMAL);
4588 }
4589 return NULL_RTX;
4590 }
4591
4592 /* Expand expression EXP, which is a call to the strspn builtin.
4593 Return NULL_RTX if we failed the caller should emit a normal call,
4594 otherwise try to get the result in TARGET, if convenient. */
4595
4596 static rtx
4597 expand_builtin_strspn (tree exp, rtx target, enum machine_mode mode)
4598 {
4599 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4600 {
4601 tree result = fold_builtin_strspn (CALL_EXPR_ARG (exp, 0),
4602 CALL_EXPR_ARG (exp, 1));
4603 if (result)
4604 return expand_expr (result, target, mode, EXPAND_NORMAL);
4605 }
4606 return NULL_RTX;
4607 }
4608
4609 /* Expand expression EXP, which is a call to the strcspn builtin.
4610 Return NULL_RTX if we failed the caller should emit a normal call,
4611 otherwise try to get the result in TARGET, if convenient. */
4612
4613 static rtx
4614 expand_builtin_strcspn (tree exp, rtx target, enum machine_mode mode)
4615 {
4616 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4617 {
4618 tree result = fold_builtin_strcspn (CALL_EXPR_ARG (exp, 0),
4619 CALL_EXPR_ARG (exp, 1));
4620 if (result)
4621 return expand_expr (result, target, mode, EXPAND_NORMAL);
4622 }
4623 return NULL_RTX;
4624 }
4625
/* Expand a call to __builtin_saveregs, generating the result in TARGET,
   if that's convenient.  */

rtx
expand_builtin_saveregs (void)
{
  rtx val, seq;

  /* Don't do __builtin_saveregs more than once in a function.
     Save the result of the first call and reuse it.  */
  if (saveregs_value != 0)
    return saveregs_value;

  /* When this function is called, it means that registers must be
     saved on entry to this function.  So we migrate the call to the
     first insn of this function.  */

  start_sequence ();

  /* Do whatever the machine needs done in this case.  */
  val = targetm.calls.expand_builtin_saveregs ();

  seq = get_insns ();
  end_sequence ();

  /* Cache the result for subsequent calls (see check above).  */
  saveregs_value = val;

  /* Put the insns after the NOTE that starts the function.  If this
     is inside a start_sequence, make the outer-level insn chain current, so
     the code is placed at the start of the function.  */
  push_topmost_sequence ();
  emit_insn_after (seq, entry_of_function ());
  pop_topmost_sequence ();

  return val;
}
4662
/* __builtin_args_info (N) returns word N of the arg space info
   for the current function.  The number and meanings of words
   is controlled by the definition of CUMULATIVE_ARGS.  */

static rtx
expand_builtin_args_info (tree exp)
{
  int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
  /* View the target's CUMULATIVE_ARGS record as an array of ints.  */
  int *word_ptr = (int *) &crtl->args.info;

  /* The int-array view above requires CUMULATIVE_ARGS to be a whole
     number of ints.  */
  gcc_assert (sizeof (CUMULATIVE_ARGS) % sizeof (int) == 0);

  if (call_expr_nargs (exp) != 0)
    {
      if (!host_integerp (CALL_EXPR_ARG (exp, 0), 0))
	error ("argument of %<__builtin_args_info%> must be constant");
      else
	{
	  HOST_WIDE_INT wordnum = tree_low_cst (CALL_EXPR_ARG (exp, 0), 0);

	  if (wordnum < 0 || wordnum >= nwords)
	    error ("argument of %<__builtin_args_info%> out of range");
	  else
	    return GEN_INT (word_ptr[wordnum]);
	}
    }
  else
    error ("missing argument in %<__builtin_args_info%>");

  /* All error paths degrade to a zero result.  */
  return const0_rtx;
}
4694
/* Expand a call to __builtin_next_arg.  */

static rtx
expand_builtin_next_arg (void)
{
  /* Checking arguments is already done in fold_builtin_next_arg
     that must be called before this function.  */
  /* The result is the incoming argument pointer advanced by the
     offset of the first anonymous argument.  */
  return expand_binop (ptr_mode, add_optab,
		       crtl->args.internal_arg_pointer,
		       crtl->args.arg_offset_rtx,
		       NULL_RTX, 0, OPTAB_LIB_WIDEN);
}
4707
4708 /* Make it easier for the backends by protecting the valist argument
4709 from multiple evaluations. */
4710
4711 static tree
4712 stabilize_va_list (tree valist, int needs_lvalue)
4713 {
4714 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4715
4716 gcc_assert (vatype != NULL_TREE);
4717
4718 if (TREE_CODE (vatype) == ARRAY_TYPE)
4719 {
4720 if (TREE_SIDE_EFFECTS (valist))
4721 valist = save_expr (valist);
4722
4723 /* For this case, the backends will be expecting a pointer to
4724 vatype, but it's possible we've actually been given an array
4725 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4726 So fix it. */
4727 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4728 {
4729 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4730 valist = build_fold_addr_expr_with_type (valist, p1);
4731 }
4732 }
4733 else
4734 {
4735 tree pt;
4736
4737 if (! needs_lvalue)
4738 {
4739 if (! TREE_SIDE_EFFECTS (valist))
4740 return valist;
4741
4742 pt = build_pointer_type (vatype);
4743 valist = fold_build1 (ADDR_EXPR, pt, valist);
4744 TREE_SIDE_EFFECTS (valist) = 1;
4745 }
4746
4747 if (TREE_SIDE_EFFECTS (valist))
4748 valist = save_expr (valist);
4749 valist = build_fold_indirect_ref (valist);
4750 }
4751
4752 return valist;
4753 }
4754
/* The "standard" definition of va_list is void*: the default hook
   simply yields the generic pointer type node.  */

tree
std_build_builtin_va_list (void)
{
  return ptr_type_node;
}
4762
/* The "standard" abi va_list is va_list_type_node; FNDECL is ignored
   because the default does not vary per function.  */

tree
std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
{
  return va_list_type_node;
}
4770
/* The "standard" type of va_list is va_list_type_node.

   Returns va_list_type_node if TYPE matches it (after stripping one
   level of indirection/decay on either side), NULL_TREE otherwise.  */

tree
std_canonical_va_list_type (tree type)
{
  tree wtype, htype;

  /* Strip one level of indirection from the candidate: either an
     INDIRECT_REF or a pointer-to-pointer form.  */
  if (INDIRECT_REF_P (type))
    type = TREE_TYPE (type);
  else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE(type)))
    type = TREE_TYPE (type);
  wtype = va_list_type_node;
  htype = type;
  /* Treat structure va_list types.  */
  if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
    htype = TREE_TYPE (htype);
  else if (TREE_CODE (wtype) == ARRAY_TYPE)
    {
      /* If va_list is an array type, the argument may have decayed
	 to a pointer type, e.g. by being passed to another function.
	 In that case, unwrap both types so that we can compare the
	 underlying records.  */
      if (TREE_CODE (htype) == ARRAY_TYPE
	  || POINTER_TYPE_P (htype))
	{
	  wtype = TREE_TYPE (wtype);
	  htype = TREE_TYPE (htype);
	}
    }
  /* Compare main variants so qualifiers do not defeat the match.  */
  if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
    return va_list_type_node;

  return NULL_TREE;
}
4805
/* The "standard" implementation of va_start: just assign `nextarg' to
   the variable.  VALIST is expanded as a write destination and
   NEXTARG is moved into it with conversion as needed.  */

void
std_expand_builtin_va_start (tree valist, rtx nextarg)
{
  rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
  convert_move (va_r, nextarg, 0);
}
4815
4816 /* Expand EXP, a call to __builtin_va_start. */
4817
4818 static rtx
4819 expand_builtin_va_start (tree exp)
4820 {
4821 rtx nextarg;
4822 tree valist;
4823
4824 if (call_expr_nargs (exp) < 2)
4825 {
4826 error ("too few arguments to function %<va_start%>");
4827 return const0_rtx;
4828 }
4829
4830 if (fold_builtin_next_arg (exp, true))
4831 return const0_rtx;
4832
4833 nextarg = expand_builtin_next_arg ();
4834 valist = stabilize_va_list (CALL_EXPR_ARG (exp, 0), 1);
4835
4836 if (targetm.expand_builtin_va_start)
4837 targetm.expand_builtin_va_start (valist, nextarg);
4838 else
4839 std_expand_builtin_va_start (valist, nextarg);
4840
4841 return const0_rtx;
4842 }
4843
/* The "standard" implementation of va_arg: read the value from the
   current (padded) address and increment by the (padded) size.

   VALIST is the va_list expression and TYPE the requested argument
   type.  Gimplified setup statements are appended to PRE_P (POST_P is
   passed through to gimplify_expr for sharing).  Returns a tree that
   dereferences the argument slot.  */

tree
std_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
			  gimple_seq *post_p)
{
  tree addr, t, type_size, rounded_size, valist_tmp;
  unsigned HOST_WIDE_INT align, boundary;
  bool indirect;

#ifdef ARGS_GROW_DOWNWARD
  /* All of the alignment and movement below is for args-grow-up machines.
     As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
     implement their own specialized gimplify_va_arg_expr routines.  */
  gcc_unreachable ();
#endif

  /* A type passed by reference occupies a pointer slot; fetch the
     pointer here and add an extra dereference at the end.  */
  indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
  if (indirect)
    type = build_pointer_type (type);

  align = PARM_BOUNDARY / BITS_PER_UNIT;
  boundary = FUNCTION_ARG_BOUNDARY (TYPE_MODE (type), type);

  /* When we align parameter on stack for caller, if the parameter
     alignment is beyond MAX_SUPPORTED_STACK_ALIGNMENT, it will be
     aligned at MAX_SUPPORTED_STACK_ALIGNMENT.  We will match callee
     here with caller.  */
  if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
    boundary = MAX_SUPPORTED_STACK_ALIGNMENT;

  boundary /= BITS_PER_UNIT;

  /* Hoist the valist value into a temporary for the moment.  */
  valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);

  /* va_list pointer is aligned to PARM_BOUNDARY.  If argument actually
     requires greater alignment, we must perform dynamic alignment.  */
  if (boundary > align
      && !integer_zerop (TYPE_SIZE (type)))
    {
      /* valist_tmp = valist_tmp + (boundary - 1).  */
      t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
		  fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist),
			       valist_tmp, size_int (boundary - 1)));
      gimplify_and_add (t, pre_p);

      /* valist_tmp = valist_tmp & -boundary (round down to boundary).  */
      t = fold_convert (sizetype, valist_tmp);
      t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
		  fold_convert (TREE_TYPE (valist),
				fold_build2 (BIT_AND_EXPR, sizetype, t,
					     size_int (-boundary))));
      gimplify_and_add (t, pre_p);
    }
  else
    boundary = align;

  /* If the actual alignment is less than the alignment of the type,
     adjust the type accordingly so that we don't assume strict alignment
     when dereferencing the pointer.  */
  boundary *= BITS_PER_UNIT;
  if (boundary < TYPE_ALIGN (type))
    {
      type = build_variant_type_copy (type);
      TYPE_ALIGN (type) = boundary;
    }

  /* Compute the rounded size of the type.  */
  type_size = size_in_bytes (type);
  rounded_size = round_up (type_size, align);

  /* Reduce rounded_size so it's sharable with the postqueue.  */
  gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);

  /* Get AP.  */
  addr = valist_tmp;
  if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
    {
      /* Small args are padded downward.  */
      t = fold_build2 (GT_EXPR, sizetype, rounded_size, size_int (align));
      t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
		       size_binop (MINUS_EXPR, rounded_size, type_size));
      addr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (addr), addr, t);
    }

  /* Compute new value for AP.  */
  t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist), valist_tmp, rounded_size);
  t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
  gimplify_and_add (t, pre_p);

  addr = fold_convert (build_pointer_type (type), addr);

  /* Extra dereference for the pass-by-reference case noted above.  */
  if (indirect)
    addr = build_va_arg_indirect_ref (addr);

  return build_va_arg_indirect_ref (addr);
}
4941
/* Build an indirect-ref expression over the given TREE, which represents a
   piece of a va_arg() expansion.  Marks the reference for mudflap so it
   is excluded from instrumentation.  */
tree
build_va_arg_indirect_ref (tree addr)
{
  addr = build_fold_indirect_ref (addr);

  if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF.  */
    mf_mark (addr);

  return addr;
}
4954
4955 /* Return a dummy expression of type TYPE in order to keep going after an
4956 error. */
4957
4958 static tree
4959 dummy_object (tree type)
4960 {
4961 tree t = build_int_cst (build_pointer_type (type), 0);
4962 return build1 (INDIRECT_REF, type, t);
4963 }
4964
/* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
   builtin function, but a very special sort of operator.

   *EXPR_P is the VA_ARG_EXPR; gimplified statements go to PRE_P and
   POST_P.  Returns GS_ERROR on a bad va_list operand, GS_ALL_DONE
   when the expression was fully replaced here, or GS_OK after
   delegating to the target's gimplify hook.  */

enum gimplify_status
gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree promoted_type, have_va_type;
  tree valist = TREE_OPERAND (*expr_p, 0);
  tree type = TREE_TYPE (*expr_p);
  tree t;

  /* Verify that valist is of the proper type.  */
  have_va_type = TREE_TYPE (valist);
  if (have_va_type == error_mark_node)
    return GS_ERROR;
  have_va_type = targetm.canonical_va_list_type (have_va_type);

  if (have_va_type == NULL_TREE)
    {
      error ("first argument to %<va_arg%> not of type %<va_list%>");
      return GS_ERROR;
    }

  /* Generate a diagnostic for requesting data of a type that cannot
     be passed through `...' due to type promotion at the call site.  */
  if ((promoted_type = lang_hooks.types.type_promotes_to (type))
	   != type)
    {
      static bool gave_help;
      bool warned;

      /* Unfortunately, this is merely undefined, rather than a constraint
	 violation, so we cannot make this an error.  If this call is never
	 executed, the program is still strictly conforming.  */
      warned = warning (0, "%qT is promoted to %qT when passed through %<...%>",
			type, promoted_type);
      /* The follow-up hint is emitted only once per compilation.  */
      if (!gave_help && warned)
	{
	  gave_help = true;
	  inform (input_location, "(so you should pass %qT not %qT to %<va_arg%>)",
		  promoted_type, type);
	}

      /* We can, however, treat "undefined" any way we please.
	 Call abort to encourage the user to fix the program.  */
      if (warned)
	inform (input_location, "if this code is reached, the program will abort");
      /* Before the abort, allow the evaluation of the va_list
	 expression to exit or longjmp.  */
      gimplify_and_add (valist, pre_p);
      t = build_call_expr (implicit_built_in_decls[BUILT_IN_TRAP], 0);
      gimplify_and_add (t, pre_p);

      /* This is dead code, but go ahead and finish so that the
	 mode of the result comes out right.  */
      *expr_p = dummy_object (type);
      return GS_ALL_DONE;
    }
  else
    {
      /* Make it easier for the backends by protecting the valist argument
	 from multiple evaluations.  */
      if (TREE_CODE (have_va_type) == ARRAY_TYPE)
	{
	  /* For this case, the backends will be expecting a pointer to
	     TREE_TYPE (abi), but it's possible we've
	     actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
	     So fix it.  */
	  if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
	    {
	      tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
	      valist = fold_convert (p1, build_fold_addr_expr (valist));
	    }

	  gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
	}
      else
	gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);

      if (!targetm.gimplify_va_arg_expr)
	/* FIXME: Once most targets are converted we should merely
	   assert this is non-null.  */
	return GS_ALL_DONE;

      *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
      return GS_OK;
    }
}
5053
/* Expand EXP, a call to __builtin_va_end.  Generates no code of its
   own; only the va_list operand's side effects are evaluated.  */

static rtx
expand_builtin_va_end (tree exp)
{
  tree valist = CALL_EXPR_ARG (exp, 0);

  /* Evaluate for side effects, if needed.  I hate macros that don't
     do that.  */
  if (TREE_SIDE_EFFECTS (valist))
    expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);

  return const0_rtx;
}
5068
/* Expand EXP, a call to __builtin_va_copy.  We do this as a
   builtin rather than just as an assignment in stdarg.h because of the
   nastiness of array-type va_list types.

   Scalar/record va_lists are copied with a plain assignment; array
   va_lists are copied with a block move between the two objects.  */

static rtx
expand_builtin_va_copy (tree exp)
{
  tree dst, src, t;

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  /* Destination needs to be assignable; source only readable.  */
  dst = stabilize_va_list (dst, 1);
  src = stabilize_va_list (src, 0);

  gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);

  if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
    {
      /* Non-array va_list: a simple assignment suffices.  */
      t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }
  else
    {
      rtx dstb, srcb, size;

      /* Evaluate to pointers.  */
      dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
      srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
      size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
			  NULL_RTX, VOIDmode, EXPAND_NORMAL);

      dstb = convert_memory_address (Pmode, dstb);
      srcb = convert_memory_address (Pmode, srcb);

      /* "Dereference" to BLKmode memories.  */
      dstb = gen_rtx_MEM (BLKmode, dstb);
      set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
      set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
      srcb = gen_rtx_MEM (BLKmode, srcb);
      set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
      set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));

      /* Copy.  */
      emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
    }

  return const0_rtx;
}
5119
5120 /* Expand a call to one of the builtin functions __builtin_frame_address or
5121 __builtin_return_address. */
5122
5123 static rtx
5124 expand_builtin_frame_address (tree fndecl, tree exp)
5125 {
5126 /* The argument must be a nonnegative integer constant.
5127 It counts the number of frames to scan up the stack.
5128 The value is the return address saved in that frame. */
5129 if (call_expr_nargs (exp) == 0)
5130 /* Warning about missing arg was already issued. */
5131 return const0_rtx;
5132 else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
5133 {
5134 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5135 error ("invalid argument to %<__builtin_frame_address%>");
5136 else
5137 error ("invalid argument to %<__builtin_return_address%>");
5138 return const0_rtx;
5139 }
5140 else
5141 {
5142 rtx tem
5143 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
5144 tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
5145
5146 /* Some ports cannot access arbitrary stack frames. */
5147 if (tem == NULL)
5148 {
5149 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5150 warning (0, "unsupported argument to %<__builtin_frame_address%>");
5151 else
5152 warning (0, "unsupported argument to %<__builtin_return_address%>");
5153 return const0_rtx;
5154 }
5155
5156 /* For __builtin_frame_address, return what we've got. */
5157 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5158 return tem;
5159
5160 if (!REG_P (tem)
5161 && ! CONSTANT_P (tem))
5162 tem = copy_to_mode_reg (Pmode, tem);
5163 return tem;
5164 }
5165 }
5166
5167 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if
5168 we failed and the caller should emit a normal call, otherwise try to get
5169 the result in TARGET, if convenient. */
5170
5171 static rtx
5172 expand_builtin_alloca (tree exp, rtx target)
5173 {
5174 rtx op0;
5175 rtx result;
5176
5177 /* In -fmudflap-instrumented code, alloca() and __builtin_alloca()
5178 should always expand to function calls. These can be intercepted
5179 in libmudflap. */
5180 if (flag_mudflap)
5181 return NULL_RTX;
5182
5183 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5184 return NULL_RTX;
5185
5186 /* Compute the argument. */
5187 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5188
5189 /* Allocate the desired space. */
5190 result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
5191 result = convert_memory_address (ptr_mode, result);
5192
5193 return result;
5194 }
5195
5196 /* Expand a call to a bswap builtin with argument ARG0. MODE
5197 is the mode to expand with. */
5198
5199 static rtx
5200 expand_builtin_bswap (tree exp, rtx target, rtx subtarget)
5201 {
5202 enum machine_mode mode;
5203 tree arg;
5204 rtx op0;
5205
5206 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5207 return NULL_RTX;
5208
5209 arg = CALL_EXPR_ARG (exp, 0);
5210 mode = TYPE_MODE (TREE_TYPE (arg));
5211 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5212
5213 target = expand_unop (mode, bswap_optab, op0, target, 1);
5214
5215 gcc_assert (target);
5216
5217 return convert_to_mode (mode, target, 0);
5218 }
5219
5220 /* Expand a call to a unary builtin in EXP.
5221 Return NULL_RTX if a normal call should be emitted rather than expanding the
5222 function in-line. If convenient, the result should be placed in TARGET.
5223 SUBTARGET may be used as the target for computing one of EXP's operands. */
5224
5225 static rtx
5226 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
5227 rtx subtarget, optab op_optab)
5228 {
5229 rtx op0;
5230
5231 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5232 return NULL_RTX;
5233
5234 /* Compute the argument. */
5235 op0 = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
5236 VOIDmode, EXPAND_NORMAL);
5237 /* Compute op, into TARGET if possible.
5238 Set TARGET to wherever the result comes back. */
5239 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5240 op_optab, op0, target, 1);
5241 gcc_assert (target);
5242
5243 return convert_to_mode (target_mode, target, 0);
5244 }
5245
5246 /* If the string passed to fputs is a constant and is one character
5247 long, we attempt to transform this call into __builtin_fputc(). */
5248
5249 static rtx
5250 expand_builtin_fputs (tree exp, rtx target, bool unlocked)
5251 {
5252 /* Verify the arguments in the original call. */
5253 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5254 {
5255 tree result = fold_builtin_fputs (CALL_EXPR_ARG (exp, 0),
5256 CALL_EXPR_ARG (exp, 1),
5257 (target == const0_rtx),
5258 unlocked, NULL_TREE);
5259 if (result)
5260 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
5261 }
5262 return NULL_RTX;
5263 }
5264
5265 /* Expand a call to __builtin_expect. We just return our argument
5266 as the builtin_expect semantic should've been already executed by
5267 tree branch prediction pass. */
5268
5269 static rtx
5270 expand_builtin_expect (tree exp, rtx target)
5271 {
5272 tree arg, c;
5273
5274 if (call_expr_nargs (exp) < 2)
5275 return const0_rtx;
5276 arg = CALL_EXPR_ARG (exp, 0);
5277 c = CALL_EXPR_ARG (exp, 1);
5278
5279 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5280 /* When guessing was done, the hints should be already stripped away. */
5281 gcc_assert (!flag_guess_branch_prob
5282 || optimize == 0 || errorcount || sorrycount);
5283 return target;
5284 }
5285
/* Emit code that stops execution: the target's trap insn when one
   exists, otherwise a call to abort; followed by a barrier since
   control never returns.  */
void
expand_builtin_trap (void)
{
#ifdef HAVE_trap
  if (HAVE_trap)
    emit_insn (gen_trap ());
  else
#endif
    emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
  emit_barrier ();
}
5297
5298 /* Expand EXP, a call to fabs, fabsf or fabsl.
5299 Return NULL_RTX if a normal call should be emitted rather than expanding
5300 the function inline. If convenient, the result should be placed
5301 in TARGET. SUBTARGET may be used as the target for computing
5302 the operand. */
5303
5304 static rtx
5305 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5306 {
5307 enum machine_mode mode;
5308 tree arg;
5309 rtx op0;
5310
5311 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5312 return NULL_RTX;
5313
5314 arg = CALL_EXPR_ARG (exp, 0);
5315 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5316 mode = TYPE_MODE (TREE_TYPE (arg));
5317 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5318 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5319 }
5320
5321 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5322 Return NULL is a normal call should be emitted rather than expanding the
5323 function inline. If convenient, the result should be placed in TARGET.
5324 SUBTARGET may be used as the target for computing the operand. */
5325
5326 static rtx
5327 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5328 {
5329 rtx op0, op1;
5330 tree arg;
5331
5332 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5333 return NULL_RTX;
5334
5335 arg = CALL_EXPR_ARG (exp, 0);
5336 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5337
5338 arg = CALL_EXPR_ARG (exp, 1);
5339 op1 = expand_normal (arg);
5340
5341 return expand_copysign (op0, op1, target);
5342 }
5343
5344 /* Create a new constant string literal and return a char* pointer to it.
5345 The STRING_CST value is the LEN characters at STR. */
5346 tree
5347 build_string_literal (int len, const char *str)
5348 {
5349 tree t, elem, index, type;
5350
5351 t = build_string (len, str);
5352 elem = build_type_variant (char_type_node, 1, 0);
5353 index = build_index_type (size_int (len - 1));
5354 type = build_array_type (elem, index);
5355 TREE_TYPE (t) = type;
5356 TREE_CONSTANT (t) = 1;
5357 TREE_READONLY (t) = 1;
5358 TREE_STATIC (t) = 1;
5359
5360 type = build_pointer_type (elem);
5361 t = build1 (ADDR_EXPR, type,
5362 build4 (ARRAY_REF, elem,
5363 t, integer_zero_node, NULL_TREE, NULL_TREE));
5364 return t;
5365 }
5366
/* Expand EXP, a call to printf or printf_unlocked.
   Return NULL_RTX if a normal call should be emitted rather than transforming
   the function inline.  If convenient, the result should be placed in
   TARGET with mode MODE.  UNLOCKED indicates this is a printf_unlocked
   call.

   Only applies when the return value is unused and the format string
   is a literal: "%s\n" becomes puts, "%c" becomes putchar, a
   percent-free format becomes putchar (one char) or puts (ends in
   newline).  */
static rtx
expand_builtin_printf (tree exp, rtx target, enum machine_mode mode,
		       bool unlocked)
{
  /* If we're using an unlocked function, assume the other unlocked
     functions exist explicitly.  */
  tree const fn_putchar = unlocked ? built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED]
    : implicit_built_in_decls[BUILT_IN_PUTCHAR];
  tree const fn_puts = unlocked ? built_in_decls[BUILT_IN_PUTS_UNLOCKED]
    : implicit_built_in_decls[BUILT_IN_PUTS];
  const char *fmt_str;
  tree fn = 0;
  tree fmt, arg;
  int nargs = call_expr_nargs (exp);

  /* If the return value is used, don't do the transformation.  */
  if (target != const0_rtx)
    return NULL_RTX;

  /* Verify the required arguments in the original call.  */
  if (nargs == 0)
    return NULL_RTX;
  fmt = CALL_EXPR_ARG (exp, 0);
  if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
    return NULL_RTX;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return NULL_RTX;

  if (!init_target_chars ())
    return NULL_RTX;

  /* If the format specifier was "%s\n", call __builtin_puts(arg).  */
  if (strcmp (fmt_str, target_percent_s_newline) == 0)
    {
      if ((nargs != 2)
	  || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 1))))
	return NULL_RTX;
      if (fn_puts)
	fn = build_call_expr (fn_puts, 1, CALL_EXPR_ARG (exp, 1));
    }
  /* If the format specifier was "%c", call __builtin_putchar(arg).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if ((nargs != 2)
	  || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1))) != INTEGER_TYPE)
	return NULL_RTX;
      if (fn_putchar)
	fn = build_call_expr (fn_putchar, 1, CALL_EXPR_ARG (exp, 1));
    }
  else
    {
      /* We can't handle anything else with % args or %% ... yet.  */
      if (strchr (fmt_str, target_percent))
	return NULL_RTX;

      if (nargs > 1)
	return NULL_RTX;

      /* If the format specifier was "", printf does nothing.  */
      if (fmt_str[0] == '\0')
	return const0_rtx;
      /* If the format specifier has length of 1, call putchar.  */
      if (fmt_str[1] == '\0')
	{
	  /* Given printf("c"), (where c is any one character,)
	     convert "c"[0] to an int and pass that to the replacement
	     function.  */
	  arg = build_int_cst (NULL_TREE, fmt_str[0]);
	  if (fn_putchar)
	    fn = build_call_expr (fn_putchar, 1, arg);
	}
      else
	{
	  /* If the format specifier was "string\n", call puts("string").  */
	  size_t len = strlen (fmt_str);
	  if ((unsigned char)fmt_str[len - 1] == target_newline)
	    {
	      /* Create a NUL-terminated string that's one char shorter
		 than the original, stripping off the trailing '\n'.  */
	      char *newstr = XALLOCAVEC (char, len);
	      memcpy (newstr, fmt_str, len - 1);
	      newstr[len - 1] = 0;
	      arg = build_string_literal (len, newstr);
	      if (fn_puts)
		fn = build_call_expr (fn_puts, 1, arg);
	    }
	  else
	    /* We'd like to arrange to call fputs(string,stdout) here,
	       but we need stdout and don't have a way to get it yet.  */
	    return NULL_RTX;
	}
    }

  if (!fn)
    return NULL_RTX;
  /* Propagate the original call's tail-call flag to the replacement.  */
  if (TREE_CODE (fn) == CALL_EXPR)
    CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
  return expand_expr (fn, target, mode, EXPAND_NORMAL);
}
5474
/* Expand EXP, a call to fprintf or fprintf_unlocked.
   Return NULL_RTX if a normal call should be emitted rather than transforming
   the function inline.  If convenient, the result should be placed in
   TARGET with mode MODE.  UNLOCKED indicates this is a fprintf_unlocked
   call.

   Only applies when the return value is unused and the format string
   is a literal: "%s" becomes fputs, "%c" becomes fputc, and a
   percent-free format becomes fputs of the format itself.  */
static rtx
expand_builtin_fprintf (tree exp, rtx target, enum machine_mode mode,
			bool unlocked)
{
  /* If we're using an unlocked function, assume the other unlocked
     functions exist explicitly.  */
  tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
    : implicit_built_in_decls[BUILT_IN_FPUTC];
  tree const fn_fputs = unlocked ? built_in_decls[BUILT_IN_FPUTS_UNLOCKED]
    : implicit_built_in_decls[BUILT_IN_FPUTS];
  const char *fmt_str;
  tree fn = 0;
  tree fmt, fp, arg;
  int nargs = call_expr_nargs (exp);

  /* If the return value is used, don't do the transformation.  */
  if (target != const0_rtx)
    return NULL_RTX;

  /* Verify the required arguments in the original call.  */
  if (nargs < 2)
    return NULL_RTX;
  fp = CALL_EXPR_ARG (exp, 0);
  if (! POINTER_TYPE_P (TREE_TYPE (fp)))
    return NULL_RTX;
  fmt = CALL_EXPR_ARG (exp, 1);
  if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
    return NULL_RTX;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return NULL_RTX;

  if (!init_target_chars ())
    return NULL_RTX;

  /* If the format specifier was "%s", call __builtin_fputs(arg,fp).  */
  if (strcmp (fmt_str, target_percent_s) == 0)
    {
      if ((nargs != 3)
	  || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 2))))
	return NULL_RTX;
      arg = CALL_EXPR_ARG (exp, 2);
      if (fn_fputs)
	fn = build_call_expr (fn_fputs, 2, arg, fp);
    }
  /* If the format specifier was "%c", call __builtin_fputc(arg,fp).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if ((nargs != 3)
	  || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2))) != INTEGER_TYPE)
	return NULL_RTX;
      arg = CALL_EXPR_ARG (exp, 2);
      if (fn_fputc)
	fn = build_call_expr (fn_fputc, 2, arg, fp);
    }
  else
    {
      /* We can't handle anything else with % args or %% ... yet.  */
      if (strchr (fmt_str, target_percent))
	return NULL_RTX;

      if (nargs > 2)
	return NULL_RTX;

      /* If the format specifier was "", fprintf does nothing.  */
      if (fmt_str[0] == '\0')
	{
	  /* Evaluate and ignore FILE* argument for side-effects.  */
	  expand_expr (fp, const0_rtx, VOIDmode, EXPAND_NORMAL);
	  return const0_rtx;
	}

      /* When "string" doesn't contain %, replace all cases of
	 fprintf(stream,string) with fputs(string,stream).  The fputs
	 builtin will take care of special cases like length == 1.  */
      if (fn_fputs)
	fn = build_call_expr (fn_fputs, 2, fmt, fp);
    }

  if (!fn)
    return NULL_RTX;
  /* Propagate the original call's tail-call flag to the replacement.  */
  if (TREE_CODE (fn) == CALL_EXPR)
    CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
  return expand_expr (fn, target, mode, EXPAND_NORMAL);
}
5567
5568 /* Expand a call EXP to sprintf. Return NULL_RTX if
5569 a normal call should be emitted rather than expanding the function
5570 inline. If convenient, the result should be placed in TARGET with
5571 mode MODE. */
5572
5573 static rtx
5574 expand_builtin_sprintf (tree exp, rtx target, enum machine_mode mode)
5575 {
5576 tree dest, fmt;
5577 const char *fmt_str;
5578 int nargs = call_expr_nargs (exp);
5579
5580 /* Verify the required arguments in the original call. */
5581 if (nargs < 2)
5582 return NULL_RTX;
5583 dest = CALL_EXPR_ARG (exp, 0);
5584 if (! POINTER_TYPE_P (TREE_TYPE (dest)))
5585 return NULL_RTX;
5586 fmt = CALL_EXPR_ARG (exp, 0);
5587 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5588 return NULL_RTX;
5589
5590 /* Check whether the format is a literal string constant. */
5591 fmt_str = c_getstr (fmt);
5592 if (fmt_str == NULL)
5593 return NULL_RTX;
5594
5595 if (!init_target_chars ())
5596 return NULL_RTX;
5597
5598 /* If the format doesn't contain % args or %%, use strcpy. */
5599 if (strchr (fmt_str, target_percent) == 0)
5600 {
5601 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5602 tree exp;
5603
5604 if ((nargs > 2) || ! fn)
5605 return NULL_RTX;
5606 expand_expr (build_call_expr (fn, 2, dest, fmt),
5607 const0_rtx, VOIDmode, EXPAND_NORMAL);
5608 if (target == const0_rtx)
5609 return const0_rtx;
5610 exp = build_int_cst (NULL_TREE, strlen (fmt_str));
5611 return expand_expr (exp, target, mode, EXPAND_NORMAL);
5612 }
5613 /* If the format is "%s", use strcpy if the result isn't used. */
5614 else if (strcmp (fmt_str, target_percent_s) == 0)
5615 {
5616 tree fn, arg, len;
5617 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5618
5619 if (! fn)
5620 return NULL_RTX;
5621 if (nargs != 3)
5622 return NULL_RTX;
5623 arg = CALL_EXPR_ARG (exp, 2);
5624 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
5625 return NULL_RTX;
5626
5627 if (target != const0_rtx)
5628 {
5629 len = c_strlen (arg, 1);
5630 if (! len || TREE_CODE (len) != INTEGER_CST)
5631 return NULL_RTX;
5632 }
5633 else
5634 len = NULL_TREE;
5635
5636 expand_expr (build_call_expr (fn, 2, dest, arg),
5637 const0_rtx, VOIDmode, EXPAND_NORMAL);
5638
5639 if (target == const0_rtx)
5640 return const0_rtx;
5641 return expand_expr (len, target, mode, EXPAND_NORMAL);
5642 }
5643
5644 return NULL_RTX;
5645 }
5646
5647 /* Expand a call to either the entry or exit function profiler. */
5648
5649 static rtx
5650 expand_builtin_profile_func (bool exitp)
5651 {
5652 rtx this_rtx, which;
5653
5654 this_rtx = DECL_RTL (current_function_decl);
5655 gcc_assert (MEM_P (this_rtx));
5656 this_rtx = XEXP (this_rtx, 0);
5657
5658 if (exitp)
5659 which = profile_function_exit_libfunc;
5660 else
5661 which = profile_function_entry_libfunc;
5662
5663 emit_library_call (which, LCT_NORMAL, VOIDmode, 2, this_rtx, Pmode,
5664 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
5665 0),
5666 Pmode);
5667
5668 return const0_rtx;
5669 }
5670
/* Expand a call to __builtin___clear_cache.

   Three configurations: no clear_cache insn but a CLEAR_INSN_CACHE
   macro (emit a library call), neither (no-op), or a clear_cache insn
   (emit it directly, never a library call).  */

static rtx
expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
{
#ifndef HAVE_clear_cache
#ifdef CLEAR_INSN_CACHE
  /* There is no "clear_cache" insn, and __clear_cache() in libgcc
     does something.  Just do the default expansion to a call to
     __clear_cache().  */
  return NULL_RTX;
#else
  /* There is no "clear_cache" insn, and __clear_cache() in libgcc
     does nothing.  There is no need to call it.  Do nothing.  */
  return const0_rtx;
#endif /* CLEAR_INSN_CACHE */
#else
  /* We have a "clear_cache" insn, and it will handle everything.  */
  tree begin, end;
  rtx begin_rtx, end_rtx;
  enum insn_code icode;

  /* We must not expand to a library call.  If we did, any
     fallback library function in libgcc that might contain a call to
     __builtin___clear_cache() would recurse infinitely.  */
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    {
      error ("both arguments to %<__builtin___clear_cache%> must be pointers");
      return const0_rtx;
    }

  if (HAVE_clear_cache)
    {
      icode = CODE_FOR_clear_cache;

      /* Force each operand into a form the insn's predicate accepts.  */
      begin = CALL_EXPR_ARG (exp, 0);
      begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
      begin_rtx = convert_memory_address (Pmode, begin_rtx);
      if (!insn_data[icode].operand[0].predicate (begin_rtx, Pmode))
	begin_rtx = copy_to_mode_reg (Pmode, begin_rtx);

      end = CALL_EXPR_ARG (exp, 1);
      end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
      end_rtx = convert_memory_address (Pmode, end_rtx);
      if (!insn_data[icode].operand[1].predicate (end_rtx, Pmode))
	end_rtx = copy_to_mode_reg (Pmode, end_rtx);

      emit_insn (gen_clear_cache (begin_rtx, end_rtx));
    }
  return const0_rtx;
#endif /* HAVE_clear_cache */
}
5723
5724 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5725
5726 static rtx
5727 round_trampoline_addr (rtx tramp)
5728 {
5729 rtx temp, addend, mask;
5730
5731 /* If we don't need too much alignment, we'll have been guaranteed
5732 proper alignment by get_trampoline_type. */
5733 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5734 return tramp;
5735
5736 /* Round address up to desired boundary. */
5737 temp = gen_reg_rtx (Pmode);
5738 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5739 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5740
5741 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5742 temp, 0, OPTAB_LIB_WIDEN);
5743 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5744 temp, 0, OPTAB_LIB_WIDEN);
5745
5746 return tramp;
5747 }
5748
/* Expand a call to __builtin_init_trampoline.  The three arguments are
   the trampoline memory block, the nested function's address, and the
   static chain value.  Copies the target's trampoline template into
   the (alignment-rounded) block when one exists, then lets the target
   macro INITIALIZE_TRAMPOLINE fill in function and chain.  Returns
   const0_rtx on success, NULL_RTX if the arguments are malformed.  */

static rtx
expand_builtin_init_trampoline (tree exp)
{
  tree t_tramp, t_func, t_chain;
  rtx r_tramp, r_func, r_chain;
#ifdef TRAMPOLINE_TEMPLATE
  rtx blktramp;
#endif

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
			 POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_tramp = CALL_EXPR_ARG (exp, 0);
  t_func = CALL_EXPR_ARG (exp, 1);
  t_chain = CALL_EXPR_ARG (exp, 2);

  r_tramp = expand_normal (t_tramp);
  r_func = expand_normal (t_func);
  r_chain = expand_normal (t_chain);

  /* Generate insns to initialize the trampoline.  */
  r_tramp = round_trampoline_addr (r_tramp);
#ifdef TRAMPOLINE_TEMPLATE
  /* Copy the target-provided code template into the trampoline block
     before patching in the function address and static chain.  */
  blktramp = gen_rtx_MEM (BLKmode, r_tramp);
  set_mem_align (blktramp, TRAMPOLINE_ALIGNMENT);
  emit_block_move (blktramp, assemble_trampoline_template (),
		   GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
#endif
  /* Record that at least one trampoline exists; other passes use this
     flag (e.g. to force an executable stack where required).  */
  trampolines_created = 1;
  INITIALIZE_TRAMPOLINE (r_tramp, r_func, r_chain);

  return const0_rtx;
}
5783
/* Expand a call to __builtin_adjust_trampoline.  Rounds the given
   trampoline address to TRAMPOLINE_ALIGNMENT and applies the optional
   target macro TRAMPOLINE_ADJUST_ADDRESS (e.g. to set an ISA mode
   bit).  Returns the adjusted address, or NULL_RTX if the argument is
   malformed.  */

static rtx
expand_builtin_adjust_trampoline (tree exp)
{
  rtx tramp;

  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
  tramp = round_trampoline_addr (tramp);
#ifdef TRAMPOLINE_ADJUST_ADDRESS
  TRAMPOLINE_ADJUST_ADDRESS (tramp);
#endif

  return tramp;
}
5800
/* Expand the call EXP to the built-in signbit, signbitf or signbitl
   function.  The function first checks whether the back end provides
   an insn to implement signbit for the respective mode.  If not, it
   checks whether the floating point format of the value is such that
   the sign bit can be extracted.  If that is not the case, the
   function returns NULL_RTX to indicate that a normal call should be
   emitted rather than expanding the function in-line.  EXP is the
   expression that is a call to the builtin function; if convenient,
   the result should be placed in TARGET.  */
static rtx
expand_builtin_signbit (tree exp, rtx target)
{
  const struct real_format *fmt;
  enum machine_mode fmode, imode, rmode;
  HOST_WIDE_INT hi, lo;
  tree arg;
  int word, bitpos;
  enum insn_code icode;
  rtx temp;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* FMODE is the float argument's mode, RMODE the integer result's.  */
  arg = CALL_EXPR_ARG (exp, 0);
  fmode = TYPE_MODE (TREE_TYPE (arg));
  rmode = TYPE_MODE (TREE_TYPE (exp));
  fmt = REAL_MODE_FORMAT (fmode);

  arg = builtin_save_expr (arg);

  /* Expand the argument yielding a RTX expression. */
  temp = expand_normal (arg);

  /* Check if the back end provides an insn that handles signbit for the
     argument's mode. */
  icode = signbit_optab->handlers [(int) fmode].insn_code;
  if (icode != CODE_FOR_nothing)
    {
      target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
      emit_unop_insn (icode, target, temp, UNKNOWN);
      return target;
    }

  /* For floating point formats without a sign bit, implement signbit
     as "ARG < 0.0".  */
  bitpos = fmt->signbit_ro;
  if (bitpos < 0)
  {
    /* But we can't do this if the format supports signed zero.  */
    if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
      return NULL_RTX;

    arg = fold_build2 (LT_EXPR, TREE_TYPE (exp), arg,
		       build_real (TREE_TYPE (arg), dconst0));
    return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
  }

  /* Narrow TEMP to an integer mode containing the sign bit.  For a
     value wider than a word, pick out the word that holds the sign bit
     and rebase BITPOS relative to that word.  */
  if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
    {
      imode = int_mode_for_mode (fmode);
      if (imode == BLKmode)
	return NULL_RTX;
      temp = gen_lowpart (imode, temp);
    }
  else
    {
      imode = word_mode;
      /* Handle targets with different FP word orders.  */
      if (FLOAT_WORDS_BIG_ENDIAN)
	word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
      else
	word = bitpos / BITS_PER_WORD;
      temp = operand_subword_force (temp, word, fmode);
      bitpos = bitpos % BITS_PER_WORD;
    }

  /* Force the intermediate word_mode (or narrower) result into a
     register.  This avoids attempting to create paradoxical SUBREGs
     of floating point modes below.  */
  temp = force_reg (imode, temp);

  /* If the bitpos is within the "result mode" lowpart, the operation
     can be implemented with a single bitwise AND.  Otherwise, we need
     a right shift and an AND.  */

  if (bitpos < GET_MODE_BITSIZE (rmode))
    {
      /* Build the double-word constant 1 << bitpos as a lo/hi pair.  */
      if (bitpos < HOST_BITS_PER_WIDE_INT)
	{
	  hi = 0;
	  lo = (HOST_WIDE_INT) 1 << bitpos;
	}
      else
	{
	  hi = (HOST_WIDE_INT) 1 << (bitpos - HOST_BITS_PER_WIDE_INT);
	  lo = 0;
	}

      if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
	temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp,
			   immed_double_const (lo, hi, rmode),
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }
  else
    {
      /* Perform a logical right shift to place the signbit in the least
	 significant bit, then truncate the result to the desired mode
	 and mask just this bit.  */
      temp = expand_shift (RSHIFT_EXPR, imode, temp,
			   build_int_cst (NULL_TREE, bitpos), NULL_RTX, 1);
      temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp, const1_rtx,
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }

  return temp;
}
5919
5920 /* Expand fork or exec calls. TARGET is the desired target of the
5921 call. EXP is the call. FN is the
5922 identificator of the actual function. IGNORE is nonzero if the
5923 value is to be ignored. */
5924
5925 static rtx
5926 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5927 {
5928 tree id, decl;
5929 tree call;
5930
5931 /* If we are not profiling, just call the function. */
5932 if (!profile_arc_flag)
5933 return NULL_RTX;
5934
5935 /* Otherwise call the wrapper. This should be equivalent for the rest of
5936 compiler, so the code does not diverge, and the wrapper may run the
5937 code necessary for keeping the profiling sane. */
5938
5939 switch (DECL_FUNCTION_CODE (fn))
5940 {
5941 case BUILT_IN_FORK:
5942 id = get_identifier ("__gcov_fork");
5943 break;
5944
5945 case BUILT_IN_EXECL:
5946 id = get_identifier ("__gcov_execl");
5947 break;
5948
5949 case BUILT_IN_EXECV:
5950 id = get_identifier ("__gcov_execv");
5951 break;
5952
5953 case BUILT_IN_EXECLP:
5954 id = get_identifier ("__gcov_execlp");
5955 break;
5956
5957 case BUILT_IN_EXECLE:
5958 id = get_identifier ("__gcov_execle");
5959 break;
5960
5961 case BUILT_IN_EXECVP:
5962 id = get_identifier ("__gcov_execvp");
5963 break;
5964
5965 case BUILT_IN_EXECVE:
5966 id = get_identifier ("__gcov_execve");
5967 break;
5968
5969 default:
5970 gcc_unreachable ();
5971 }
5972
5973 decl = build_decl (FUNCTION_DECL, id, TREE_TYPE (fn));
5974 DECL_EXTERNAL (decl) = 1;
5975 TREE_PUBLIC (decl) = 1;
5976 DECL_ARTIFICIAL (decl) = 1;
5977 TREE_NOTHROW (decl) = 1;
5978 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5979 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5980 call = rewrite_call_expr (exp, 0, decl, 0);
5981 return expand_call (call, target, ignore);
5982 }
5983
5984
5985 \f
5986 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5987 the pointer in these functions is void*, the tree optimizers may remove
5988 casts. The mode computed in expand_builtin isn't reliable either, due
5989 to __sync_bool_compare_and_swap.
5990
5991 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5992 group of builtins. This gives us log2 of the mode size. */
5993
5994 static inline enum machine_mode
5995 get_builtin_sync_mode (int fcode_diff)
5996 {
5997 /* The size is not negotiable, so ask not to get BLKmode in return
5998 if the target indicates that a smaller size would be better. */
5999 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
6000 }
6001
6002 /* Expand the memory expression LOC and return the appropriate memory operand
6003 for the builtin_sync operations. */
6004
6005 static rtx
6006 get_builtin_sync_mem (tree loc, enum machine_mode mode)
6007 {
6008 rtx addr, mem;
6009
6010 addr = expand_expr (loc, NULL_RTX, Pmode, EXPAND_SUM);
6011
6012 /* Note that we explicitly do not want any alias information for this
6013 memory, so that we kill all other live memories. Otherwise we don't
6014 satisfy the full barrier semantics of the intrinsic. */
6015 mem = validize_mem (gen_rtx_MEM (mode, addr));
6016
6017 set_mem_align (mem, get_pointer_alignment (loc, BIGGEST_ALIGNMENT));
6018 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
6019 MEM_VOLATILE_P (mem) = 1;
6020
6021 return mem;
6022 }
6023
/* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
   EXP is the CALL_EXPR.  CODE is the rtx code
   that corresponds to the arithmetic or logical operation from the name;
   an exception here is that NOT actually means NAND.  TARGET is an optional
   place for us to store the results; AFTER is true if this is the
   fetch_and_xxx form.  IGNORE is true if we don't actually care about
   the result of the operation at all.  */

static rtx
expand_builtin_sync_operation (enum machine_mode mode, tree exp,
			       enum rtx_code code, bool after,
			       rtx target, bool ignore)
{
  rtx val, mem;
  enum machine_mode old_mode;

  /* For the NAND forms, warn (once per form, via the function-static
     flags below) that their semantics changed in GCC 4.4.  */
  if (code == NOT && warn_sync_nand)
    {
      tree fndecl = get_callee_fndecl (exp);
      enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

      static bool warned_f_a_n, warned_n_a_f;

      switch (fcode)
	{
	case BUILT_IN_FETCH_AND_NAND_1:
	case BUILT_IN_FETCH_AND_NAND_2:
	case BUILT_IN_FETCH_AND_NAND_4:
	case BUILT_IN_FETCH_AND_NAND_8:
	case BUILT_IN_FETCH_AND_NAND_16:

	  if (warned_f_a_n)
	    break;

	  fndecl = implicit_built_in_decls[BUILT_IN_FETCH_AND_NAND_N];
	  inform (input_location,
		  "%qD changed semantics in GCC 4.4", fndecl);
	  warned_f_a_n = true;
	  break;

	case BUILT_IN_NAND_AND_FETCH_1:
	case BUILT_IN_NAND_AND_FETCH_2:
	case BUILT_IN_NAND_AND_FETCH_4:
	case BUILT_IN_NAND_AND_FETCH_8:
	case BUILT_IN_NAND_AND_FETCH_16:

	  if (warned_n_a_f)
	    break;

	  fndecl = implicit_built_in_decls[BUILT_IN_NAND_AND_FETCH_N];
	  inform (input_location,
		  "%qD changed semantics in GCC 4.4", fndecl);
	  warned_n_a_f = true;
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
  /* If VAL is promoted to a wider mode, convert it back to MODE.  Take care
     of CONST_INTs, where we know the old_mode only from the call argument.  */
  old_mode = GET_MODE (val);
  if (old_mode == VOIDmode)
    old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
  val = convert_modes (mode, old_mode, val, 1);

  if (ignore)
    return expand_sync_operation (mem, val, code);
  else
    return expand_sync_fetch_operation (mem, val, code, after, target);
}
6100
6101 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
6102 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
6103 true if this is the boolean form. TARGET is a place for us to store the
6104 results; this is NOT optional if IS_BOOL is true. */
6105
6106 static rtx
6107 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
6108 bool is_bool, rtx target)
6109 {
6110 rtx old_val, new_val, mem;
6111 enum machine_mode old_mode;
6112
6113 /* Expand the operands. */
6114 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6115
6116
6117 old_val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX,
6118 mode, EXPAND_NORMAL);
6119 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6120 of CONST_INTs, where we know the old_mode only from the call argument. */
6121 old_mode = GET_MODE (old_val);
6122 if (old_mode == VOIDmode)
6123 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
6124 old_val = convert_modes (mode, old_mode, old_val, 1);
6125
6126 new_val = expand_expr (CALL_EXPR_ARG (exp, 2), NULL_RTX,
6127 mode, EXPAND_NORMAL);
6128 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6129 of CONST_INTs, where we know the old_mode only from the call argument. */
6130 old_mode = GET_MODE (new_val);
6131 if (old_mode == VOIDmode)
6132 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2)));
6133 new_val = convert_modes (mode, old_mode, new_val, 1);
6134
6135 if (is_bool)
6136 return expand_bool_compare_and_swap (mem, old_val, new_val, target);
6137 else
6138 return expand_val_compare_and_swap (mem, old_val, new_val, target);
6139 }
6140
6141 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
6142 general form is actually an atomic exchange, and some targets only
6143 support a reduced form with the second argument being a constant 1.
6144 EXP is the CALL_EXPR; TARGET is an optional place for us to store
6145 the results. */
6146
6147 static rtx
6148 expand_builtin_lock_test_and_set (enum machine_mode mode, tree exp,
6149 rtx target)
6150 {
6151 rtx val, mem;
6152 enum machine_mode old_mode;
6153
6154 /* Expand the operands. */
6155 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6156 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
6157 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6158 of CONST_INTs, where we know the old_mode only from the call argument. */
6159 old_mode = GET_MODE (val);
6160 if (old_mode == VOIDmode)
6161 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
6162 val = convert_modes (mode, old_mode, val, 1);
6163
6164 return expand_sync_lock_test_and_set (mem, val, target);
6165 }
6166
/* Expand the __sync_synchronize intrinsic.  Emits a full memory
   barrier, trying in order: the target's memory_barrier insn, the
   target's synchronize libfunc, and finally a volatile asm with a
   "memory" clobber to at least stop the compiler from reordering
   memory accesses across this point.  */

static void
expand_builtin_synchronize (void)
{
  tree x;

#ifdef HAVE_memory_barrier
  if (HAVE_memory_barrier)
    {
      emit_insn (gen_memory_barrier ());
      return;
    }
#endif

  if (synchronize_libfunc != NULL_RTX)
    {
      emit_library_call (synchronize_libfunc, LCT_NORMAL, VOIDmode, 0);
      return;
    }

  /* If no explicit memory barrier instruction is available, create an
     empty asm stmt with a memory clobber.  */
  x = build4 (ASM_EXPR, void_type_node, build_string (0, ""), NULL, NULL,
	      tree_cons (NULL, build_string (6, "memory"), NULL));
  ASM_VOLATILE_P (x) = 1;
  expand_asm_expr (x);
}
6195
6196 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
6197
6198 static void
6199 expand_builtin_lock_release (enum machine_mode mode, tree exp)
6200 {
6201 enum insn_code icode;
6202 rtx mem, insn;
6203 rtx val = const0_rtx;
6204
6205 /* Expand the operands. */
6206 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6207
6208 /* If there is an explicit operation in the md file, use it. */
6209 icode = sync_lock_release[mode];
6210 if (icode != CODE_FOR_nothing)
6211 {
6212 if (!insn_data[icode].operand[1].predicate (val, mode))
6213 val = force_reg (mode, val);
6214
6215 insn = GEN_FCN (icode) (mem, val);
6216 if (insn)
6217 {
6218 emit_insn (insn);
6219 return;
6220 }
6221 }
6222
6223 /* Otherwise we can implement this operation by emitting a barrier
6224 followed by a store of zero. */
6225 expand_builtin_synchronize ();
6226 emit_move_insn (mem, val);
6227 }
6228 \f
6229 /* Expand an expression EXP that calls a built-in function,
6230 with result going to TARGET if that's convenient
6231 (and in mode MODE if that's convenient).
6232 SUBTARGET may be used as the target for computing one of EXP's operands.
6233 IGNORE is nonzero if the value is to be ignored. */
6234
6235 rtx
6236 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
6237 int ignore)
6238 {
6239 tree fndecl = get_callee_fndecl (exp);
6240 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6241 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
6242
6243 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6244 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6245
6246 /* When not optimizing, generate calls to library functions for a certain
6247 set of builtins. */
6248 if (!optimize
6249 && !called_as_built_in (fndecl)
6250 && DECL_ASSEMBLER_NAME_SET_P (fndecl)
6251 && fcode != BUILT_IN_ALLOCA
6252 && fcode != BUILT_IN_FREE)
6253 return expand_call (exp, target, ignore);
6254
6255 /* The built-in function expanders test for target == const0_rtx
6256 to determine whether the function's result will be ignored. */
6257 if (ignore)
6258 target = const0_rtx;
6259
6260 /* If the result of a pure or const built-in function is ignored, and
6261 none of its arguments are volatile, we can avoid expanding the
6262 built-in call and just evaluate the arguments for side-effects. */
6263 if (target == const0_rtx
6264 && (DECL_PURE_P (fndecl) || TREE_READONLY (fndecl)))
6265 {
6266 bool volatilep = false;
6267 tree arg;
6268 call_expr_arg_iterator iter;
6269
6270 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6271 if (TREE_THIS_VOLATILE (arg))
6272 {
6273 volatilep = true;
6274 break;
6275 }
6276
6277 if (! volatilep)
6278 {
6279 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6280 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
6281 return const0_rtx;
6282 }
6283 }
6284
6285 switch (fcode)
6286 {
6287 CASE_FLT_FN (BUILT_IN_FABS):
6288 target = expand_builtin_fabs (exp, target, subtarget);
6289 if (target)
6290 return target;
6291 break;
6292
6293 CASE_FLT_FN (BUILT_IN_COPYSIGN):
6294 target = expand_builtin_copysign (exp, target, subtarget);
6295 if (target)
6296 return target;
6297 break;
6298
6299 /* Just do a normal library call if we were unable to fold
6300 the values. */
6301 CASE_FLT_FN (BUILT_IN_CABS):
6302 break;
6303
6304 CASE_FLT_FN (BUILT_IN_EXP):
6305 CASE_FLT_FN (BUILT_IN_EXP10):
6306 CASE_FLT_FN (BUILT_IN_POW10):
6307 CASE_FLT_FN (BUILT_IN_EXP2):
6308 CASE_FLT_FN (BUILT_IN_EXPM1):
6309 CASE_FLT_FN (BUILT_IN_LOGB):
6310 CASE_FLT_FN (BUILT_IN_LOG):
6311 CASE_FLT_FN (BUILT_IN_LOG10):
6312 CASE_FLT_FN (BUILT_IN_LOG2):
6313 CASE_FLT_FN (BUILT_IN_LOG1P):
6314 CASE_FLT_FN (BUILT_IN_TAN):
6315 CASE_FLT_FN (BUILT_IN_ASIN):
6316 CASE_FLT_FN (BUILT_IN_ACOS):
6317 CASE_FLT_FN (BUILT_IN_ATAN):
6318 /* Treat these like sqrt only if unsafe math optimizations are allowed,
6319 because of possible accuracy problems. */
6320 if (! flag_unsafe_math_optimizations)
6321 break;
6322 CASE_FLT_FN (BUILT_IN_SQRT):
6323 CASE_FLT_FN (BUILT_IN_FLOOR):
6324 CASE_FLT_FN (BUILT_IN_CEIL):
6325 CASE_FLT_FN (BUILT_IN_TRUNC):
6326 CASE_FLT_FN (BUILT_IN_ROUND):
6327 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6328 CASE_FLT_FN (BUILT_IN_RINT):
6329 target = expand_builtin_mathfn (exp, target, subtarget);
6330 if (target)
6331 return target;
6332 break;
6333
6334 CASE_FLT_FN (BUILT_IN_ILOGB):
6335 if (! flag_unsafe_math_optimizations)
6336 break;
6337 CASE_FLT_FN (BUILT_IN_ISINF):
6338 CASE_FLT_FN (BUILT_IN_FINITE):
6339 case BUILT_IN_ISFINITE:
6340 case BUILT_IN_ISNORMAL:
6341 target = expand_builtin_interclass_mathfn (exp, target, subtarget);
6342 if (target)
6343 return target;
6344 break;
6345
6346 CASE_FLT_FN (BUILT_IN_LCEIL):
6347 CASE_FLT_FN (BUILT_IN_LLCEIL):
6348 CASE_FLT_FN (BUILT_IN_LFLOOR):
6349 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6350 target = expand_builtin_int_roundingfn (exp, target);
6351 if (target)
6352 return target;
6353 break;
6354
6355 CASE_FLT_FN (BUILT_IN_LRINT):
6356 CASE_FLT_FN (BUILT_IN_LLRINT):
6357 CASE_FLT_FN (BUILT_IN_LROUND):
6358 CASE_FLT_FN (BUILT_IN_LLROUND):
6359 target = expand_builtin_int_roundingfn_2 (exp, target);
6360 if (target)
6361 return target;
6362 break;
6363
6364 CASE_FLT_FN (BUILT_IN_POW):
6365 target = expand_builtin_pow (exp, target, subtarget);
6366 if (target)
6367 return target;
6368 break;
6369
6370 CASE_FLT_FN (BUILT_IN_POWI):
6371 target = expand_builtin_powi (exp, target, subtarget);
6372 if (target)
6373 return target;
6374 break;
6375
6376 CASE_FLT_FN (BUILT_IN_ATAN2):
6377 CASE_FLT_FN (BUILT_IN_LDEXP):
6378 CASE_FLT_FN (BUILT_IN_SCALB):
6379 CASE_FLT_FN (BUILT_IN_SCALBN):
6380 CASE_FLT_FN (BUILT_IN_SCALBLN):
6381 if (! flag_unsafe_math_optimizations)
6382 break;
6383
6384 CASE_FLT_FN (BUILT_IN_FMOD):
6385 CASE_FLT_FN (BUILT_IN_REMAINDER):
6386 CASE_FLT_FN (BUILT_IN_DREM):
6387 target = expand_builtin_mathfn_2 (exp, target, subtarget);
6388 if (target)
6389 return target;
6390 break;
6391
6392 CASE_FLT_FN (BUILT_IN_CEXPI):
6393 target = expand_builtin_cexpi (exp, target, subtarget);
6394 gcc_assert (target);
6395 return target;
6396
6397 CASE_FLT_FN (BUILT_IN_SIN):
6398 CASE_FLT_FN (BUILT_IN_COS):
6399 if (! flag_unsafe_math_optimizations)
6400 break;
6401 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6402 if (target)
6403 return target;
6404 break;
6405
6406 CASE_FLT_FN (BUILT_IN_SINCOS):
6407 if (! flag_unsafe_math_optimizations)
6408 break;
6409 target = expand_builtin_sincos (exp);
6410 if (target)
6411 return target;
6412 break;
6413
6414 case BUILT_IN_APPLY_ARGS:
6415 return expand_builtin_apply_args ();
6416
6417 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6418 FUNCTION with a copy of the parameters described by
6419 ARGUMENTS, and ARGSIZE. It returns a block of memory
6420 allocated on the stack into which is stored all the registers
6421 that might possibly be used for returning the result of a
6422 function. ARGUMENTS is the value returned by
6423 __builtin_apply_args. ARGSIZE is the number of bytes of
6424 arguments that must be copied. ??? How should this value be
6425 computed? We'll also need a safe worst case value for varargs
6426 functions. */
6427 case BUILT_IN_APPLY:
6428 if (!validate_arglist (exp, POINTER_TYPE,
6429 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6430 && !validate_arglist (exp, REFERENCE_TYPE,
6431 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6432 return const0_rtx;
6433 else
6434 {
6435 rtx ops[3];
6436
6437 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6438 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6439 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6440
6441 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6442 }
6443
6444 /* __builtin_return (RESULT) causes the function to return the
6445 value described by RESULT. RESULT is address of the block of
6446 memory returned by __builtin_apply. */
6447 case BUILT_IN_RETURN:
6448 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6449 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6450 return const0_rtx;
6451
6452 case BUILT_IN_SAVEREGS:
6453 return expand_builtin_saveregs ();
6454
6455 case BUILT_IN_ARGS_INFO:
6456 return expand_builtin_args_info (exp);
6457
6458 case BUILT_IN_VA_ARG_PACK:
6459 /* All valid uses of __builtin_va_arg_pack () are removed during
6460 inlining. */
6461 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6462 return const0_rtx;
6463
6464 case BUILT_IN_VA_ARG_PACK_LEN:
6465 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6466 inlining. */
6467 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6468 return const0_rtx;
6469
6470 /* Return the address of the first anonymous stack arg. */
6471 case BUILT_IN_NEXT_ARG:
6472 if (fold_builtin_next_arg (exp, false))
6473 return const0_rtx;
6474 return expand_builtin_next_arg ();
6475
6476 case BUILT_IN_CLEAR_CACHE:
6477 target = expand_builtin___clear_cache (exp);
6478 if (target)
6479 return target;
6480 break;
6481
6482 case BUILT_IN_CLASSIFY_TYPE:
6483 return expand_builtin_classify_type (exp);
6484
6485 case BUILT_IN_CONSTANT_P:
6486 return const0_rtx;
6487
6488 case BUILT_IN_FRAME_ADDRESS:
6489 case BUILT_IN_RETURN_ADDRESS:
6490 return expand_builtin_frame_address (fndecl, exp);
6491
6492 /* Returns the address of the area where the structure is returned.
6493 0 otherwise. */
6494 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6495 if (call_expr_nargs (exp) != 0
6496 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6497 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6498 return const0_rtx;
6499 else
6500 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6501
6502 case BUILT_IN_ALLOCA:
6503 target = expand_builtin_alloca (exp, target);
6504 if (target)
6505 return target;
6506 break;
6507
6508 case BUILT_IN_STACK_SAVE:
6509 return expand_stack_save ();
6510
6511 case BUILT_IN_STACK_RESTORE:
6512 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6513 return const0_rtx;
6514
6515 case BUILT_IN_BSWAP32:
6516 case BUILT_IN_BSWAP64:
6517 target = expand_builtin_bswap (exp, target, subtarget);
6518
6519 if (target)
6520 return target;
6521 break;
6522
6523 CASE_INT_FN (BUILT_IN_FFS):
6524 case BUILT_IN_FFSIMAX:
6525 target = expand_builtin_unop (target_mode, exp, target,
6526 subtarget, ffs_optab);
6527 if (target)
6528 return target;
6529 break;
6530
6531 CASE_INT_FN (BUILT_IN_CLZ):
6532 case BUILT_IN_CLZIMAX:
6533 target = expand_builtin_unop (target_mode, exp, target,
6534 subtarget, clz_optab);
6535 if (target)
6536 return target;
6537 break;
6538
6539 CASE_INT_FN (BUILT_IN_CTZ):
6540 case BUILT_IN_CTZIMAX:
6541 target = expand_builtin_unop (target_mode, exp, target,
6542 subtarget, ctz_optab);
6543 if (target)
6544 return target;
6545 break;
6546
6547 CASE_INT_FN (BUILT_IN_POPCOUNT):
6548 case BUILT_IN_POPCOUNTIMAX:
6549 target = expand_builtin_unop (target_mode, exp, target,
6550 subtarget, popcount_optab);
6551 if (target)
6552 return target;
6553 break;
6554
6555 CASE_INT_FN (BUILT_IN_PARITY):
6556 case BUILT_IN_PARITYIMAX:
6557 target = expand_builtin_unop (target_mode, exp, target,
6558 subtarget, parity_optab);
6559 if (target)
6560 return target;
6561 break;
6562
6563 case BUILT_IN_STRLEN:
6564 target = expand_builtin_strlen (exp, target, target_mode);
6565 if (target)
6566 return target;
6567 break;
6568
6569 case BUILT_IN_STRCPY:
6570 target = expand_builtin_strcpy (fndecl, exp, target, mode);
6571 if (target)
6572 return target;
6573 break;
6574
6575 case BUILT_IN_STRNCPY:
6576 target = expand_builtin_strncpy (exp, target, mode);
6577 if (target)
6578 return target;
6579 break;
6580
6581 case BUILT_IN_STPCPY:
6582 target = expand_builtin_stpcpy (exp, target, mode);
6583 if (target)
6584 return target;
6585 break;
6586
6587 case BUILT_IN_STRCAT:
6588 target = expand_builtin_strcat (fndecl, exp, target, mode);
6589 if (target)
6590 return target;
6591 break;
6592
6593 case BUILT_IN_STRNCAT:
6594 target = expand_builtin_strncat (exp, target, mode);
6595 if (target)
6596 return target;
6597 break;
6598
6599 case BUILT_IN_STRSPN:
6600 target = expand_builtin_strspn (exp, target, mode);
6601 if (target)
6602 return target;
6603 break;
6604
6605 case BUILT_IN_STRCSPN:
6606 target = expand_builtin_strcspn (exp, target, mode);
6607 if (target)
6608 return target;
6609 break;
6610
6611 case BUILT_IN_STRSTR:
6612 target = expand_builtin_strstr (exp, target, mode);
6613 if (target)
6614 return target;
6615 break;
6616
6617 case BUILT_IN_STRPBRK:
6618 target = expand_builtin_strpbrk (exp, target, mode);
6619 if (target)
6620 return target;
6621 break;
6622
6623 case BUILT_IN_INDEX:
6624 case BUILT_IN_STRCHR:
6625 target = expand_builtin_strchr (exp, target, mode);
6626 if (target)
6627 return target;
6628 break;
6629
6630 case BUILT_IN_RINDEX:
6631 case BUILT_IN_STRRCHR:
6632 target = expand_builtin_strrchr (exp, target, mode);
6633 if (target)
6634 return target;
6635 break;
6636
6637 case BUILT_IN_MEMCPY:
6638 target = expand_builtin_memcpy (exp, target, mode);
6639 if (target)
6640 return target;
6641 break;
6642
6643 case BUILT_IN_MEMPCPY:
6644 target = expand_builtin_mempcpy (exp, target, mode);
6645 if (target)
6646 return target;
6647 break;
6648
6649 case BUILT_IN_MEMMOVE:
6650 target = expand_builtin_memmove (exp, target, mode, ignore);
6651 if (target)
6652 return target;
6653 break;
6654
6655 case BUILT_IN_BCOPY:
6656 target = expand_builtin_bcopy (exp, ignore);
6657 if (target)
6658 return target;
6659 break;
6660
6661 case BUILT_IN_MEMSET:
6662 target = expand_builtin_memset (exp, target, mode);
6663 if (target)
6664 return target;
6665 break;
6666
6667 case BUILT_IN_BZERO:
6668 target = expand_builtin_bzero (exp);
6669 if (target)
6670 return target;
6671 break;
6672
6673 case BUILT_IN_STRCMP:
6674 target = expand_builtin_strcmp (exp, target, mode);
6675 if (target)
6676 return target;
6677 break;
6678
6679 case BUILT_IN_STRNCMP:
6680 target = expand_builtin_strncmp (exp, target, mode);
6681 if (target)
6682 return target;
6683 break;
6684
6685 case BUILT_IN_MEMCHR:
6686 target = expand_builtin_memchr (exp, target, mode);
6687 if (target)
6688 return target;
6689 break;
6690
6691 case BUILT_IN_BCMP:
6692 case BUILT_IN_MEMCMP:
6693 target = expand_builtin_memcmp (exp, target, mode);
6694 if (target)
6695 return target;
6696 break;
6697
6698 case BUILT_IN_SETJMP:
6699 /* This should have been lowered to the builtins below. */
6700 gcc_unreachable ();
6701
6702 case BUILT_IN_SETJMP_SETUP:
6703 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6704 and the receiver label. */
6705 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6706 {
6707 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6708 VOIDmode, EXPAND_NORMAL);
6709 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6710 rtx label_r = label_rtx (label);
6711
6712 /* This is copied from the handling of non-local gotos. */
6713 expand_builtin_setjmp_setup (buf_addr, label_r);
6714 nonlocal_goto_handler_labels
6715 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6716 nonlocal_goto_handler_labels);
6717 /* ??? Do not let expand_label treat us as such since we would
6718 not want to be both on the list of non-local labels and on
6719 the list of forced labels. */
6720 FORCED_LABEL (label) = 0;
6721 return const0_rtx;
6722 }
6723 break;
6724
6725 case BUILT_IN_SETJMP_DISPATCHER:
6726 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6727 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6728 {
6729 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6730 rtx label_r = label_rtx (label);
6731
6732 /* Remove the dispatcher label from the list of non-local labels
6733 since the receiver labels have been added to it above. */
6734 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6735 return const0_rtx;
6736 }
6737 break;
6738
6739 case BUILT_IN_SETJMP_RECEIVER:
6740 /* __builtin_setjmp_receiver is passed the receiver label. */
6741 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6742 {
6743 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6744 rtx label_r = label_rtx (label);
6745
6746 expand_builtin_setjmp_receiver (label_r);
6747 return const0_rtx;
6748 }
6749 break;
6750
6751 /* __builtin_longjmp is passed a pointer to an array of five words.
6752 It's similar to the C library longjmp function but works with
6753 __builtin_setjmp above. */
6754 case BUILT_IN_LONGJMP:
6755 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6756 {
6757 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6758 VOIDmode, EXPAND_NORMAL);
6759 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6760
6761 if (value != const1_rtx)
6762 {
6763 error ("%<__builtin_longjmp%> second argument must be 1");
6764 return const0_rtx;
6765 }
6766
6767 expand_builtin_longjmp (buf_addr, value);
6768 return const0_rtx;
6769 }
6770 break;
6771
6772 case BUILT_IN_NONLOCAL_GOTO:
6773 target = expand_builtin_nonlocal_goto (exp);
6774 if (target)
6775 return target;
6776 break;
6777
6778 /* This updates the setjmp buffer that is its argument with the value
6779 of the current stack pointer. */
6780 case BUILT_IN_UPDATE_SETJMP_BUF:
6781 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6782 {
6783 rtx buf_addr
6784 = expand_normal (CALL_EXPR_ARG (exp, 0));
6785
6786 expand_builtin_update_setjmp_buf (buf_addr);
6787 return const0_rtx;
6788 }
6789 break;
6790
6791 case BUILT_IN_TRAP:
6792 expand_builtin_trap ();
6793 return const0_rtx;
6794
6795 case BUILT_IN_PRINTF:
6796 target = expand_builtin_printf (exp, target, mode, false);
6797 if (target)
6798 return target;
6799 break;
6800
6801 case BUILT_IN_PRINTF_UNLOCKED:
6802 target = expand_builtin_printf (exp, target, mode, true);
6803 if (target)
6804 return target;
6805 break;
6806
6807 case BUILT_IN_FPUTS:
6808 target = expand_builtin_fputs (exp, target, false);
6809 if (target)
6810 return target;
6811 break;
6812 case BUILT_IN_FPUTS_UNLOCKED:
6813 target = expand_builtin_fputs (exp, target, true);
6814 if (target)
6815 return target;
6816 break;
6817
6818 case BUILT_IN_FPRINTF:
6819 target = expand_builtin_fprintf (exp, target, mode, false);
6820 if (target)
6821 return target;
6822 break;
6823
6824 case BUILT_IN_FPRINTF_UNLOCKED:
6825 target = expand_builtin_fprintf (exp, target, mode, true);
6826 if (target)
6827 return target;
6828 break;
6829
6830 case BUILT_IN_SPRINTF:
6831 target = expand_builtin_sprintf (exp, target, mode);
6832 if (target)
6833 return target;
6834 break;
6835
6836 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6837 case BUILT_IN_SIGNBITD32:
6838 case BUILT_IN_SIGNBITD64:
6839 case BUILT_IN_SIGNBITD128:
6840 target = expand_builtin_signbit (exp, target);
6841 if (target)
6842 return target;
6843 break;
6844
6845 /* Various hooks for the DWARF 2 __throw routine. */
6846 case BUILT_IN_UNWIND_INIT:
6847 expand_builtin_unwind_init ();
6848 return const0_rtx;
6849 case BUILT_IN_DWARF_CFA:
6850 return virtual_cfa_rtx;
6851 #ifdef DWARF2_UNWIND_INFO
6852 case BUILT_IN_DWARF_SP_COLUMN:
6853 return expand_builtin_dwarf_sp_column ();
6854 case BUILT_IN_INIT_DWARF_REG_SIZES:
6855 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6856 return const0_rtx;
6857 #endif
6858 case BUILT_IN_FROB_RETURN_ADDR:
6859 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6860 case BUILT_IN_EXTRACT_RETURN_ADDR:
6861 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6862 case BUILT_IN_EH_RETURN:
6863 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6864 CALL_EXPR_ARG (exp, 1));
6865 return const0_rtx;
6866 #ifdef EH_RETURN_DATA_REGNO
6867 case BUILT_IN_EH_RETURN_DATA_REGNO:
6868 return expand_builtin_eh_return_data_regno (exp);
6869 #endif
6870 case BUILT_IN_EXTEND_POINTER:
6871 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6872
6873 case BUILT_IN_VA_START:
6874 return expand_builtin_va_start (exp);
6875 case BUILT_IN_VA_END:
6876 return expand_builtin_va_end (exp);
6877 case BUILT_IN_VA_COPY:
6878 return expand_builtin_va_copy (exp);
6879 case BUILT_IN_EXPECT:
6880 return expand_builtin_expect (exp, target);
6881 case BUILT_IN_PREFETCH:
6882 expand_builtin_prefetch (exp);
6883 return const0_rtx;
6884
6885 case BUILT_IN_PROFILE_FUNC_ENTER:
6886 return expand_builtin_profile_func (false);
6887 case BUILT_IN_PROFILE_FUNC_EXIT:
6888 return expand_builtin_profile_func (true);
6889
6890 case BUILT_IN_INIT_TRAMPOLINE:
6891 return expand_builtin_init_trampoline (exp);
6892 case BUILT_IN_ADJUST_TRAMPOLINE:
6893 return expand_builtin_adjust_trampoline (exp);
6894
6895 case BUILT_IN_FORK:
6896 case BUILT_IN_EXECL:
6897 case BUILT_IN_EXECV:
6898 case BUILT_IN_EXECLP:
6899 case BUILT_IN_EXECLE:
6900 case BUILT_IN_EXECVP:
6901 case BUILT_IN_EXECVE:
6902 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6903 if (target)
6904 return target;
6905 break;
6906
6907 case BUILT_IN_FETCH_AND_ADD_1:
6908 case BUILT_IN_FETCH_AND_ADD_2:
6909 case BUILT_IN_FETCH_AND_ADD_4:
6910 case BUILT_IN_FETCH_AND_ADD_8:
6911 case BUILT_IN_FETCH_AND_ADD_16:
6912 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_ADD_1);
6913 target = expand_builtin_sync_operation (mode, exp, PLUS,
6914 false, target, ignore);
6915 if (target)
6916 return target;
6917 break;
6918
6919 case BUILT_IN_FETCH_AND_SUB_1:
6920 case BUILT_IN_FETCH_AND_SUB_2:
6921 case BUILT_IN_FETCH_AND_SUB_4:
6922 case BUILT_IN_FETCH_AND_SUB_8:
6923 case BUILT_IN_FETCH_AND_SUB_16:
6924 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_SUB_1);
6925 target = expand_builtin_sync_operation (mode, exp, MINUS,
6926 false, target, ignore);
6927 if (target)
6928 return target;
6929 break;
6930
6931 case BUILT_IN_FETCH_AND_OR_1:
6932 case BUILT_IN_FETCH_AND_OR_2:
6933 case BUILT_IN_FETCH_AND_OR_4:
6934 case BUILT_IN_FETCH_AND_OR_8:
6935 case BUILT_IN_FETCH_AND_OR_16:
6936 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_OR_1);
6937 target = expand_builtin_sync_operation (mode, exp, IOR,
6938 false, target, ignore);
6939 if (target)
6940 return target;
6941 break;
6942
6943 case BUILT_IN_FETCH_AND_AND_1:
6944 case BUILT_IN_FETCH_AND_AND_2:
6945 case BUILT_IN_FETCH_AND_AND_4:
6946 case BUILT_IN_FETCH_AND_AND_8:
6947 case BUILT_IN_FETCH_AND_AND_16:
6948 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_AND_1);
6949 target = expand_builtin_sync_operation (mode, exp, AND,
6950 false, target, ignore);
6951 if (target)
6952 return target;
6953 break;
6954
6955 case BUILT_IN_FETCH_AND_XOR_1:
6956 case BUILT_IN_FETCH_AND_XOR_2:
6957 case BUILT_IN_FETCH_AND_XOR_4:
6958 case BUILT_IN_FETCH_AND_XOR_8:
6959 case BUILT_IN_FETCH_AND_XOR_16:
6960 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_XOR_1);
6961 target = expand_builtin_sync_operation (mode, exp, XOR,
6962 false, target, ignore);
6963 if (target)
6964 return target;
6965 break;
6966
6967 case BUILT_IN_FETCH_AND_NAND_1:
6968 case BUILT_IN_FETCH_AND_NAND_2:
6969 case BUILT_IN_FETCH_AND_NAND_4:
6970 case BUILT_IN_FETCH_AND_NAND_8:
6971 case BUILT_IN_FETCH_AND_NAND_16:
6972 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_NAND_1);
6973 target = expand_builtin_sync_operation (mode, exp, NOT,
6974 false, target, ignore);
6975 if (target)
6976 return target;
6977 break;
6978
6979 case BUILT_IN_ADD_AND_FETCH_1:
6980 case BUILT_IN_ADD_AND_FETCH_2:
6981 case BUILT_IN_ADD_AND_FETCH_4:
6982 case BUILT_IN_ADD_AND_FETCH_8:
6983 case BUILT_IN_ADD_AND_FETCH_16:
6984 mode = get_builtin_sync_mode (fcode - BUILT_IN_ADD_AND_FETCH_1);
6985 target = expand_builtin_sync_operation (mode, exp, PLUS,
6986 true, target, ignore);
6987 if (target)
6988 return target;
6989 break;
6990
6991 case BUILT_IN_SUB_AND_FETCH_1:
6992 case BUILT_IN_SUB_AND_FETCH_2:
6993 case BUILT_IN_SUB_AND_FETCH_4:
6994 case BUILT_IN_SUB_AND_FETCH_8:
6995 case BUILT_IN_SUB_AND_FETCH_16:
6996 mode = get_builtin_sync_mode (fcode - BUILT_IN_SUB_AND_FETCH_1);
6997 target = expand_builtin_sync_operation (mode, exp, MINUS,
6998 true, target, ignore);
6999 if (target)
7000 return target;
7001 break;
7002
7003 case BUILT_IN_OR_AND_FETCH_1:
7004 case BUILT_IN_OR_AND_FETCH_2:
7005 case BUILT_IN_OR_AND_FETCH_4:
7006 case BUILT_IN_OR_AND_FETCH_8:
7007 case BUILT_IN_OR_AND_FETCH_16:
7008 mode = get_builtin_sync_mode (fcode - BUILT_IN_OR_AND_FETCH_1);
7009 target = expand_builtin_sync_operation (mode, exp, IOR,
7010 true, target, ignore);
7011 if (target)
7012 return target;
7013 break;
7014
7015 case BUILT_IN_AND_AND_FETCH_1:
7016 case BUILT_IN_AND_AND_FETCH_2:
7017 case BUILT_IN_AND_AND_FETCH_4:
7018 case BUILT_IN_AND_AND_FETCH_8:
7019 case BUILT_IN_AND_AND_FETCH_16:
7020 mode = get_builtin_sync_mode (fcode - BUILT_IN_AND_AND_FETCH_1);
7021 target = expand_builtin_sync_operation (mode, exp, AND,
7022 true, target, ignore);
7023 if (target)
7024 return target;
7025 break;
7026
7027 case BUILT_IN_XOR_AND_FETCH_1:
7028 case BUILT_IN_XOR_AND_FETCH_2:
7029 case BUILT_IN_XOR_AND_FETCH_4:
7030 case BUILT_IN_XOR_AND_FETCH_8:
7031 case BUILT_IN_XOR_AND_FETCH_16:
7032 mode = get_builtin_sync_mode (fcode - BUILT_IN_XOR_AND_FETCH_1);
7033 target = expand_builtin_sync_operation (mode, exp, XOR,
7034 true, target, ignore);
7035 if (target)
7036 return target;
7037 break;
7038
7039 case BUILT_IN_NAND_AND_FETCH_1:
7040 case BUILT_IN_NAND_AND_FETCH_2:
7041 case BUILT_IN_NAND_AND_FETCH_4:
7042 case BUILT_IN_NAND_AND_FETCH_8:
7043 case BUILT_IN_NAND_AND_FETCH_16:
7044 mode = get_builtin_sync_mode (fcode - BUILT_IN_NAND_AND_FETCH_1);
7045 target = expand_builtin_sync_operation (mode, exp, NOT,
7046 true, target, ignore);
7047 if (target)
7048 return target;
7049 break;
7050
7051 case BUILT_IN_BOOL_COMPARE_AND_SWAP_1:
7052 case BUILT_IN_BOOL_COMPARE_AND_SWAP_2:
7053 case BUILT_IN_BOOL_COMPARE_AND_SWAP_4:
7054 case BUILT_IN_BOOL_COMPARE_AND_SWAP_8:
7055 case BUILT_IN_BOOL_COMPARE_AND_SWAP_16:
7056 if (mode == VOIDmode)
7057 mode = TYPE_MODE (boolean_type_node);
7058 if (!target || !register_operand (target, mode))
7059 target = gen_reg_rtx (mode);
7060
7061 mode = get_builtin_sync_mode (fcode - BUILT_IN_BOOL_COMPARE_AND_SWAP_1);
7062 target = expand_builtin_compare_and_swap (mode, exp, true, target);
7063 if (target)
7064 return target;
7065 break;
7066
7067 case BUILT_IN_VAL_COMPARE_AND_SWAP_1:
7068 case BUILT_IN_VAL_COMPARE_AND_SWAP_2:
7069 case BUILT_IN_VAL_COMPARE_AND_SWAP_4:
7070 case BUILT_IN_VAL_COMPARE_AND_SWAP_8:
7071 case BUILT_IN_VAL_COMPARE_AND_SWAP_16:
7072 mode = get_builtin_sync_mode (fcode - BUILT_IN_VAL_COMPARE_AND_SWAP_1);
7073 target = expand_builtin_compare_and_swap (mode, exp, false, target);
7074 if (target)
7075 return target;
7076 break;
7077
7078 case BUILT_IN_LOCK_TEST_AND_SET_1:
7079 case BUILT_IN_LOCK_TEST_AND_SET_2:
7080 case BUILT_IN_LOCK_TEST_AND_SET_4:
7081 case BUILT_IN_LOCK_TEST_AND_SET_8:
7082 case BUILT_IN_LOCK_TEST_AND_SET_16:
7083 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_TEST_AND_SET_1);
7084 target = expand_builtin_lock_test_and_set (mode, exp, target);
7085 if (target)
7086 return target;
7087 break;
7088
7089 case BUILT_IN_LOCK_RELEASE_1:
7090 case BUILT_IN_LOCK_RELEASE_2:
7091 case BUILT_IN_LOCK_RELEASE_4:
7092 case BUILT_IN_LOCK_RELEASE_8:
7093 case BUILT_IN_LOCK_RELEASE_16:
7094 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_RELEASE_1);
7095 expand_builtin_lock_release (mode, exp);
7096 return const0_rtx;
7097
7098 case BUILT_IN_SYNCHRONIZE:
7099 expand_builtin_synchronize ();
7100 return const0_rtx;
7101
7102 case BUILT_IN_OBJECT_SIZE:
7103 return expand_builtin_object_size (exp);
7104
7105 case BUILT_IN_MEMCPY_CHK:
7106 case BUILT_IN_MEMPCPY_CHK:
7107 case BUILT_IN_MEMMOVE_CHK:
7108 case BUILT_IN_MEMSET_CHK:
7109 target = expand_builtin_memory_chk (exp, target, mode, fcode);
7110 if (target)
7111 return target;
7112 break;
7113
7114 case BUILT_IN_STRCPY_CHK:
7115 case BUILT_IN_STPCPY_CHK:
7116 case BUILT_IN_STRNCPY_CHK:
7117 case BUILT_IN_STRCAT_CHK:
7118 case BUILT_IN_STRNCAT_CHK:
7119 case BUILT_IN_SNPRINTF_CHK:
7120 case BUILT_IN_VSNPRINTF_CHK:
7121 maybe_emit_chk_warning (exp, fcode);
7122 break;
7123
7124 case BUILT_IN_SPRINTF_CHK:
7125 case BUILT_IN_VSPRINTF_CHK:
7126 maybe_emit_sprintf_chk_warning (exp, fcode);
7127 break;
7128
7129 case BUILT_IN_FREE:
7130 maybe_emit_free_warning (exp);
7131 break;
7132
7133 default: /* just do library call, if unknown builtin */
7134 break;
7135 }
7136
7137 /* The switch statement above can drop through to cause the function
7138 to be called normally. */
7139 return expand_call (exp, target, ignore);
7140 }
7141
7142 /* Determine whether a tree node represents a call to a built-in
7143 function. If the tree T is a call to a built-in function with
7144 the right number of arguments of the appropriate types, return
7145 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7146 Otherwise the return value is END_BUILTINS. */
7147
7148 enum built_in_function
7149 builtin_mathfn_code (const_tree t)
7150 {
7151 const_tree fndecl, arg, parmlist;
7152 const_tree argtype, parmtype;
7153 const_call_expr_arg_iterator iter;
7154
7155 if (TREE_CODE (t) != CALL_EXPR
7156 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
7157 return END_BUILTINS;
7158
7159 fndecl = get_callee_fndecl (t);
7160 if (fndecl == NULL_TREE
7161 || TREE_CODE (fndecl) != FUNCTION_DECL
7162 || ! DECL_BUILT_IN (fndecl)
7163 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7164 return END_BUILTINS;
7165
7166 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
7167 init_const_call_expr_arg_iterator (t, &iter);
7168 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
7169 {
7170 /* If a function doesn't take a variable number of arguments,
7171 the last element in the list will have type `void'. */
7172 parmtype = TREE_VALUE (parmlist);
7173 if (VOID_TYPE_P (parmtype))
7174 {
7175 if (more_const_call_expr_args_p (&iter))
7176 return END_BUILTINS;
7177 return DECL_FUNCTION_CODE (fndecl);
7178 }
7179
7180 if (! more_const_call_expr_args_p (&iter))
7181 return END_BUILTINS;
7182
7183 arg = next_const_call_expr_arg (&iter);
7184 argtype = TREE_TYPE (arg);
7185
7186 if (SCALAR_FLOAT_TYPE_P (parmtype))
7187 {
7188 if (! SCALAR_FLOAT_TYPE_P (argtype))
7189 return END_BUILTINS;
7190 }
7191 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7192 {
7193 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7194 return END_BUILTINS;
7195 }
7196 else if (POINTER_TYPE_P (parmtype))
7197 {
7198 if (! POINTER_TYPE_P (argtype))
7199 return END_BUILTINS;
7200 }
7201 else if (INTEGRAL_TYPE_P (parmtype))
7202 {
7203 if (! INTEGRAL_TYPE_P (argtype))
7204 return END_BUILTINS;
7205 }
7206 else
7207 return END_BUILTINS;
7208 }
7209
7210 /* Variable-length argument list. */
7211 return DECL_FUNCTION_CODE (fndecl);
7212 }
7213
7214 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7215 evaluate to a constant. */
7216
7217 static tree
7218 fold_builtin_constant_p (tree arg)
7219 {
7220 /* We return 1 for a numeric type that's known to be a constant
7221 value at compile-time or for an aggregate type that's a
7222 literal constant. */
7223 STRIP_NOPS (arg);
7224
7225 /* If we know this is a constant, emit the constant of one. */
7226 if (CONSTANT_CLASS_P (arg)
7227 || (TREE_CODE (arg) == CONSTRUCTOR
7228 && TREE_CONSTANT (arg)))
7229 return integer_one_node;
7230 if (TREE_CODE (arg) == ADDR_EXPR)
7231 {
7232 tree op = TREE_OPERAND (arg, 0);
7233 if (TREE_CODE (op) == STRING_CST
7234 || (TREE_CODE (op) == ARRAY_REF
7235 && integer_zerop (TREE_OPERAND (op, 1))
7236 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7237 return integer_one_node;
7238 }
7239
7240 /* If this expression has side effects, show we don't know it to be a
7241 constant. Likewise if it's a pointer or aggregate type since in
7242 those case we only want literals, since those are only optimized
7243 when generating RTL, not later.
7244 And finally, if we are compiling an initializer, not code, we
7245 need to return a definite result now; there's not going to be any
7246 more optimization done. */
7247 if (TREE_SIDE_EFFECTS (arg)
7248 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7249 || POINTER_TYPE_P (TREE_TYPE (arg))
7250 || cfun == 0
7251 || folding_initializer)
7252 return integer_zero_node;
7253
7254 return NULL_TREE;
7255 }
7256
7257 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7258 return it as a truthvalue. */
7259
7260 static tree
7261 build_builtin_expect_predicate (tree pred, tree expected)
7262 {
7263 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7264
7265 fn = built_in_decls[BUILT_IN_EXPECT];
7266 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7267 ret_type = TREE_TYPE (TREE_TYPE (fn));
7268 pred_type = TREE_VALUE (arg_types);
7269 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7270
7271 pred = fold_convert (pred_type, pred);
7272 expected = fold_convert (expected_type, expected);
7273 call_expr = build_call_expr (fn, 2, pred, expected);
7274
7275 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7276 build_int_cst (ret_type, 0));
7277 }
7278
/* Fold a call to builtin_expect with arguments ARG0 and ARG1.  Return
   NULL_TREE if no simplification is possible.  ARG0 is the tested
   expression, ARG1 the expected value.  */

static tree
fold_builtin_expect (tree arg0, tree arg1)
{
  tree inner, fndecl;
  enum tree_code code;

  /* If this is a builtin_expect within a builtin_expect keep the
     inner one.  See through a comparison against a constant.  It
     might have been added to create a truthvalue.  */
  inner = arg0;
  if (COMPARISON_CLASS_P (inner)
      && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
    inner = TREE_OPERAND (inner, 0);

  if (TREE_CODE (inner) == CALL_EXPR
      && (fndecl = get_callee_fndecl (inner))
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
    return arg0;

  /* Distribute the expected value over short-circuiting operators.
     See through the cast from truthvalue_type_node to long.  */
  inner = arg0;
  while (TREE_CODE (inner) == NOP_EXPR
	 && INTEGRAL_TYPE_P (TREE_TYPE (inner))
	 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner, 0))))
    inner = TREE_OPERAND (inner, 0);

  code = TREE_CODE (inner);
  if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
    {
      /* Rewrite expect (a && b, v) as expect (a, v) && expect (b, v)
	 so each operand carries the prediction.  */
      tree op0 = TREE_OPERAND (inner, 0);
      tree op1 = TREE_OPERAND (inner, 1);

      op0 = build_builtin_expect_predicate (op0, arg1);
      op1 = build_builtin_expect_predicate (op1, arg1);
      inner = build2 (code, TREE_TYPE (inner), op0, op1);

      return fold_convert (TREE_TYPE (arg0), inner);
    }

  /* If the argument isn't invariant then there's nothing else we can do.  */
  if (!TREE_CONSTANT (arg0))
    return NULL_TREE;

  /* If we expect that a comparison against the argument will fold to
     a constant return the constant.  In practice, this means a true
     constant or the address of a non-weak symbol.  */
  inner = arg0;
  STRIP_NOPS (inner);
  if (TREE_CODE (inner) == ADDR_EXPR)
    {
      /* Walk down to the underlying decl; a weak symbol's address may
	 be zero at run time, so its comparison does not fold.  */
      do
	{
	  inner = TREE_OPERAND (inner, 0);
	}
      while (TREE_CODE (inner) == COMPONENT_REF
	     || TREE_CODE (inner) == ARRAY_REF);
      if ((TREE_CODE (inner) == VAR_DECL
	   || TREE_CODE (inner) == FUNCTION_DECL)
	  && DECL_WEAK (inner))
	return NULL_TREE;
    }

  /* Otherwise, ARG0 already has the proper type for the return value.  */
  return arg0;
}
7349
7350 /* Fold a call to __builtin_classify_type with argument ARG. */
7351
7352 static tree
7353 fold_builtin_classify_type (tree arg)
7354 {
7355 if (arg == 0)
7356 return build_int_cst (NULL_TREE, no_type_class);
7357
7358 return build_int_cst (NULL_TREE, type_to_class (TREE_TYPE (arg)));
7359 }
7360
7361 /* Fold a call to __builtin_strlen with argument ARG. */
7362
7363 static tree
7364 fold_builtin_strlen (tree arg)
7365 {
7366 if (!validate_arg (arg, POINTER_TYPE))
7367 return NULL_TREE;
7368 else
7369 {
7370 tree len = c_strlen (arg, 0);
7371
7372 if (len)
7373 {
7374 /* Convert from the internal "sizetype" type to "size_t". */
7375 if (size_type_node)
7376 len = fold_convert (size_type_node, len);
7377 return len;
7378 }
7379
7380 return NULL_TREE;
7381 }
7382 }
7383
7384 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7385
7386 static tree
7387 fold_builtin_inf (tree type, int warn)
7388 {
7389 REAL_VALUE_TYPE real;
7390
7391 /* __builtin_inff is intended to be usable to define INFINITY on all
7392 targets. If an infinity is not available, INFINITY expands "to a
7393 positive constant of type float that overflows at translation
7394 time", footnote "In this case, using INFINITY will violate the
7395 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7396 Thus we pedwarn to ensure this constraint violation is
7397 diagnosed. */
7398 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7399 pedwarn (input_location, 0, "target format does not support infinity");
7400
7401 real_inf (&real);
7402 return build_real (type, real);
7403 }
7404
7405 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7406
7407 static tree
7408 fold_builtin_nan (tree arg, tree type, int quiet)
7409 {
7410 REAL_VALUE_TYPE real;
7411 const char *str;
7412
7413 if (!validate_arg (arg, POINTER_TYPE))
7414 return NULL_TREE;
7415 str = c_getstr (arg);
7416 if (!str)
7417 return NULL_TREE;
7418
7419 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7420 return NULL_TREE;
7421
7422 return build_real (type, real);
7423 }
7424
/* Return true if the floating point expression T has an integer value.
   We also allow +Inf, -Inf and NaN to be considered integer values.
   Conservative: returning false only means "not provably integral".  */

static bool
integer_valued_real_p (tree t)
{
  switch (TREE_CODE (t))
    {
    /* An int-to-float conversion is integral by construction.  */
    case FLOAT_EXPR:
      return true;

    case ABS_EXPR:
    case SAVE_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 0));

    /* For these the value is that of the second operand.  */
    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 1));

    /* These arithmetic ops preserve integrality when both operands
       are integral.  */
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 0))
	     && integer_valued_real_p (TREE_OPERAND (t, 1));

    /* Both arms of the conditional must be integral.  */
    case COND_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 1))
	     && integer_valued_real_p (TREE_OPERAND (t, 2));

    case REAL_CST:
      return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));

    case NOP_EXPR:
      {
	/* A conversion from an integer type is integral; a float-to-
	   float conversion is integral iff its operand is.  */
	tree type = TREE_TYPE (TREE_OPERAND (t, 0));
	if (TREE_CODE (type) == INTEGER_TYPE)
	  return true;
	if (TREE_CODE (type) == REAL_TYPE)
	  return integer_valued_real_p (TREE_OPERAND (t, 0));
	break;
      }

    case CALL_EXPR:
      switch (builtin_mathfn_code (t))
	{
	/* The rounding builtins produce integral values by definition.  */
	CASE_FLT_FN (BUILT_IN_CEIL):
	CASE_FLT_FN (BUILT_IN_FLOOR):
	CASE_FLT_FN (BUILT_IN_NEARBYINT):
	CASE_FLT_FN (BUILT_IN_RINT):
	CASE_FLT_FN (BUILT_IN_ROUND):
	CASE_FLT_FN (BUILT_IN_TRUNC):
	  return true;

	/* fmin/fmax return one of their operands.  */
	CASE_FLT_FN (BUILT_IN_FMIN):
	CASE_FLT_FN (BUILT_IN_FMAX):
	  return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
	    && integer_valued_real_p (CALL_EXPR_ARG (t, 1));

	default:
	  break;
	}
      break;

    default:
      break;
    }
  return false;
}
7496
7497 /* FNDECL is assumed to be a builtin where truncation can be propagated
7498 across (for instance floor((double)f) == (double)floorf (f).
7499 Do the transformation for a call with argument ARG. */
7500
7501 static tree
7502 fold_trunc_transparent_mathfn (tree fndecl, tree arg)
7503 {
7504 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7505
7506 if (!validate_arg (arg, REAL_TYPE))
7507 return NULL_TREE;
7508
7509 /* Integer rounding functions are idempotent. */
7510 if (fcode == builtin_mathfn_code (arg))
7511 return arg;
7512
7513 /* If argument is already integer valued, and we don't need to worry
7514 about setting errno, there's no need to perform rounding. */
7515 if (! flag_errno_math && integer_valued_real_p (arg))
7516 return arg;
7517
7518 if (optimize)
7519 {
7520 tree arg0 = strip_float_extensions (arg);
7521 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7522 tree newtype = TREE_TYPE (arg0);
7523 tree decl;
7524
7525 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7526 && (decl = mathfn_built_in (newtype, fcode)))
7527 return fold_convert (ftype,
7528 build_call_expr (decl, 1,
7529 fold_convert (newtype, arg0)));
7530 }
7531 return NULL_TREE;
7532 }
7533
7534 /* FNDECL is assumed to be builtin which can narrow the FP type of
7535 the argument, for instance lround((double)f) -> lroundf (f).
7536 Do the transformation for a call with argument ARG. */
7537
7538 static tree
7539 fold_fixed_mathfn (tree fndecl, tree arg)
7540 {
7541 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7542
7543 if (!validate_arg (arg, REAL_TYPE))
7544 return NULL_TREE;
7545
7546 /* If argument is already integer valued, and we don't need to worry
7547 about setting errno, there's no need to perform rounding. */
7548 if (! flag_errno_math && integer_valued_real_p (arg))
7549 return fold_build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)), arg);
7550
7551 if (optimize)
7552 {
7553 tree ftype = TREE_TYPE (arg);
7554 tree arg0 = strip_float_extensions (arg);
7555 tree newtype = TREE_TYPE (arg0);
7556 tree decl;
7557
7558 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7559 && (decl = mathfn_built_in (newtype, fcode)))
7560 return build_call_expr (decl, 1, fold_convert (newtype, arg0));
7561 }
7562
7563 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7564 sizeof (long long) == sizeof (long). */
7565 if (TYPE_PRECISION (long_long_integer_type_node)
7566 == TYPE_PRECISION (long_integer_type_node))
7567 {
7568 tree newfn = NULL_TREE;
7569 switch (fcode)
7570 {
7571 CASE_FLT_FN (BUILT_IN_LLCEIL):
7572 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7573 break;
7574
7575 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7576 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7577 break;
7578
7579 CASE_FLT_FN (BUILT_IN_LLROUND):
7580 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7581 break;
7582
7583 CASE_FLT_FN (BUILT_IN_LLRINT):
7584 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7585 break;
7586
7587 default:
7588 break;
7589 }
7590
7591 if (newfn)
7592 {
7593 tree newcall = build_call_expr(newfn, 1, arg);
7594 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7595 }
7596 }
7597
7598 return NULL_TREE;
7599 }
7600
/* Fold call to builtin cabs, cabsf or cabsl with argument ARG.  TYPE is the
   return type.  FNDECL is the cabs function decl itself, used to rebuild
   the call when only the argument is simplified.  Return NULL_TREE if no
   simplification can be made.  */

static tree
fold_builtin_cabs (tree arg, tree type, tree fndecl)
{
  tree res;

  if (TREE_CODE (TREE_TYPE (arg)) != COMPLEX_TYPE
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if (TREE_CODE (arg) == COMPLEX_CST
      && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
			      type, mpfr_hypot)))
    return res;

  if (TREE_CODE (arg) == COMPLEX_EXPR)
    {
      tree real = TREE_OPERAND (arg, 0);
      tree imag = TREE_OPERAND (arg, 1);

      /* If either part is zero, cabs is fabs of the other.  */
      if (real_zerop (real))
	return fold_build1 (ABS_EXPR, type, imag);
      if (real_zerop (imag))
	return fold_build1 (ABS_EXPR, type, real);

      /* cabs(x+xi) -> fabs(x)*sqrt(2).  */
      if (flag_unsafe_math_optimizations
	  && operand_equal_p (real, imag, OEP_PURE_SAME))
        {
	  const REAL_VALUE_TYPE sqrt2_trunc
	    = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
	  /* Strip conversions only after the equality test above.  */
	  STRIP_NOPS (real);
	  return fold_build2 (MULT_EXPR, type,
			      fold_build1 (ABS_EXPR, type, real),
			      build_real (type, sqrt2_trunc));
	}
    }

  /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z).  */
  if (TREE_CODE (arg) == NEGATE_EXPR
      || TREE_CODE (arg) == CONJ_EXPR)
    return build_call_expr (fndecl, 1, TREE_OPERAND (arg, 0));

  /* Don't do this when optimizing for size.  */
  if (flag_unsafe_math_optimizations
      && optimize && optimize_function_for_speed_p (cfun))
    {
      tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);

      if (sqrtfn != NULL_TREE)
	{
	  tree rpart, ipart, result;

	  /* Wrap in SAVE_EXPRs so each piece is evaluated only once.  */
	  arg = builtin_save_expr (arg);

	  rpart = fold_build1 (REALPART_EXPR, type, arg);
	  ipart = fold_build1 (IMAGPART_EXPR, type, arg);

	  rpart = builtin_save_expr (rpart);
	  ipart = builtin_save_expr (ipart);

	  /* cabs(z) expands to sqrt (re*re + im*im).  */
	  result = fold_build2 (PLUS_EXPR, type,
				fold_build2 (MULT_EXPR, type,
					     rpart, rpart),
				fold_build2 (MULT_EXPR, type,
					     ipart, ipart));

	  return build_call_expr (sqrtfn, 1, result);
	}
    }

  return NULL_TREE;
}
7678
/* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
   TYPE is the function's return (and argument) type.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_sqrt (tree arg, tree type)
{

  enum built_in_function fcode;
  tree res;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
    return res;

  /* Optimize sqrt(expN(x)) = expN(x*0.5).  */
  fcode = builtin_mathfn_code (arg);
  if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
    {
      /* The inner call's fndecl, reused to rebuild expN.  */
      tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
      arg = fold_build2 (MULT_EXPR, type,
			 CALL_EXPR_ARG (arg, 0),
			 build_real (type, dconsthalf));
      return build_call_expr (expfn, 1, arg);
    }

  /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)).  */
  if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
    {
      tree powfn = mathfn_built_in (type, BUILT_IN_POW);

      if (powfn)
	{
	  tree arg0 = CALL_EXPR_ARG (arg, 0);
	  tree tree_root;
	  /* The inner root was either sqrt or cbrt.  */
	  /* This was a conditional expression but it triggered a bug
	     in Sun C 5.5.  */
	  REAL_VALUE_TYPE dconstroot;
	  if (BUILTIN_SQRT_P (fcode))
	    dconstroot = dconsthalf;
	  else
	    dconstroot = dconst_third ();

	  /* Adjust for the outer root: halve the exponent, i.e.
	     1/N -> 1/(2*N).  */
	  SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
	  dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
	  tree_root = build_real (type, dconstroot);
	  return build_call_expr (powfn, 2, arg0, tree_root);
	}
    }

  /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5).  */
  if (flag_unsafe_math_optimizations
      && (fcode == BUILT_IN_POW
	  || fcode == BUILT_IN_POWF
	  || fcode == BUILT_IN_POWL))
    {
      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
      tree arg0 = CALL_EXPR_ARG (arg, 0);
      tree arg1 = CALL_EXPR_ARG (arg, 1);
      tree narg1;
      /* Take |x| unless x is known non-negative, since pow requires a
	 non-negative base for non-integer exponents.  */
      if (!tree_expr_nonnegative_p (arg0))
	arg0 = build1 (ABS_EXPR, type, arg0);
      narg1 = fold_build2 (MULT_EXPR, type, arg1,
			   build_real (type, dconsthalf));
      return build_call_expr (powfn, 2, arg0, narg1);
    }

  return NULL_TREE;
}
7752
7753 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7754 Return NULL_TREE if no simplification can be made. */
7755
static tree
fold_builtin_cbrt (tree arg, tree type)
{
  const enum built_in_function fcode = builtin_mathfn_code (arg);
  tree res;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
    return res;

  /* All transformations below can change rounding of the result, so
     they are only attempted with -funsafe-math-optimizations.  */
  if (flag_unsafe_math_optimizations)
    {
      /* Optimize cbrt(expN(x)) -> expN(x/3).  */
      if (BUILTIN_EXPONENT_P (fcode))
        {
          /* Function declaration of the inner expN call.  */
          tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
          const REAL_VALUE_TYPE third_trunc =
            real_value_truncate (TYPE_MODE (type), dconst_third ());
          arg = fold_build2 (MULT_EXPR, type,
                             CALL_EXPR_ARG (arg, 0),
                             build_real (type, third_trunc));
          return build_call_expr (expfn, 1, arg);
        }

      /* Optimize cbrt(sqrt(x)) -> pow(x,1/6).  */
      if (BUILTIN_SQRT_P (fcode))
        {
          tree powfn = mathfn_built_in (type, BUILT_IN_POW);

          if (powfn)
            {
              tree arg0 = CALL_EXPR_ARG (arg, 0);
              tree tree_root;
              REAL_VALUE_TYPE dconstroot = dconst_third ();

              /* Halve 1/3 by decrementing the binary exponent: 1/6.  */
              SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
              dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
              tree_root = build_real (type, dconstroot);
              return build_call_expr (powfn, 2, arg0, tree_root);
            }
        }

      /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative.  */
      if (BUILTIN_CBRT_P (fcode))
        {
          tree arg0 = CALL_EXPR_ARG (arg, 0);
          if (tree_expr_nonnegative_p (arg0))
            {
              tree powfn = mathfn_built_in (type, BUILT_IN_POW);

              if (powfn)
                {
                  tree tree_root;
                  REAL_VALUE_TYPE dconstroot;

                  /* (1/3) * (1/3) == 1/9.  */
                  real_arithmetic (&dconstroot, MULT_EXPR,
                                   dconst_third_ptr (), dconst_third_ptr ());
                  dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
                  tree_root = build_real (type, dconstroot);
                  return build_call_expr (powfn, 2, arg0, tree_root);
                }
            }
        }

      /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative.  */
      if (fcode == BUILT_IN_POW
          || fcode == BUILT_IN_POWF
          || fcode == BUILT_IN_POWL)
        {
          tree arg00 = CALL_EXPR_ARG (arg, 0);
          tree arg01 = CALL_EXPR_ARG (arg, 1);
          if (tree_expr_nonnegative_p (arg00))
            {
              /* Reuse the inner call's own pow declaration.  */
              tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
              const REAL_VALUE_TYPE dconstroot
                = real_value_truncate (TYPE_MODE (type), dconst_third ());
              tree narg01 = fold_build2 (MULT_EXPR, type, arg01,
                                         build_real (type, dconstroot));
              return build_call_expr (powfn, 2, arg00, narg01);
            }
        }
    }
  return NULL_TREE;
}
7843
7844 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7845 TYPE is the type of the return value. Return NULL_TREE if no
7846 simplification can be made. */
7847
7848 static tree
7849 fold_builtin_cos (tree arg, tree type, tree fndecl)
7850 {
7851 tree res, narg;
7852
7853 if (!validate_arg (arg, REAL_TYPE))
7854 return NULL_TREE;
7855
7856 /* Calculate the result when the argument is a constant. */
7857 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7858 return res;
7859
7860 /* Optimize cos(-x) into cos (x). */
7861 if ((narg = fold_strip_sign_ops (arg)))
7862 return build_call_expr (fndecl, 1, narg);
7863
7864 return NULL_TREE;
7865 }
7866
7867 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7868 Return NULL_TREE if no simplification can be made. */
7869
7870 static tree
7871 fold_builtin_cosh (tree arg, tree type, tree fndecl)
7872 {
7873 if (validate_arg (arg, REAL_TYPE))
7874 {
7875 tree res, narg;
7876
7877 /* Calculate the result when the argument is a constant. */
7878 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7879 return res;
7880
7881 /* Optimize cosh(-x) into cosh (x). */
7882 if ((narg = fold_strip_sign_ops (arg)))
7883 return build_call_expr (fndecl, 1, narg);
7884 }
7885
7886 return NULL_TREE;
7887 }
7888
7889 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7890 Return NULL_TREE if no simplification can be made. */
7891
7892 static tree
7893 fold_builtin_tan (tree arg, tree type)
7894 {
7895 enum built_in_function fcode;
7896 tree res;
7897
7898 if (!validate_arg (arg, REAL_TYPE))
7899 return NULL_TREE;
7900
7901 /* Calculate the result when the argument is a constant. */
7902 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7903 return res;
7904
7905 /* Optimize tan(atan(x)) = x. */
7906 fcode = builtin_mathfn_code (arg);
7907 if (flag_unsafe_math_optimizations
7908 && (fcode == BUILT_IN_ATAN
7909 || fcode == BUILT_IN_ATANF
7910 || fcode == BUILT_IN_ATANL))
7911 return CALL_EXPR_ARG (arg, 0);
7912
7913 return NULL_TREE;
7914 }
7915
7916 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7917 NULL_TREE if no simplification can be made. */
7918
static tree
fold_builtin_sincos (tree arg0, tree arg1, tree arg2)
{
  tree type;
  tree res, fn, call;

  /* ARG0 is the angle; ARG1 and ARG2 are the sin/cos output pointers.  */
  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE))
    return NULL_TREE;

  type = TREE_TYPE (arg0);

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
    return res;

  /* Canonicalize sincos to cexpi, but only when the target has the C99
     runtime and provides a cexpi builtin for this type.  */
  if (!TARGET_C99_FUNCTIONS)
    return NULL_TREE;
  fn = mathfn_built_in (type, BUILT_IN_CEXPI);
  if (!fn)
    return NULL_TREE;

  /* Wrap the call in a SAVE_EXPR so both component reads below see a
     single evaluation of cexpi.  */
  call = build_call_expr (fn, 1, arg0);
  call = builtin_save_expr (call);

  /* cexpi(x) == cos(x) + i*sin(x): store the imaginary part through
     ARG1 (sin) and the real part through ARG2 (cos).  */
  return build2 (COMPOUND_EXPR, void_type_node,
                 build2 (MODIFY_EXPR, void_type_node,
                         build_fold_indirect_ref (arg1),
                         build1 (IMAGPART_EXPR, type, call)),
                 build2 (MODIFY_EXPR, void_type_node,
                         build_fold_indirect_ref (arg2),
                         build1 (REALPART_EXPR, type, call)));
}
7954
7955 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7956 NULL_TREE if no simplification can be made. */
7957
static tree
fold_builtin_cexp (tree arg0, tree type)
{
  tree rtype;
  tree realp, imagp, ifn;

  if (!validate_arg (arg0, COMPLEX_TYPE))
    return NULL_TREE;

  /* RTYPE is the real component type of the complex argument.  */
  rtype = TREE_TYPE (TREE_TYPE (arg0));

  /* In case we can figure out the real part of arg0 and it is constant zero
     fold to cexpi.  Requires the C99 runtime and a cexpi builtin.  */
  if (!TARGET_C99_FUNCTIONS)
    return NULL_TREE;
  ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
  if (!ifn)
    return NULL_TREE;

  /* REALP is also reused by the decomposition below, so it is computed
     here even when the zero test fails.  */
  if ((realp = fold_unary (REALPART_EXPR, rtype, arg0))
      && real_zerop (realp))
    {
      /* cexp(0 + i*y) == cexpi(y).  */
      tree narg = fold_build1 (IMAGPART_EXPR, rtype, arg0);
      return build_call_expr (ifn, 1, narg);
    }

  /* In case we can easily decompose real and imaginary parts split cexp
     to exp (r) * cexpi (i).  */
  if (flag_unsafe_math_optimizations
      && realp)
    {
      tree rfn, rcall, icall;

      rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
      if (!rfn)
	return NULL_TREE;

      imagp = fold_unary (IMAGPART_EXPR, rtype, arg0);
      if (!imagp)
	return NULL_TREE;

      /* Wrap both calls in SAVE_EXPRs so each is evaluated only once
	 even though each result feeds two multiplies.  */
      icall = build_call_expr (ifn, 1, imagp);
      icall = builtin_save_expr (icall);
      rcall = build_call_expr (rfn, 1, realp);
      rcall = builtin_save_expr (rcall);
      /* exp(r) * (cos(i) + i*sin(i)), assembled component-wise.  */
      return fold_build2 (COMPLEX_EXPR, type,
			  fold_build2 (MULT_EXPR, rtype,
				       rcall,
			 	       fold_build1 (REALPART_EXPR, rtype, icall)),
			  fold_build2 (MULT_EXPR, rtype,
				       rcall,
				       fold_build1 (IMAGPART_EXPR, rtype, icall)));
    }

  return NULL_TREE;
}
8014
8015 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
8016 Return NULL_TREE if no simplification can be made. */
8017
8018 static tree
8019 fold_builtin_trunc (tree fndecl, tree arg)
8020 {
8021 if (!validate_arg (arg, REAL_TYPE))
8022 return NULL_TREE;
8023
8024 /* Optimize trunc of constant value. */
8025 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8026 {
8027 REAL_VALUE_TYPE r, x;
8028 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8029
8030 x = TREE_REAL_CST (arg);
8031 real_trunc (&r, TYPE_MODE (type), &x);
8032 return build_real (type, r);
8033 }
8034
8035 return fold_trunc_transparent_mathfn (fndecl, arg);
8036 }
8037
8038 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
8039 Return NULL_TREE if no simplification can be made. */
8040
8041 static tree
8042 fold_builtin_floor (tree fndecl, tree arg)
8043 {
8044 if (!validate_arg (arg, REAL_TYPE))
8045 return NULL_TREE;
8046
8047 /* Optimize floor of constant value. */
8048 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8049 {
8050 REAL_VALUE_TYPE x;
8051
8052 x = TREE_REAL_CST (arg);
8053 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8054 {
8055 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8056 REAL_VALUE_TYPE r;
8057
8058 real_floor (&r, TYPE_MODE (type), &x);
8059 return build_real (type, r);
8060 }
8061 }
8062
8063 /* Fold floor (x) where x is nonnegative to trunc (x). */
8064 if (tree_expr_nonnegative_p (arg))
8065 {
8066 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
8067 if (truncfn)
8068 return build_call_expr (truncfn, 1, arg);
8069 }
8070
8071 return fold_trunc_transparent_mathfn (fndecl, arg);
8072 }
8073
8074 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
8075 Return NULL_TREE if no simplification can be made. */
8076
8077 static tree
8078 fold_builtin_ceil (tree fndecl, tree arg)
8079 {
8080 if (!validate_arg (arg, REAL_TYPE))
8081 return NULL_TREE;
8082
8083 /* Optimize ceil of constant value. */
8084 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8085 {
8086 REAL_VALUE_TYPE x;
8087
8088 x = TREE_REAL_CST (arg);
8089 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8090 {
8091 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8092 REAL_VALUE_TYPE r;
8093
8094 real_ceil (&r, TYPE_MODE (type), &x);
8095 return build_real (type, r);
8096 }
8097 }
8098
8099 return fold_trunc_transparent_mathfn (fndecl, arg);
8100 }
8101
8102 /* Fold function call to builtin round, roundf or roundl with argument ARG.
8103 Return NULL_TREE if no simplification can be made. */
8104
8105 static tree
8106 fold_builtin_round (tree fndecl, tree arg)
8107 {
8108 if (!validate_arg (arg, REAL_TYPE))
8109 return NULL_TREE;
8110
8111 /* Optimize round of constant value. */
8112 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8113 {
8114 REAL_VALUE_TYPE x;
8115
8116 x = TREE_REAL_CST (arg);
8117 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8118 {
8119 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8120 REAL_VALUE_TYPE r;
8121
8122 real_round (&r, TYPE_MODE (type), &x);
8123 return build_real (type, r);
8124 }
8125 }
8126
8127 return fold_trunc_transparent_mathfn (fndecl, arg);
8128 }
8129
8130 /* Fold function call to builtin lround, lroundf or lroundl (or the
8131 corresponding long long versions) and other rounding functions. ARG
8132 is the argument to the call. Return NULL_TREE if no simplification
8133 can be made. */
8134
static tree
fold_builtin_int_roundingfn (tree fndecl, tree arg)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Optimize lround of constant value.  */
  if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);

      /* Inf and NaN cannot be represented in the integer result.  */
      if (real_isfinite (&x))
	{
	  tree itype = TREE_TYPE (TREE_TYPE (fndecl));
	  tree ftype = TREE_TYPE (arg);
	  unsigned HOST_WIDE_INT lo2;
	  HOST_WIDE_INT hi, lo;
	  REAL_VALUE_TYPE r;

	  /* Round X according to which builtin this is.  */
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    CASE_FLT_FN (BUILT_IN_LFLOOR):
	    CASE_FLT_FN (BUILT_IN_LLFLOOR):
	      real_floor (&r, TYPE_MODE (ftype), &x);
	      break;

	    CASE_FLT_FN (BUILT_IN_LCEIL):
	    CASE_FLT_FN (BUILT_IN_LLCEIL):
	      real_ceil (&r, TYPE_MODE (ftype), &x);
	      break;

	    CASE_FLT_FN (BUILT_IN_LROUND):
	    CASE_FLT_FN (BUILT_IN_LLROUND):
	      real_round (&r, TYPE_MODE (ftype), &x);
	      break;

	    default:
	      gcc_unreachable ();
	    }

	  /* Convert to a HI:LO integer pair and fold only when the
	     value fits the integer result type without overflow.  */
	  REAL_VALUE_TO_INT (&lo, &hi, r);
	  if (!fit_double_type (lo, hi, &lo2, &hi, itype))
	    return build_int_cst_wide (itype, lo2, hi);
	}
    }

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x).  */
      if (tree_expr_nonnegative_p (arg))
	return fold_build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)),
			    arg);
      break;
    default:;
    }

  return fold_fixed_mathfn (fndecl, arg);
}
8195
8196 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
8197 and their long and long long variants (i.e. ffsl and ffsll). ARG is
8198 the argument to the call. Return NULL_TREE if no simplification can
8199 be made. */
8200
8201 static tree
8202 fold_builtin_bitop (tree fndecl, tree arg)
8203 {
8204 if (!validate_arg (arg, INTEGER_TYPE))
8205 return NULL_TREE;
8206
8207 /* Optimize for constant argument. */
8208 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8209 {
8210 HOST_WIDE_INT hi, width, result;
8211 unsigned HOST_WIDE_INT lo;
8212 tree type;
8213
8214 type = TREE_TYPE (arg);
8215 width = TYPE_PRECISION (type);
8216 lo = TREE_INT_CST_LOW (arg);
8217
8218 /* Clear all the bits that are beyond the type's precision. */
8219 if (width > HOST_BITS_PER_WIDE_INT)
8220 {
8221 hi = TREE_INT_CST_HIGH (arg);
8222 if (width < 2 * HOST_BITS_PER_WIDE_INT)
8223 hi &= ~((HOST_WIDE_INT) (-1) >> (width - HOST_BITS_PER_WIDE_INT));
8224 }
8225 else
8226 {
8227 hi = 0;
8228 if (width < HOST_BITS_PER_WIDE_INT)
8229 lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
8230 }
8231
8232 switch (DECL_FUNCTION_CODE (fndecl))
8233 {
8234 CASE_INT_FN (BUILT_IN_FFS):
8235 if (lo != 0)
8236 result = exact_log2 (lo & -lo) + 1;
8237 else if (hi != 0)
8238 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi) + 1;
8239 else
8240 result = 0;
8241 break;
8242
8243 CASE_INT_FN (BUILT_IN_CLZ):
8244 if (hi != 0)
8245 result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
8246 else if (lo != 0)
8247 result = width - floor_log2 (lo) - 1;
8248 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8249 result = width;
8250 break;
8251
8252 CASE_INT_FN (BUILT_IN_CTZ):
8253 if (lo != 0)
8254 result = exact_log2 (lo & -lo);
8255 else if (hi != 0)
8256 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi);
8257 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8258 result = width;
8259 break;
8260
8261 CASE_INT_FN (BUILT_IN_POPCOUNT):
8262 result = 0;
8263 while (lo)
8264 result++, lo &= lo - 1;
8265 while (hi)
8266 result++, hi &= hi - 1;
8267 break;
8268
8269 CASE_INT_FN (BUILT_IN_PARITY):
8270 result = 0;
8271 while (lo)
8272 result++, lo &= lo - 1;
8273 while (hi)
8274 result++, hi &= hi - 1;
8275 result &= 1;
8276 break;
8277
8278 default:
8279 gcc_unreachable ();
8280 }
8281
8282 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
8283 }
8284
8285 return NULL_TREE;
8286 }
8287
8288 /* Fold function call to builtin_bswap and the long and long long
8289 variants. Return NULL_TREE if no simplification can be made. */
static tree
fold_builtin_bswap (tree fndecl, tree arg)
{
  if (! validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Optimize constant value.  */
  if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
    {
      HOST_WIDE_INT hi, width, r_hi = 0;
      unsigned HOST_WIDE_INT lo, r_lo = 0;
      tree type;

      /* The constant and the result are each kept as HI:LO pairs of
	 host-wide words.  */
      type = TREE_TYPE (arg);
      width = TYPE_PRECISION (type);
      lo = TREE_INT_CST_LOW (arg);
      hi = TREE_INT_CST_HIGH (arg);

      switch (DECL_FUNCTION_CODE (fndecl))
	{
	  case BUILT_IN_BSWAP32:
	  case BUILT_IN_BSWAP64:
	    {
	      int s;

	      /* Move each byte at bit offset S to the mirrored offset
		 D within the WIDTH-bit value.  */
	      for (s = 0; s < width; s += 8)
		{
		  int d = width - s - 8;
		  unsigned HOST_WIDE_INT byte;

		  /* Extract the byte from the source pair...  */
		  if (s < HOST_BITS_PER_WIDE_INT)
		    byte = (lo >> s) & 0xff;
		  else
		    byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;

		  /* ...and deposit it into the result pair.  */
		  if (d < HOST_BITS_PER_WIDE_INT)
		    r_lo |= byte << d;
		  else
		    r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
		}
	    }

	    break;

	default:
	  gcc_unreachable ();
	}

      /* A narrow result fits entirely in the low word.  */
      if (width < HOST_BITS_PER_WIDE_INT)
	return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), r_lo);
      else
	return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl)), r_lo, r_hi);
    }

  return NULL_TREE;
}
8346
8347 /* A subroutine of fold_builtin to fold the various logarithmic
8348 functions. Return NULL_TREE if no simplification can me made.
8349 FUNC is the corresponding MPFR logarithm function. */
8350
static tree
fold_builtin_logarithm (tree fndecl, tree arg,
			int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
{
  if (validate_arg (arg, REAL_TYPE))
    {
      tree type = TREE_TYPE (TREE_TYPE (fndecl));
      tree res;
      const enum built_in_function fcode = builtin_mathfn_code (arg);

      /* Calculate the result when the argument is a constant.  */
      if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
	return res;

      /* Special case, optimize logN(expN(x)) = x.  FUNC identifies
	 which logarithm this is, so match only the inverse expN.  */
      if (flag_unsafe_math_optimizations
	  && ((func == mpfr_log
	       && (fcode == BUILT_IN_EXP
		   || fcode == BUILT_IN_EXPF
		   || fcode == BUILT_IN_EXPL))
	      || (func == mpfr_log2
		  && (fcode == BUILT_IN_EXP2
		      || fcode == BUILT_IN_EXP2F
		      || fcode == BUILT_IN_EXP2L))
	      || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
	return fold_convert (type, CALL_EXPR_ARG (arg, 0));

      /* Optimize logN(func()) for various exponential functions.  We
	 want to determine the value "x" and the power "exponent" in
	 order to transform logN(x**exponent) into exponent*logN(x).  */
      if (flag_unsafe_math_optimizations)
	{
	  tree exponent = 0, x = 0;

	  switch (fcode)
	    {
	    CASE_FLT_FN (BUILT_IN_EXP):
	      /* Prepare to do logN(exp(exponent)) -> exponent*logN(e).  */
	      x = build_real (type, real_value_truncate (TYPE_MODE (type),
							 dconst_e ()));
	      exponent = CALL_EXPR_ARG (arg, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_EXP2):
	      /* Prepare to do logN(exp2(exponent)) -> exponent*logN(2).  */
	      x = build_real (type, dconst2);
	      exponent = CALL_EXPR_ARG (arg, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_EXP10):
	    CASE_FLT_FN (BUILT_IN_POW10):
	      /* Prepare to do logN(exp10(exponent)) -> exponent*logN(10).  */
	      {
		REAL_VALUE_TYPE dconst10;
		real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
		x = build_real (type, dconst10);
	      }
	      exponent = CALL_EXPR_ARG (arg, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_SQRT):
	      /* Prepare to do logN(sqrt(x)) -> 0.5*logN(x).  */
	      x = CALL_EXPR_ARG (arg, 0);
	      exponent = build_real (type, dconsthalf);
	      break;
	    CASE_FLT_FN (BUILT_IN_CBRT):
	      /* Prepare to do logN(cbrt(x)) -> (1/3)*logN(x).  */
	      x = CALL_EXPR_ARG (arg, 0);
	      exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
								dconst_third ()));
	      break;
	    CASE_FLT_FN (BUILT_IN_POW):
	      /* Prepare to do logN(pow(x,exponent)) -> exponent*logN(x).  */
	      x = CALL_EXPR_ARG (arg, 0);
	      exponent = CALL_EXPR_ARG (arg, 1);
	      break;
	    default:
	      break;
	    }

	  /* Now perform the optimization.  */
	  if (x && exponent)
	    {
	      /* Rebuild the same logN builtin applied to X only.  */
	      tree logfn = build_call_expr (fndecl, 1, x);
	      return fold_build2 (MULT_EXPR, type, exponent, logfn);
	    }
	}
    }

  return NULL_TREE;
}
8439
8440 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8441 NULL_TREE if no simplification can be made. */
8442
static tree
fold_builtin_hypot (tree fndecl, tree arg0, tree arg1, tree type)
{
  tree res, narg0, narg1;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
    return res;

  /* If either argument to hypot has a negate or abs, strip that off.
     E.g. hypot(-x,fabs(y)) -> hypot(x,y).  hypot is insensitive to the
     sign of both arguments.  */
  narg0 = fold_strip_sign_ops (arg0);
  narg1 = fold_strip_sign_ops (arg1);
  if (narg0 || narg1)
    {
      /* A NULL result from fold_strip_sign_ops means "unchanged".  */
      return build_call_expr (fndecl, 2, narg0 ? narg0 : arg0,
			      narg1 ? narg1 : arg1);
    }

  /* If either argument is zero, hypot is fabs of the other.  */
  if (real_zerop (arg0))
    return fold_build1 (ABS_EXPR, type, arg1);
  else if (real_zerop (arg1))
    return fold_build1 (ABS_EXPR, type, arg0);

  /* hypot(x,x) -> fabs(x)*sqrt(2).  */
  if (flag_unsafe_math_optimizations
      && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
    {
      /* sqrt(2) rounded to the precision of TYPE.  */
      const REAL_VALUE_TYPE sqrt2_trunc
	= real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
      return fold_build2 (MULT_EXPR, type,
			  fold_build1 (ABS_EXPR, type, arg0),
			  build_real (type, sqrt2_trunc));
    }

  return NULL_TREE;
}
8485
8486
8487 /* Fold a builtin function call to pow, powf, or powl. Return
8488 NULL_TREE if no simplification can be made. */
static tree
fold_builtin_pow (tree fndecl, tree arg0, tree arg1, tree type)
{
  tree res;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
    return res;

  /* Optimize pow(1.0,y) = 1.0.  Keep ARG1 around for side effects.  */
  if (real_onep (arg0))
    return omit_one_operand (type, build_real (type, dconst1), arg1);

  /* Handle a constant exponent.  */
  if (TREE_CODE (arg1) == REAL_CST
      && !TREE_OVERFLOW (arg1))
    {
      REAL_VALUE_TYPE cint;
      REAL_VALUE_TYPE c;
      HOST_WIDE_INT n;

      c = TREE_REAL_CST (arg1);

      /* Optimize pow(x,0.0) = 1.0.  */
      if (REAL_VALUES_EQUAL (c, dconst0))
	return omit_one_operand (type, build_real (type, dconst1),
				 arg0);

      /* Optimize pow(x,1.0) = x.  */
      if (REAL_VALUES_EQUAL (c, dconst1))
	return arg0;

      /* Optimize pow(x,-1.0) = 1.0/x.  */
      if (REAL_VALUES_EQUAL (c, dconstm1))
	return fold_build2 (RDIV_EXPR, type,
			    build_real (type, dconst1), arg0);

      /* Optimize pow(x,0.5) = sqrt(x).  */
      if (flag_unsafe_math_optimizations
	  && REAL_VALUES_EQUAL (c, dconsthalf))
	{
	  tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);

	  if (sqrtfn != NULL_TREE)
	    return build_call_expr (sqrtfn, 1, arg0);
	}

      /* Optimize pow(x,1.0/3.0) = cbrt(x).  */
      if (flag_unsafe_math_optimizations)
	{
	  /* 1/3 rounded to the precision of TYPE, to match how the
	     literal exponent would have been parsed.  */
	  const REAL_VALUE_TYPE dconstroot
	    = real_value_truncate (TYPE_MODE (type), dconst_third ());

	  if (REAL_VALUES_EQUAL (c, dconstroot))
	    {
	      tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
	      if (cbrtfn != NULL_TREE)
		return build_call_expr (cbrtfn, 1, arg0);
	    }
	}

      /* Check for an integer exponent: round-trip C through a
	 HOST_WIDE_INT and compare bit-for-bit.  */
      n = real_to_integer (&c);
      real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
      if (real_identical (&c, &cint))
	{
	  /* Attempt to evaluate pow at compile-time, unless this should
	     raise an exception.  pow(0, negative) may set errno or trap,
	     so only fold that case when such effects are disabled.  */
	  if (TREE_CODE (arg0) == REAL_CST
	      && !TREE_OVERFLOW (arg0)
	      && (n > 0
		  || (!flag_trapping_math && !flag_errno_math)
		  || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
	    {
	      REAL_VALUE_TYPE x;
	      bool inexact;

	      x = TREE_REAL_CST (arg0);
	      inexact = real_powi (&x, TYPE_MODE (type), &x, n);
	      /* An inexact result is only acceptable under unsafe math,
		 since real_powi need not match the runtime rounding.  */
	      if (flag_unsafe_math_optimizations || !inexact)
		return build_real (type, x);
	    }

	  /* Strip sign ops from even integer powers: (-x)**n == x**n
	     when n is even.  */
	  if ((n & 1) == 0 && flag_unsafe_math_optimizations)
	    {
	      tree narg0 = fold_strip_sign_ops (arg0);
	      if (narg0)
		return build_call_expr (fndecl, 2, narg0, arg1);
	    }
	}
    }

  if (flag_unsafe_math_optimizations)
    {
      const enum built_in_function fcode = builtin_mathfn_code (arg0);

      /* Optimize pow(expN(x),y) = expN(x*y).  */
      if (BUILTIN_EXPONENT_P (fcode))
	{
	  /* Function declaration of the inner expN call.  */
	  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
	  tree arg = CALL_EXPR_ARG (arg0, 0);
	  arg = fold_build2 (MULT_EXPR, type, arg, arg1);
	  return build_call_expr (expfn, 1, arg);
	}

      /* Optimize pow(sqrt(x),y) = pow(x,y*0.5).  */
      if (BUILTIN_SQRT_P (fcode))
	{
	  tree narg0 = CALL_EXPR_ARG (arg0, 0);
	  tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
				    build_real (type, dconsthalf));
	  return build_call_expr (fndecl, 2, narg0, narg1);
	}

      /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative.  */
      if (BUILTIN_CBRT_P (fcode))
	{
	  tree arg = CALL_EXPR_ARG (arg0, 0);
	  if (tree_expr_nonnegative_p (arg))
	    {
	      const REAL_VALUE_TYPE dconstroot
		= real_value_truncate (TYPE_MODE (type), dconst_third ());
	      tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
					build_real (type, dconstroot));
	      return build_call_expr (fndecl, 2, arg, narg1);
	    }
	}

      /* Optimize pow(pow(x,y),z) = pow(x,y*z).  */
      if (fcode == BUILT_IN_POW
	  || fcode == BUILT_IN_POWF
	  || fcode == BUILT_IN_POWL)
	{
	  tree arg00 = CALL_EXPR_ARG (arg0, 0);
	  tree arg01 = CALL_EXPR_ARG (arg0, 1);
	  tree narg1 = fold_build2 (MULT_EXPR, type, arg01, arg1);
	  return build_call_expr (fndecl, 2, arg00, narg1);
	}
    }

  return NULL_TREE;
}
8635
8636 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8637 Return NULL_TREE if no simplification can be made. */
8638 static tree
8639 fold_builtin_powi (tree fndecl ATTRIBUTE_UNUSED,
8640 tree arg0, tree arg1, tree type)
8641 {
8642 if (!validate_arg (arg0, REAL_TYPE)
8643 || !validate_arg (arg1, INTEGER_TYPE))
8644 return NULL_TREE;
8645
8646 /* Optimize pow(1.0,y) = 1.0. */
8647 if (real_onep (arg0))
8648 return omit_one_operand (type, build_real (type, dconst1), arg1);
8649
8650 if (host_integerp (arg1, 0))
8651 {
8652 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
8653
8654 /* Evaluate powi at compile-time. */
8655 if (TREE_CODE (arg0) == REAL_CST
8656 && !TREE_OVERFLOW (arg0))
8657 {
8658 REAL_VALUE_TYPE x;
8659 x = TREE_REAL_CST (arg0);
8660 real_powi (&x, TYPE_MODE (type), &x, c);
8661 return build_real (type, x);
8662 }
8663
8664 /* Optimize pow(x,0) = 1.0. */
8665 if (c == 0)
8666 return omit_one_operand (type, build_real (type, dconst1),
8667 arg0);
8668
8669 /* Optimize pow(x,1) = x. */
8670 if (c == 1)
8671 return arg0;
8672
8673 /* Optimize pow(x,-1) = 1.0/x. */
8674 if (c == -1)
8675 return fold_build2 (RDIV_EXPR, type,
8676 build_real (type, dconst1), arg0);
8677 }
8678
8679 return NULL_TREE;
8680 }
8681
8682 /* A subroutine of fold_builtin to fold the various exponent
8683 functions. Return NULL_TREE if no simplification can be made.
8684 FUNC is the corresponding MPFR exponent function. */
8685
static tree
fold_builtin_exponent (tree fndecl, tree arg,
		       int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
{
  if (validate_arg (arg, REAL_TYPE))
    {
      tree type = TREE_TYPE (TREE_TYPE (fndecl));
      tree res;

      /* Calculate the result when the argument is a constant.  */
      if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
	return res;

      /* Optimize expN(logN(x)) = x.  FUNC identifies which expN this
	 is, so match only the corresponding logN.  */
      if (flag_unsafe_math_optimizations)
	{
	  const enum built_in_function fcode = builtin_mathfn_code (arg);

	  if ((func == mpfr_exp
	       && (fcode == BUILT_IN_LOG
		   || fcode == BUILT_IN_LOGF
		   || fcode == BUILT_IN_LOGL))
	      || (func == mpfr_exp2
		  && (fcode == BUILT_IN_LOG2
		      || fcode == BUILT_IN_LOG2F
		      || fcode == BUILT_IN_LOG2L))
	      || (func == mpfr_exp10
		  && (fcode == BUILT_IN_LOG10
		      || fcode == BUILT_IN_LOG10F
		      || fcode == BUILT_IN_LOG10L)))
	    return fold_convert (type, CALL_EXPR_ARG (arg, 0));
	}
    }

  return NULL_TREE;
}
8722
8723 /* Return true if VAR is a VAR_DECL or a component thereof. */
8724
8725 static bool
8726 var_decl_component_p (tree var)
8727 {
8728 tree inner = var;
8729 while (handled_component_p (inner))
8730 inner = TREE_OPERAND (inner, 0);
8731 return SSA_VAR_P (inner);
8732 }
8733
8734 /* Fold function call to builtin memset. Return
8735 NULL_TREE if no simplification can be made. */
8736
static tree
fold_builtin_memset (tree dest, tree c, tree len, tree type, bool ignore)
{
  tree var, ret, etype;
  unsigned HOST_WIDE_INT length, cval;

  if (! validate_arg (dest, POINTER_TYPE)
      || ! validate_arg (c, INTEGER_TYPE)
      || ! validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  if (! host_integerp (len, 1))
    return NULL_TREE;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    return omit_one_operand (type, dest, c);

  /* Beyond this point the strategy is to replace the memset with a
     single scalar store, so C must be constant and DEST side-effect
     free.  */
  if (! host_integerp (c, 1) || TREE_SIDE_EFFECTS (dest))
    return NULL_TREE;

  /* DEST must be the address of a non-volatile object.  */
  var = dest;
  STRIP_NOPS (var);
  if (TREE_CODE (var) != ADDR_EXPR)
    return NULL_TREE;

  var = TREE_OPERAND (var, 0);
  if (TREE_THIS_VOLATILE (var))
    return NULL_TREE;

  /* For an array, consider storing through its element type.  */
  etype = TREE_TYPE (var);
  if (TREE_CODE (etype) == ARRAY_TYPE)
    etype = TREE_TYPE (etype);

  if (!INTEGRAL_TYPE_P (etype)
      && !POINTER_TYPE_P (etype))
    return NULL_TREE;

  if (! var_decl_component_p (var))
    return NULL_TREE;

  /* The store must cover exactly LEN bytes and be sufficiently
     aligned for ETYPE.  */
  length = tree_low_cst (len, 1);
  if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
      || get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT
	 < (int) length)
    return NULL_TREE;

  /* The replicated byte pattern must fit in a host-wide integer.  */
  if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
    return NULL_TREE;

  if (integer_zerop (c))
    cval = 0;
  else
    {
      if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
	return NULL_TREE;

      /* Replicate the low byte of C across all bytes of CVAL.  The
	 final shift is split in two so the count never reaches the
	 word width on hosts with a 32-bit HOST_WIDE_INT.  */
      cval = tree_low_cst (c, 1);
      cval &= 0xff;
      cval |= cval << 8;
      cval |= cval << 16;
      cval |= (cval << 31) << 1;
    }

  /* Build "*(etype *)dest = cval" and return DEST unless the result
     is unused.  */
  ret = build_int_cst_type (etype, cval);
  var = build_fold_indirect_ref (fold_convert (build_pointer_type (etype),
					       dest));
  ret = build2 (MODIFY_EXPR, etype, var, ret);
  if (ignore)
    return ret;

  return omit_one_operand (type, dest, ret);
}
8810
/* Fold function call to builtin bzero.  Return
   NULL_TREE if no simplification can be made.  */
8813
8814 static tree
8815 fold_builtin_bzero (tree dest, tree size, bool ignore)
8816 {
8817 if (! validate_arg (dest, POINTER_TYPE)
8818 || ! validate_arg (size, INTEGER_TYPE))
8819 return NULL_TREE;
8820
8821 if (!ignore)
8822 return NULL_TREE;
8823
8824 /* New argument list transforming bzero(ptr x, int y) to
8825 memset(ptr x, int 0, size_t y). This is done this way
8826 so that if it isn't expanded inline, we fallback to
8827 calling bzero instead of memset. */
8828
8829 return fold_builtin_memset (dest, integer_zero_node,
8830 fold_convert (sizetype, size),
8831 void_type_node, ignore);
8832 }
8833
/* Fold function call to builtin mem{{,p}cpy,move}.  Return
   NULL_TREE if no simplification can be made.
   If ENDP is 0, return DEST (like memcpy).
   If ENDP is 1, return DEST+LEN (like mempcpy).
   If ENDP is 2, return DEST+LEN-1 (like stpcpy).
   If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
   (memmove).  */

static tree
fold_builtin_memory_op (tree dest, tree src, tree len, tree type, bool ignore, int endp)
{
  tree destvar, srcvar, expr;

  if (! validate_arg (dest, POINTER_TYPE)
      || ! validate_arg (src, POINTER_TYPE)
      || ! validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    return omit_one_operand (type, dest, src);

  /* If SRC and DEST are the same (and not volatile), return
     DEST{,+LEN,+LEN-1}.  */
  if (operand_equal_p (src, dest, 0))
    expr = len;
  else
    {
      tree srctype, desttype;
      int src_align, dest_align;

      /* memmove case: try to prove the regions cannot overlap so the
	 call can be strength-reduced to memcpy.  */
      if (endp == 3)
	{
	  src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
	  dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);

	  /* Both DEST and SRC must be pointer types.
	     ??? This is what old code did.  Is the testing for pointer types
	     really mandatory?

	     If either SRC is readonly or length is 1, we can use memcpy.  */
	  if (!dest_align || !src_align)
	    return NULL_TREE;
	  if (readonly_data_expr (src)
	      || (host_integerp (len, 1)
		  && (MIN (src_align, dest_align) / BITS_PER_UNIT
		      >= tree_low_cst (len, 1))))
	    {
	      tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
	      if (!fn)
		return NULL_TREE;
	      return build_call_expr (fn, 3, dest, src, len);
	    }

	  /* If *src and *dest can't overlap, optimize into memcpy as well.  */
	  srcvar = build_fold_indirect_ref (src);
	  destvar = build_fold_indirect_ref (dest);
	  if (srcvar
	      && !TREE_THIS_VOLATILE (srcvar)
	      && destvar
	      && !TREE_THIS_VOLATILE (destvar))
	    {
	      tree src_base, dest_base, fn;
	      HOST_WIDE_INT src_offset = 0, dest_offset = 0;
	      HOST_WIDE_INT size = -1;
	      HOST_WIDE_INT maxsize = -1;

	      src_base = srcvar;
	      if (handled_component_p (src_base))
		src_base = get_ref_base_and_extent (src_base, &src_offset,
						    &size, &maxsize);
	      dest_base = destvar;
	      if (handled_component_p (dest_base))
		dest_base = get_ref_base_and_extent (dest_base, &dest_offset,
						     &size, &maxsize);
	      /* Use a constant LEN (in bits) as the access extent when
		 available; guard the bit conversion against overflow.  */
	      if (host_integerp (len, 1))
		{
		  maxsize = tree_low_cst (len, 1);
		  if (maxsize
		      > INTTYPE_MAXIMUM (HOST_WIDE_INT) / BITS_PER_UNIT)
		    maxsize = -1;
		  else
		    maxsize *= BITS_PER_UNIT;
		}
	      else
		maxsize = -1;
	      /* Two distinct decls, or the same decl with disjoint ranges,
		 cannot overlap.  */
	      if (SSA_VAR_P (src_base)
		  && SSA_VAR_P (dest_base))
		{
		  if (operand_equal_p (src_base, dest_base, 0)
		      && ranges_overlap_p (src_offset, maxsize,
					   dest_offset, maxsize))
		    return NULL_TREE;
		}
	      else if (TREE_CODE (src_base) == INDIRECT_REF
		       && TREE_CODE (dest_base) == INDIRECT_REF)
		{
		  if (! operand_equal_p (TREE_OPERAND (src_base, 0),
					 TREE_OPERAND (dest_base, 0), 0)
		      || ranges_overlap_p (src_offset, maxsize,
					   dest_offset, maxsize))
		    return NULL_TREE;
		}
	      else
		return NULL_TREE;

	      fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
	      if (!fn)
		return NULL_TREE;
	      return build_call_expr (fn, 3, dest, src, len);
	    }
	  return NULL_TREE;
	}

      /* mem{,p}cpy/stpcpy: fold a constant-size copy into a single
	 scalar/aggregate assignment when types and alignment allow.  */
      if (!host_integerp (len, 0))
	return NULL_TREE;
      /* FIXME:
	 This logic lose for arguments like (type *)malloc (sizeof (type)),
	 since we strip the casts of up to VOID return value from malloc.
	 Perhaps we ought to inherit type from non-VOID argument here?  */
      STRIP_NOPS (src);
      STRIP_NOPS (dest);
      /* As we fold (void *)(p + CST) to (void *)p + CST undo this here.  */
      if (TREE_CODE (src) == POINTER_PLUS_EXPR)
	{
	  tree tem = TREE_OPERAND (src, 0);
	  STRIP_NOPS (tem);
	  if (tem != TREE_OPERAND (src, 0))
	    src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
	}
      if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
	{
	  tree tem = TREE_OPERAND (dest, 0);
	  STRIP_NOPS (tem);
	  if (tem != TREE_OPERAND (dest, 0))
	    dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
	}
      /* For an array whose size differs from LEN, retry with the element
	 type instead.  */
      srctype = TREE_TYPE (TREE_TYPE (src));
      if (srctype
	  && TREE_CODE (srctype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
	{
	  srctype = TREE_TYPE (srctype);
	  STRIP_NOPS (src);
	  src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
	}
      desttype = TREE_TYPE (TREE_TYPE (dest));
      if (desttype
	  && TREE_CODE (desttype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
	{
	  desttype = TREE_TYPE (desttype);
	  STRIP_NOPS (dest);
	  dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
	}
      /* Give up on incomplete, variable-size or volatile types.  */
      if (!srctype || !desttype
	  || !TYPE_SIZE_UNIT (srctype)
	  || !TYPE_SIZE_UNIT (desttype)
	  || TREE_CODE (TYPE_SIZE_UNIT (srctype)) != INTEGER_CST
	  || TREE_CODE (TYPE_SIZE_UNIT (desttype)) != INTEGER_CST
	  || TYPE_VOLATILE (srctype)
	  || TYPE_VOLATILE (desttype))
	return NULL_TREE;

      src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
      dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
      if (dest_align < (int) TYPE_ALIGN (desttype)
	  || src_align < (int) TYPE_ALIGN (srctype))
	return NULL_TREE;

      /* DEST is reused when building the mempcpy/stpcpy return value;
	 save it so side effects are evaluated only once.  */
      if (!ignore)
        dest = builtin_save_expr (dest);

      srcvar = NULL_TREE;
      if (tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
	{
	  srcvar = build_fold_indirect_ref (src);
	  if (TREE_THIS_VOLATILE (srcvar))
	    return NULL_TREE;
	  else if (!tree_int_cst_equal (lang_hooks.expr_size (srcvar), len))
	    srcvar = NULL_TREE;
	  /* With memcpy, it is possible to bypass aliasing rules, so without
	     this check i.e. execute/20060930-2.c would be misoptimized,
	     because it use conflicting alias set to hold argument for the
	     memcpy call.  This check is probably unnecessary with
	     -fno-strict-aliasing.  Similarly for destvar.  See also
	     PR29286.  */
	  else if (!var_decl_component_p (srcvar))
	    srcvar = NULL_TREE;
	}

      destvar = NULL_TREE;
      if (tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
	{
	  destvar = build_fold_indirect_ref (dest);
	  if (TREE_THIS_VOLATILE (destvar))
	    return NULL_TREE;
	  else if (!tree_int_cst_equal (lang_hooks.expr_size (destvar), len))
	    destvar = NULL_TREE;
	  else if (!var_decl_component_p (destvar))
	    destvar = NULL_TREE;
	}

      if (srcvar == NULL_TREE && destvar == NULL_TREE)
	return NULL_TREE;

      /* If only one side produced a usable variable, access the other
	 side through the usable side's type (building an aligned or
	 packed variant if the pointer is under-aligned).  */
      if (srcvar == NULL_TREE)
	{
	  tree srcptype;
	  if (TREE_ADDRESSABLE (TREE_TYPE (destvar)))
	    return NULL_TREE;

	  srctype = build_qualified_type (desttype, 0);
	  if (src_align < (int) TYPE_ALIGN (srctype))
	    {
	      if (AGGREGATE_TYPE_P (srctype)
		  || SLOW_UNALIGNED_ACCESS (TYPE_MODE (srctype), src_align))
		return NULL_TREE;

	      srctype = build_variant_type_copy (srctype);
	      TYPE_ALIGN (srctype) = src_align;
	      TYPE_USER_ALIGN (srctype) = 1;
	      TYPE_PACKED (srctype) = 1;
	    }
	  srcptype = build_pointer_type_for_mode (srctype, ptr_mode, true);
	  src = fold_convert (srcptype, src);
	  srcvar = build_fold_indirect_ref (src);
	}
      else if (destvar == NULL_TREE)
	{
	  tree destptype;
	  if (TREE_ADDRESSABLE (TREE_TYPE (srcvar)))
	    return NULL_TREE;

	  desttype = build_qualified_type (srctype, 0);
	  if (dest_align < (int) TYPE_ALIGN (desttype))
	    {
	      if (AGGREGATE_TYPE_P (desttype)
		  || SLOW_UNALIGNED_ACCESS (TYPE_MODE (desttype), dest_align))
		return NULL_TREE;

	      desttype = build_variant_type_copy (desttype);
	      TYPE_ALIGN (desttype) = dest_align;
	      TYPE_USER_ALIGN (desttype) = 1;
	      TYPE_PACKED (desttype) = 1;
	    }
	  destptype = build_pointer_type_for_mode (desttype, ptr_mode, true);
	  dest = fold_convert (destptype, dest);
	  destvar = build_fold_indirect_ref (dest);
	}

      /* Emit destvar = srcvar, converting between the types as needed.  */
      if (srctype == desttype
	  || (gimple_in_ssa_p (cfun)
	      && useless_type_conversion_p (desttype, srctype)))
	expr = srcvar;
      else if ((INTEGRAL_TYPE_P (TREE_TYPE (srcvar))
	   || POINTER_TYPE_P (TREE_TYPE (srcvar)))
	  && (INTEGRAL_TYPE_P (TREE_TYPE (destvar))
	      || POINTER_TYPE_P (TREE_TYPE (destvar))))
	expr = fold_convert (TREE_TYPE (destvar), srcvar);
      else
	expr = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (destvar), srcvar);
      expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, expr);
    }

  if (ignore)
    return expr;

  if (endp == 0 || endp == 3)
    return omit_one_operand (type, dest, expr);

  if (expr == len)
    expr = NULL_TREE;

  /* For mempcpy (ENDP 1) return DEST+LEN, for stpcpy (ENDP 2)
     DEST+LEN-1.  */
  if (endp == 2)
    len = fold_build2 (MINUS_EXPR, TREE_TYPE (len), len,
		       ssize_int (1));

  len = fold_convert (sizetype, len);
  dest = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
  dest = fold_convert (type, dest);
  if (expr)
    dest = omit_one_operand (type, dest, expr);
  return dest;
}
9119
/* Fold function call to builtin strcpy with arguments DEST and SRC.
   FNDECL is the strcpy declaration (used for the call's return type).
   If LEN is not NULL, it represents the length of the string to be
   copied.  Return NULL_TREE if no simplification can be made.  */

tree
fold_builtin_strcpy (tree fndecl, tree dest, tree src, tree len)
{
  tree fn;

  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (src, POINTER_TYPE))
    return NULL_TREE;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (operand_equal_p (src, dest, 0))
    return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);

  /* The memcpy transformation below is a speed optimization; skip it
     when optimizing this function for size.  */
  if (optimize_function_for_size_p (cfun))
    return NULL_TREE;

  fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
  if (!fn)
    return NULL_TREE;

  /* Without a caller-provided length, try to compute it from SRC; give
     up if it is unknown or has side effects.  */
  if (!len)
    {
      len = c_strlen (src, 1);
      if (! len || TREE_SIDE_EFFECTS (len))
	return NULL_TREE;
    }

  /* Copy LEN + 1 bytes so the terminating NUL is included.  */
  len = size_binop (PLUS_EXPR, len, ssize_int (1));
  return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
		       build_call_expr (fn, 3, dest, src, len));
}
9155
/* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
   FNDECL is the strncpy declaration (used for the call's return type).
   If SLEN is not NULL, it represents the length of the source string.
   Return NULL_TREE if no simplification can be made.  */

tree
fold_builtin_strncpy (tree fndecl, tree dest, tree src, tree len, tree slen)
{
  tree fn;

  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (src, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);

  /* We can't compare slen with len as constants below if len is not a
     constant.  */
  if (len == 0 || TREE_CODE (len) != INTEGER_CST)
    return NULL_TREE;

  if (!slen)
    slen = c_strlen (src, 1);

  /* Now, we must be passed a constant src ptr parameter.  */
  if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
    return NULL_TREE;

  /* Account for the terminating NUL of the source.  */
  slen = size_binop (PLUS_EXPR, slen, ssize_int (1));

  /* We do not support simplification of this case, though we do
     support it when expanding trees into RTL.  */
  /* FIXME: generate a call to __builtin_memset.  */
  if (tree_int_cst_lt (slen, len))
    return NULL_TREE;

  /* OK transform into builtin memcpy.  */
  fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
  if (!fn)
    return NULL_TREE;
  return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
		       build_call_expr (fn, 3, dest, src, len));
}
9201
9202 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
9203 arguments to the call, and TYPE is its return type.
9204 Return NULL_TREE if no simplification can be made. */
9205
9206 static tree
9207 fold_builtin_memchr (tree arg1, tree arg2, tree len, tree type)
9208 {
9209 if (!validate_arg (arg1, POINTER_TYPE)
9210 || !validate_arg (arg2, INTEGER_TYPE)
9211 || !validate_arg (len, INTEGER_TYPE))
9212 return NULL_TREE;
9213 else
9214 {
9215 const char *p1;
9216
9217 if (TREE_CODE (arg2) != INTEGER_CST
9218 || !host_integerp (len, 1))
9219 return NULL_TREE;
9220
9221 p1 = c_getstr (arg1);
9222 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
9223 {
9224 char c;
9225 const char *r;
9226 tree tem;
9227
9228 if (target_char_cast (arg2, &c))
9229 return NULL_TREE;
9230
9231 r = (char *) memchr (p1, c, tree_low_cst (len, 1));
9232
9233 if (r == NULL)
9234 return build_int_cst (TREE_TYPE (arg1), 0);
9235
9236 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (arg1), arg1,
9237 size_int (r - p1));
9238 return fold_convert (type, tem);
9239 }
9240 return NULL_TREE;
9241 }
9242 }
9243
9244 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
9245 Return NULL_TREE if no simplification can be made. */
9246
9247 static tree
9248 fold_builtin_memcmp (tree arg1, tree arg2, tree len)
9249 {
9250 const char *p1, *p2;
9251
9252 if (!validate_arg (arg1, POINTER_TYPE)
9253 || !validate_arg (arg2, POINTER_TYPE)
9254 || !validate_arg (len, INTEGER_TYPE))
9255 return NULL_TREE;
9256
9257 /* If the LEN parameter is zero, return zero. */
9258 if (integer_zerop (len))
9259 return omit_two_operands (integer_type_node, integer_zero_node,
9260 arg1, arg2);
9261
9262 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9263 if (operand_equal_p (arg1, arg2, 0))
9264 return omit_one_operand (integer_type_node, integer_zero_node, len);
9265
9266 p1 = c_getstr (arg1);
9267 p2 = c_getstr (arg2);
9268
9269 /* If all arguments are constant, and the value of len is not greater
9270 than the lengths of arg1 and arg2, evaluate at compile-time. */
9271 if (host_integerp (len, 1) && p1 && p2
9272 && compare_tree_int (len, strlen (p1) + 1) <= 0
9273 && compare_tree_int (len, strlen (p2) + 1) <= 0)
9274 {
9275 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
9276
9277 if (r > 0)
9278 return integer_one_node;
9279 else if (r < 0)
9280 return integer_minus_one_node;
9281 else
9282 return integer_zero_node;
9283 }
9284
9285 /* If len parameter is one, return an expression corresponding to
9286 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
9287 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
9288 {
9289 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9290 tree cst_uchar_ptr_node
9291 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9292
9293 tree ind1 = fold_convert (integer_type_node,
9294 build1 (INDIRECT_REF, cst_uchar_node,
9295 fold_convert (cst_uchar_ptr_node,
9296 arg1)));
9297 tree ind2 = fold_convert (integer_type_node,
9298 build1 (INDIRECT_REF, cst_uchar_node,
9299 fold_convert (cst_uchar_ptr_node,
9300 arg2)));
9301 return fold_build2 (MINUS_EXPR, integer_type_node, ind1, ind2);
9302 }
9303
9304 return NULL_TREE;
9305 }
9306
9307 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
9308 Return NULL_TREE if no simplification can be made. */
9309
9310 static tree
9311 fold_builtin_strcmp (tree arg1, tree arg2)
9312 {
9313 const char *p1, *p2;
9314
9315 if (!validate_arg (arg1, POINTER_TYPE)
9316 || !validate_arg (arg2, POINTER_TYPE))
9317 return NULL_TREE;
9318
9319 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9320 if (operand_equal_p (arg1, arg2, 0))
9321 return integer_zero_node;
9322
9323 p1 = c_getstr (arg1);
9324 p2 = c_getstr (arg2);
9325
9326 if (p1 && p2)
9327 {
9328 const int i = strcmp (p1, p2);
9329 if (i < 0)
9330 return integer_minus_one_node;
9331 else if (i > 0)
9332 return integer_one_node;
9333 else
9334 return integer_zero_node;
9335 }
9336
9337 /* If the second arg is "", return *(const unsigned char*)arg1. */
9338 if (p2 && *p2 == '\0')
9339 {
9340 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9341 tree cst_uchar_ptr_node
9342 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9343
9344 return fold_convert (integer_type_node,
9345 build1 (INDIRECT_REF, cst_uchar_node,
9346 fold_convert (cst_uchar_ptr_node,
9347 arg1)));
9348 }
9349
9350 /* If the first arg is "", return -*(const unsigned char*)arg2. */
9351 if (p1 && *p1 == '\0')
9352 {
9353 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9354 tree cst_uchar_ptr_node
9355 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9356
9357 tree temp = fold_convert (integer_type_node,
9358 build1 (INDIRECT_REF, cst_uchar_node,
9359 fold_convert (cst_uchar_ptr_node,
9360 arg2)));
9361 return fold_build1 (NEGATE_EXPR, integer_type_node, temp);
9362 }
9363
9364 return NULL_TREE;
9365 }
9366
/* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_strncmp (tree arg1, tree arg2, tree len)
{
  const char *p1, *p2;

  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return zero.  */
  if (integer_zerop (len))
    return omit_two_operands (integer_type_node, integer_zero_node,
			      arg1, arg2);

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return omit_one_operand (integer_type_node, integer_zero_node, len);

  p1 = c_getstr (arg1);
  p2 = c_getstr (arg2);

  /* Both arguments are string literals and LEN is a constant:
     evaluate the comparison at compile time.  */
  if (host_integerp (len, 1) && p1 && p2)
    {
      const int i = strncmp (p1, p2, tree_low_cst (len, 1));
      if (i > 0)
	return integer_one_node;
      else if (i < 0)
	return integer_minus_one_node;
      else
	return integer_zero_node;
    }

  /* If the second arg is "", and the length is greater than zero,
     return *(const unsigned char*)arg1.  */
  if (p2 && *p2 == '\0'
      && TREE_CODE (len) == INTEGER_CST
      && tree_int_cst_sgn (len) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      return fold_convert (integer_type_node,
			   build1 (INDIRECT_REF, cst_uchar_node,
				   fold_convert (cst_uchar_ptr_node,
						 arg1)));
    }

  /* If the first arg is "", and the length is greater than zero,
     return -*(const unsigned char*)arg2.  */
  if (p1 && *p1 == '\0'
      && TREE_CODE (len) == INTEGER_CST
      && tree_int_cst_sgn (len) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree temp = fold_convert (integer_type_node,
				build1 (INDIRECT_REF, cst_uchar_node,
					fold_convert (cst_uchar_ptr_node,
						      arg2)));
      return fold_build1 (NEGATE_EXPR, integer_type_node, temp);
    }

  /* If len parameter is one, return an expression corresponding to
     (*(const unsigned char*)arg1 - (const unsigned char*)arg2).  */
  if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree ind1 = fold_convert (integer_type_node,
				build1 (INDIRECT_REF, cst_uchar_node,
					fold_convert (cst_uchar_ptr_node,
						      arg1)));
      tree ind2 = fold_convert (integer_type_node,
				build1 (INDIRECT_REF, cst_uchar_node,
					fold_convert (cst_uchar_ptr_node,
						      arg2)));
      return fold_build2 (MINUS_EXPR, integer_type_node, ind1, ind2);
    }

  return NULL_TREE;
}
9457
9458 /* Fold function call to builtin signbit, signbitf or signbitl with argument
9459 ARG. Return NULL_TREE if no simplification can be made. */
9460
9461 static tree
9462 fold_builtin_signbit (tree arg, tree type)
9463 {
9464 tree temp;
9465
9466 if (!validate_arg (arg, REAL_TYPE))
9467 return NULL_TREE;
9468
9469 /* If ARG is a compile-time constant, determine the result. */
9470 if (TREE_CODE (arg) == REAL_CST
9471 && !TREE_OVERFLOW (arg))
9472 {
9473 REAL_VALUE_TYPE c;
9474
9475 c = TREE_REAL_CST (arg);
9476 temp = REAL_VALUE_NEGATIVE (c) ? integer_one_node : integer_zero_node;
9477 return fold_convert (type, temp);
9478 }
9479
9480 /* If ARG is non-negative, the result is always zero. */
9481 if (tree_expr_nonnegative_p (arg))
9482 return omit_one_operand (type, integer_zero_node, arg);
9483
9484 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9485 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
9486 return fold_build2 (LT_EXPR, type, arg,
9487 build_real (TREE_TYPE (arg), dconst0));
9488
9489 return NULL_TREE;
9490 }
9491
/* Fold function call to builtin copysign, copysignf or copysignl with
   arguments ARG1 and ARG2.  FNDECL is the copysign declaration, TYPE the
   call's return type.  Return NULL_TREE if no simplification can
   be made.  */

static tree
fold_builtin_copysign (tree fndecl, tree arg1, tree arg2, tree type)
{
  tree tem;

  if (!validate_arg (arg1, REAL_TYPE)
      || !validate_arg (arg2, REAL_TYPE))
    return NULL_TREE;

  /* copysign(X,X) is X.  */
  if (operand_equal_p (arg1, arg2, 0))
    return fold_convert (type, arg1);

  /* If ARG1 and ARG2 are compile-time constants, determine the result.  */
  if (TREE_CODE (arg1) == REAL_CST
      && TREE_CODE (arg2) == REAL_CST
      && !TREE_OVERFLOW (arg1)
      && !TREE_OVERFLOW (arg2))
    {
      REAL_VALUE_TYPE c1, c2;

      c1 = TREE_REAL_CST (arg1);
      c2 = TREE_REAL_CST (arg2);
      /* c1.sign := c2.sign.  */
      real_copysign (&c1, &c2);
      return build_real (type, c1);
    }

  /* copysign(X, Y) is fabs(X) when Y is always non-negative.
     Remember to evaluate Y for side-effects.  */
  if (tree_expr_nonnegative_p (arg2))
    return omit_one_operand (type,
			     fold_build1 (ABS_EXPR, type, arg1),
			     arg2);

  /* Strip sign changing operations for the first argument, since
     copysign overrides ARG1's sign anyway.  */
  tem = fold_strip_sign_ops (arg1);
  if (tem)
    return build_call_expr (fndecl, 2, tem, arg2);

  return NULL_TREE;
}
9538
9539 /* Fold a call to builtin isascii with argument ARG. */
9540
9541 static tree
9542 fold_builtin_isascii (tree arg)
9543 {
9544 if (!validate_arg (arg, INTEGER_TYPE))
9545 return NULL_TREE;
9546 else
9547 {
9548 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9549 arg = build2 (BIT_AND_EXPR, integer_type_node, arg,
9550 build_int_cst (NULL_TREE,
9551 ~ (unsigned HOST_WIDE_INT) 0x7f));
9552 return fold_build2 (EQ_EXPR, integer_type_node,
9553 arg, integer_zero_node);
9554 }
9555 }
9556
9557 /* Fold a call to builtin toascii with argument ARG. */
9558
9559 static tree
9560 fold_builtin_toascii (tree arg)
9561 {
9562 if (!validate_arg (arg, INTEGER_TYPE))
9563 return NULL_TREE;
9564
9565 /* Transform toascii(c) -> (c & 0x7f). */
9566 return fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9567 build_int_cst (NULL_TREE, 0x7f));
9568 }
9569
9570 /* Fold a call to builtin isdigit with argument ARG. */
9571
9572 static tree
9573 fold_builtin_isdigit (tree arg)
9574 {
9575 if (!validate_arg (arg, INTEGER_TYPE))
9576 return NULL_TREE;
9577 else
9578 {
9579 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9580 /* According to the C standard, isdigit is unaffected by locale.
9581 However, it definitely is affected by the target character set. */
9582 unsigned HOST_WIDE_INT target_digit0
9583 = lang_hooks.to_target_charset ('0');
9584
9585 if (target_digit0 == 0)
9586 return NULL_TREE;
9587
9588 arg = fold_convert (unsigned_type_node, arg);
9589 arg = build2 (MINUS_EXPR, unsigned_type_node, arg,
9590 build_int_cst (unsigned_type_node, target_digit0));
9591 return fold_build2 (LE_EXPR, integer_type_node, arg,
9592 build_int_cst (unsigned_type_node, 9));
9593 }
9594 }
9595
9596 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9597
9598 static tree
9599 fold_builtin_fabs (tree arg, tree type)
9600 {
9601 if (!validate_arg (arg, REAL_TYPE))
9602 return NULL_TREE;
9603
9604 arg = fold_convert (type, arg);
9605 if (TREE_CODE (arg) == REAL_CST)
9606 return fold_abs_const (arg, type);
9607 return fold_build1 (ABS_EXPR, type, arg);
9608 }
9609
9610 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9611
9612 static tree
9613 fold_builtin_abs (tree arg, tree type)
9614 {
9615 if (!validate_arg (arg, INTEGER_TYPE))
9616 return NULL_TREE;
9617
9618 arg = fold_convert (type, arg);
9619 if (TREE_CODE (arg) == INTEGER_CST)
9620 return fold_abs_const (arg, type);
9621 return fold_build1 (ABS_EXPR, type, arg);
9622 }
9623
/* Fold a call to builtin fmin or fmax (fmax when MAX is true) with
   arguments ARG0 and ARG1; TYPE is the call's return type.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_fmin_fmax (tree arg0, tree arg1, tree type, bool max)
{
  if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
    {
      /* Calculate the result when the argument is a constant.  */
      tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));

      if (res)
	return res;

      /* If either argument is NaN, return the other one.  Avoid the
	 transformation if we get (and honor) a signalling NaN.  Using
	 omit_one_operand() ensures we create a non-lvalue.  */
      if (TREE_CODE (arg0) == REAL_CST
	  && real_isnan (&TREE_REAL_CST (arg0))
	  && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	      || ! TREE_REAL_CST (arg0).signalling))
	return omit_one_operand (type, arg1, arg0);
      if (TREE_CODE (arg1) == REAL_CST
	  && real_isnan (&TREE_REAL_CST (arg1))
	  && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
	      || ! TREE_REAL_CST (arg1).signalling))
	return omit_one_operand (type, arg0, arg1);

      /* Transform fmin/fmax(x,x) -> x.  */
      if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
	return omit_one_operand (type, arg0, arg1);

      /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR.  C99 requires these
	 functions to return the numeric arg if the other one is NaN.
	 These tree codes don't honor that, so only transform if
	 -ffinite-math-only is set.  C99 doesn't require -0.0 to be
	 handled, so we don't have to worry about it either.  */
      if (flag_finite_math_only)
	return fold_build2 ((max ? MAX_EXPR : MIN_EXPR), type,
			    fold_convert (type, arg0),
			    fold_convert (type, arg1));
    }
  return NULL_TREE;
}
9667
9668 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9669
9670 static tree
9671 fold_builtin_carg (tree arg, tree type)
9672 {
9673 if (validate_arg (arg, COMPLEX_TYPE))
9674 {
9675 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9676
9677 if (atan2_fn)
9678 {
9679 tree new_arg = builtin_save_expr (arg);
9680 tree r_arg = fold_build1 (REALPART_EXPR, type, new_arg);
9681 tree i_arg = fold_build1 (IMAGPART_EXPR, type, new_arg);
9682 return build_call_expr (atan2_fn, 2, i_arg, r_arg);
9683 }
9684 }
9685
9686 return NULL_TREE;
9687 }
9688
/* Fold a call to builtin logb/ilogb with argument ARG.  RETTYPE is the
   call's return type (real for logb, integer for ilogb).  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_logb (tree arg, tree rettype)
{
  if (! validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg);

  /* Only compile-time real constants are folded here.  */
  if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);

      switch (value->cl)
	{
	case rvc_nan:
	case rvc_inf:
	  /* If arg is Inf or NaN and we're logb, return it.  */
	  if (TREE_CODE (rettype) == REAL_TYPE)
	    return fold_convert (rettype, arg);
	  /* Fall through... */
	case rvc_zero:
	  /* Zero may set errno and/or raise an exception for logb, also
	     for ilogb we don't know FP_ILOGB0.  */
	  return NULL_TREE;
	case rvc_normal:
	  /* For normal numbers, proceed iff radix == 2.  In GCC,
	     normalized significands are in the range [0.5, 1.0).  We
	     want the exponent as if they were [1.0, 2.0) so get the
	     exponent and subtract 1.  */
	  if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
	    return fold_convert (rettype, build_int_cst (NULL_TREE,
							 REAL_EXP (value)-1));
	  break;
	}
    }

  return NULL_TREE;
}
9729
/* Fold a call to builtin significand with argument ARG, if radix == 2.
   RETTYPE is the call's return type.  Return NULL_TREE if no
   simplification can be made.  */

static tree
fold_builtin_significand (tree arg, tree rettype)
{
  if (! validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg);

  /* Only compile-time real constants are folded here.  */
  if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);

      switch (value->cl)
	{
	case rvc_zero:
	case rvc_nan:
	case rvc_inf:
	  /* If arg is +-0, +-Inf or +-NaN, then return it.  */
	  return fold_convert (rettype, arg);
	case rvc_normal:
	  /* For normal numbers, proceed iff radix == 2.  */
	  if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
	    {
	      REAL_VALUE_TYPE result = *value;
	      /* In GCC, normalized significands are in the range [0.5,
		 1.0).  We want them to be [1.0, 2.0) so set the
		 exponent to 1.  */
	      SET_REAL_EXP (&result, 1);
	      return build_real (rettype, result);
	    }
	  break;
	}
    }

  return NULL_TREE;
}
9768
/* Fold a call to builtin frexp, we can assume the base is 2.  ARG0 is
   the value, ARG1 the int* out-parameter receiving the exponent, and
   RETTYPE the call's return type.  Return NULL_TREE if no
   simplification can be made.  */

static tree
fold_builtin_frexp (tree arg0, tree arg1, tree rettype)
{
  if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg0);

  /* Only compile-time real constants are folded here.  */
  if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
    return NULL_TREE;

  arg1 = build_fold_indirect_ref (arg1);

  /* Proceed if a valid pointer type was passed in.  */
  if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
      tree frac, exp;

      switch (value->cl)
	{
	case rvc_zero:
	  /* For +-0, return (*exp = 0, +-0).  */
	  exp = integer_zero_node;
	  frac = arg0;
	  break;
	case rvc_nan:
	case rvc_inf:
	  /* For +-NaN or +-Inf, *exp is unspecified, return arg0.  */
	  return omit_one_operand (rettype, arg0, arg1);
	case rvc_normal:
	  {
	    /* Since the frexp function always expects base 2, and in
	       GCC normalized significands are already in the range
	       [0.5, 1.0), we have exactly what frexp wants.  */
	    REAL_VALUE_TYPE frac_rvt = *value;
	    SET_REAL_EXP (&frac_rvt, 0);
	    frac = build_real (rettype, frac_rvt);
	    exp = build_int_cst (NULL_TREE, REAL_EXP (value));
	  }
	  break;
	default:
	  gcc_unreachable ();
	}

      /* Create the COMPOUND_EXPR (*arg1 = trunc, frac).  */
      arg1 = fold_build2 (MODIFY_EXPR, rettype, arg1, exp);
      TREE_SIDE_EFFECTS (arg1) = 1;
      return fold_build2 (COMPOUND_EXPR, rettype, arg1, frac);
    }

  return NULL_TREE;
}
9824
/* Fold a call to builtin ldexp or scalbn/scalbln with arguments ARG0
   (the value) and ARG1 (the exponent adjustment); TYPE is the call's
   return type.  If LDEXP is true then we can assume the base is two.
   If it's false, then we have to check the mode of the TYPE parameter
   in certain cases.  Return NULL_TREE if no simplification can be
   made.  */

static tree
fold_builtin_load_exponent (tree arg0, tree arg1, tree type, bool ldexp)
{
  if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
    {
      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0.  */
      if (real_zerop (arg0) || integer_zerop (arg1)
	  || (TREE_CODE (arg0) == REAL_CST
	      && !real_isfinite (&TREE_REAL_CST (arg0))))
	return omit_one_operand (type, arg0, arg1);

      /* If both arguments are constant, then try to evaluate it.
	 For scalbn/scalbln this is only valid when the radix is 2.  */
      if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
	  && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
	  && host_integerp (arg1, 0))
	{
	  /* Bound the maximum adjustment to twice the range of the
	     mode's valid exponents.  Use abs to ensure the range is
	     positive as a sanity check.  */
	  const long max_exp_adj = 2 *
	    labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
		  - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);

	  /* Get the user-requested adjustment.  */
	  const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);

	  /* The requested adjustment must be inside this range.  This
	     is a preliminary cap to avoid things like overflow, we
	     may still fail to compute the result for other reasons.  */
	  if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
	    {
	      REAL_VALUE_TYPE initial_result;

	      real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);

	      /* Ensure we didn't overflow.  */
	      if (! real_isinf (&initial_result))
		{
		  const REAL_VALUE_TYPE trunc_result
		    = real_value_truncate (TYPE_MODE (type), initial_result);

		  /* Only proceed if the target mode can hold the
		     resulting value.  */
		  if (REAL_VALUES_EQUAL (initial_result, trunc_result))
		    return build_real (type, trunc_result);
		}
	    }
	}
    }

  return NULL_TREE;
}
9884
9885 /* Fold a call to builtin modf. */
9886
9887 static tree
9888 fold_builtin_modf (tree arg0, tree arg1, tree rettype)
9889 {
9890 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9891 return NULL_TREE;
9892
9893 STRIP_NOPS (arg0);
9894
9895 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9896 return NULL_TREE;
9897
9898 arg1 = build_fold_indirect_ref (arg1);
9899
9900 /* Proceed if a valid pointer type was passed in. */
9901 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9902 {
9903 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9904 REAL_VALUE_TYPE trunc, frac;
9905
9906 switch (value->cl)
9907 {
9908 case rvc_nan:
9909 case rvc_zero:
9910 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9911 trunc = frac = *value;
9912 break;
9913 case rvc_inf:
9914 /* For +-Inf, return (*arg1 = arg0, +-0). */
9915 frac = dconst0;
9916 frac.sign = value->sign;
9917 trunc = *value;
9918 break;
9919 case rvc_normal:
9920 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9921 real_trunc (&trunc, VOIDmode, value);
9922 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9923 /* If the original number was negative and already
9924 integral, then the fractional part is -0.0. */
9925 if (value->sign && frac.cl == rvc_zero)
9926 frac.sign = value->sign;
9927 break;
9928 }
9929
9930 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9931 arg1 = fold_build2 (MODIFY_EXPR, rettype, arg1,
9932 build_real (rettype, trunc));
9933 TREE_SIDE_EFFECTS (arg1) = 1;
9934 return fold_build2 (COMPOUND_EXPR, rettype, arg1,
9935 build_real (rettype, frac));
9936 }
9937
9938 return NULL_TREE;
9939 }
9940
/* Fold a call to __builtin_isnan(), __builtin_isinf(),
   __builtin_isinf_sign() or __builtin_finite().  FNDECL is the
   builtin's declaration, ARG is the argument for the call, and
   BUILTIN_INDEX selects which classification to fold (it may differ
   from FNDECL's own code, e.g. the FINITE variants are all folded as
   BUILT_IN_ISFINITE).  Returns the folded tree or NULL_TREE.  */

static tree
fold_builtin_classify (tree fndecl, tree arg, int builtin_index)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  REAL_VALUE_TYPE r;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  switch (builtin_index)
    {
    case BUILT_IN_ISINF:
      /* If the mode has no infinities the answer is statically 0;
	 keep ARG for its side effects.  */
      if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
	return omit_one_operand (type, integer_zero_node, arg);

      if (TREE_CODE (arg) == REAL_CST)
	{
	  /* Constant argument: +Inf -> 1, -Inf -> -1, finite/NaN -> 0.
	     NOTE(review): bare integer nodes are returned here; the
	     caller presumably converts to TYPE — confirm.  */
	  r = TREE_REAL_CST (arg);
	  if (real_isinf (&r))
	    return real_compare (GT_EXPR, &r, &dconst0)
	      ? integer_one_node : integer_minus_one_node;
	  else
	    return integer_zero_node;
	}

      return NULL_TREE;

    case BUILT_IN_ISINF_SIGN:
      {
	/* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
	/* In a boolean context, GCC will fold the inner COND_EXPR to
	   1.  So e.g. "if (isinf_sign(x))" would be folded to just
	   "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
	tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
	tree isinf_fn = built_in_decls[BUILT_IN_ISINF];
	tree tmp = NULL_TREE;

	/* ARG is used twice below; save it so side effects run once.  */
	arg = builtin_save_expr (arg);

	/* Only fold when both helper declarations are available.  */
	if (signbit_fn && isinf_fn)
	  {
	    tree signbit_call = build_call_expr (signbit_fn, 1, arg);
	    tree isinf_call = build_call_expr (isinf_fn, 1, arg);

	    /* Normalize both helper results to 0/1 booleans.  */
	    signbit_call = fold_build2 (NE_EXPR, integer_type_node,
					signbit_call, integer_zero_node);
	    isinf_call = fold_build2 (NE_EXPR, integer_type_node,
				      isinf_call, integer_zero_node);

	    /* Inner conditional picks the sign, outer gates on isinf.  */
	    tmp = fold_build3 (COND_EXPR, integer_type_node, signbit_call,
			       integer_minus_one_node, integer_one_node);
	    tmp = fold_build3 (COND_EXPR, integer_type_node, isinf_call, tmp,
			       integer_zero_node);
	  }

	return tmp;
      }

    case BUILT_IN_ISFINITE:
      /* With neither NaNs nor infinities honored, everything is
	 finite; keep ARG for its side effects.  */
      if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
	  && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
	return omit_one_operand (type, integer_one_node, arg);

      if (TREE_CODE (arg) == REAL_CST)
	{
	  r = TREE_REAL_CST (arg);
	  return real_isfinite (&r) ? integer_one_node : integer_zero_node;
	}

      return NULL_TREE;

    case BUILT_IN_ISNAN:
      /* If the mode has no NaNs the answer is statically 0; keep ARG
	 for its side effects.  */
      if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
	return omit_one_operand (type, integer_zero_node, arg);

      if (TREE_CODE (arg) == REAL_CST)
	{
	  r = TREE_REAL_CST (arg);
	  return real_isnan (&r) ? integer_one_node : integer_zero_node;
	}

      /* isnan(x) is equivalent to x unordered with itself.  ARG is
	 used twice, so save it first.  */
      arg = builtin_save_expr (arg);
      return fold_build2 (UNORDERED_EXPR, type, arg, arg);

    default:
      gcc_unreachable ();
    }
}
10032
/* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
   This builtin will generate code to return the appropriate floating
   point classification depending on the value of the floating point
   number passed in.  The possible return values must be supplied as
   int arguments to the call in the following order: FP_NAN, FP_INFINITE,
   FP_NORMAL, FP_SUBNORMAL and FP_ZERO.  The ellipses is for exactly
   one floating point argument which is "type generic".  */

static tree
fold_builtin_fpclassify (tree exp)
{
  tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
    arg, type, res, tmp;
  enum machine_mode mode;
  REAL_VALUE_TYPE r;
  /* Scratch buffer for the hexadecimal-float literal built below.  */
  char buf[128];

  /* Verify the required arguments in the original call.  */
  if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
			 INTEGER_TYPE, INTEGER_TYPE,
			 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_TREE;

  fp_nan = CALL_EXPR_ARG (exp, 0);
  fp_infinite = CALL_EXPR_ARG (exp, 1);
  fp_normal = CALL_EXPR_ARG (exp, 2);
  fp_subnormal = CALL_EXPR_ARG (exp, 3);
  fp_zero = CALL_EXPR_ARG (exp, 4);
  arg = CALL_EXPR_ARG (exp, 5);
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (type);
  /* ARG is tested several times below; save |arg| so side effects run
     once and so the >= comparison against the smallest normal works
     regardless of sign.  */
  arg = builtin_save_expr (fold_build1 (ABS_EXPR, type, arg));

  /* fpclassify(x) ->
       isnan(x) ? FP_NAN :
         (fabs(x) == Inf ? FP_INFINITE :
	   (fabs(x) >= DBL_MIN ? FP_NORMAL :
	     (x == 0 ? FP_ZERO : FP_SUBNORMAL))).
     The COND_EXPR chain is built innermost-out, so the order of the
     fold_build3 calls below is significant.  */

  tmp = fold_build2 (EQ_EXPR, integer_type_node, arg,
		     build_real (type, dconst0));
  res = fold_build3 (COND_EXPR, integer_type_node, tmp, fp_zero, fp_subnormal);

  /* "0x1p<emin-1>" is the smallest normal value of MODE expressed as
     a hexadecimal-float literal.  */
  sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
  real_from_string (&r, buf);
  tmp = fold_build2 (GE_EXPR, integer_type_node, arg, build_real (type, r));
  res = fold_build3 (COND_EXPR, integer_type_node, tmp, fp_normal, res);

  /* The Inf test is only needed when MODE honors infinities.  */
  if (HONOR_INFINITIES (mode))
    {
      real_inf (&r);
      tmp = fold_build2 (EQ_EXPR, integer_type_node, arg,
			 build_real (type, r));
      res = fold_build3 (COND_EXPR, integer_type_node, tmp, fp_infinite, res);
    }

  /* Likewise the NaN test (arg ordered with itself) only when MODE
     honors NaNs; it must be the outermost conditional.  */
  if (HONOR_NANS (mode))
    {
      tmp = fold_build2 (ORDERED_EXPR, integer_type_node, arg, arg);
      res = fold_build3 (COND_EXPR, integer_type_node, tmp, res, fp_nan);
    }

  return res;
}
10097
10098 /* Fold a call to an unordered comparison function such as
10099 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
10100 being called and ARG0 and ARG1 are the arguments for the call.
10101 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
10102 the opposite of the desired result. UNORDERED_CODE is used
10103 for modes that can hold NaNs and ORDERED_CODE is used for
10104 the rest. */
10105
10106 static tree
10107 fold_builtin_unordered_cmp (tree fndecl, tree arg0, tree arg1,
10108 enum tree_code unordered_code,
10109 enum tree_code ordered_code)
10110 {
10111 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10112 enum tree_code code;
10113 tree type0, type1;
10114 enum tree_code code0, code1;
10115 tree cmp_type = NULL_TREE;
10116
10117 type0 = TREE_TYPE (arg0);
10118 type1 = TREE_TYPE (arg1);
10119
10120 code0 = TREE_CODE (type0);
10121 code1 = TREE_CODE (type1);
10122
10123 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
10124 /* Choose the wider of two real types. */
10125 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
10126 ? type0 : type1;
10127 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
10128 cmp_type = type0;
10129 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
10130 cmp_type = type1;
10131
10132 arg0 = fold_convert (cmp_type, arg0);
10133 arg1 = fold_convert (cmp_type, arg1);
10134
10135 if (unordered_code == UNORDERED_EXPR)
10136 {
10137 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
10138 return omit_two_operands (type, integer_zero_node, arg0, arg1);
10139 return fold_build2 (UNORDERED_EXPR, type, arg0, arg1);
10140 }
10141
10142 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
10143 : ordered_code;
10144 return fold_build1 (TRUTH_NOT_EXPR, type,
10145 fold_build2 (code, type, arg0, arg1));
10146 }
10147
10148 /* Fold a call to built-in function FNDECL with 0 arguments.
10149 IGNORE is true if the result of the function call is ignored. This
10150 function returns NULL_TREE if no simplification was possible. */
10151
10152 static tree
10153 fold_builtin_0 (tree fndecl, bool ignore ATTRIBUTE_UNUSED)
10154 {
10155 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10156 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10157 switch (fcode)
10158 {
10159 CASE_FLT_FN (BUILT_IN_INF):
10160 case BUILT_IN_INFD32:
10161 case BUILT_IN_INFD64:
10162 case BUILT_IN_INFD128:
10163 return fold_builtin_inf (type, true);
10164
10165 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
10166 return fold_builtin_inf (type, false);
10167
10168 case BUILT_IN_CLASSIFY_TYPE:
10169 return fold_builtin_classify_type (NULL_TREE);
10170
10171 default:
10172 break;
10173 }
10174 return NULL_TREE;
10175 }
10176
10177 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
10178 IGNORE is true if the result of the function call is ignored. This
10179 function returns NULL_TREE if no simplification was possible. */
10180
10181 static tree
10182 fold_builtin_1 (tree fndecl, tree arg0, bool ignore)
10183 {
10184 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10185 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10186 switch (fcode)
10187 {
10188
10189 case BUILT_IN_CONSTANT_P:
10190 {
10191 tree val = fold_builtin_constant_p (arg0);
10192
10193 /* Gimplification will pull the CALL_EXPR for the builtin out of
10194 an if condition. When not optimizing, we'll not CSE it back.
10195 To avoid link error types of regressions, return false now. */
10196 if (!val && !optimize)
10197 val = integer_zero_node;
10198
10199 return val;
10200 }
10201
10202 case BUILT_IN_CLASSIFY_TYPE:
10203 return fold_builtin_classify_type (arg0);
10204
10205 case BUILT_IN_STRLEN:
10206 return fold_builtin_strlen (arg0);
10207
10208 CASE_FLT_FN (BUILT_IN_FABS):
10209 return fold_builtin_fabs (arg0, type);
10210
10211 case BUILT_IN_ABS:
10212 case BUILT_IN_LABS:
10213 case BUILT_IN_LLABS:
10214 case BUILT_IN_IMAXABS:
10215 return fold_builtin_abs (arg0, type);
10216
10217 CASE_FLT_FN (BUILT_IN_CONJ):
10218 if (validate_arg (arg0, COMPLEX_TYPE))
10219 return fold_build1 (CONJ_EXPR, type, arg0);
10220 break;
10221
10222 CASE_FLT_FN (BUILT_IN_CREAL):
10223 if (validate_arg (arg0, COMPLEX_TYPE))
10224 return non_lvalue (fold_build1 (REALPART_EXPR, type, arg0));;
10225 break;
10226
10227 CASE_FLT_FN (BUILT_IN_CIMAG):
10228 if (validate_arg (arg0, COMPLEX_TYPE))
10229 return non_lvalue (fold_build1 (IMAGPART_EXPR, type, arg0));
10230 break;
10231
10232 CASE_FLT_FN (BUILT_IN_CCOS):
10233 CASE_FLT_FN (BUILT_IN_CCOSH):
10234 /* These functions are "even", i.e. f(x) == f(-x). */
10235 if (validate_arg (arg0, COMPLEX_TYPE))
10236 {
10237 tree narg = fold_strip_sign_ops (arg0);
10238 if (narg)
10239 return build_call_expr (fndecl, 1, narg);
10240 }
10241 break;
10242
10243 CASE_FLT_FN (BUILT_IN_CABS):
10244 return fold_builtin_cabs (arg0, type, fndecl);
10245
10246 CASE_FLT_FN (BUILT_IN_CARG):
10247 return fold_builtin_carg (arg0, type);
10248
10249 CASE_FLT_FN (BUILT_IN_SQRT):
10250 return fold_builtin_sqrt (arg0, type);
10251
10252 CASE_FLT_FN (BUILT_IN_CBRT):
10253 return fold_builtin_cbrt (arg0, type);
10254
10255 CASE_FLT_FN (BUILT_IN_ASIN):
10256 if (validate_arg (arg0, REAL_TYPE))
10257 return do_mpfr_arg1 (arg0, type, mpfr_asin,
10258 &dconstm1, &dconst1, true);
10259 break;
10260
10261 CASE_FLT_FN (BUILT_IN_ACOS):
10262 if (validate_arg (arg0, REAL_TYPE))
10263 return do_mpfr_arg1 (arg0, type, mpfr_acos,
10264 &dconstm1, &dconst1, true);
10265 break;
10266
10267 CASE_FLT_FN (BUILT_IN_ATAN):
10268 if (validate_arg (arg0, REAL_TYPE))
10269 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
10270 break;
10271
10272 CASE_FLT_FN (BUILT_IN_ASINH):
10273 if (validate_arg (arg0, REAL_TYPE))
10274 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
10275 break;
10276
10277 CASE_FLT_FN (BUILT_IN_ACOSH):
10278 if (validate_arg (arg0, REAL_TYPE))
10279 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
10280 &dconst1, NULL, true);
10281 break;
10282
10283 CASE_FLT_FN (BUILT_IN_ATANH):
10284 if (validate_arg (arg0, REAL_TYPE))
10285 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
10286 &dconstm1, &dconst1, false);
10287 break;
10288
10289 CASE_FLT_FN (BUILT_IN_SIN):
10290 if (validate_arg (arg0, REAL_TYPE))
10291 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
10292 break;
10293
10294 CASE_FLT_FN (BUILT_IN_COS):
10295 return fold_builtin_cos (arg0, type, fndecl);
10296 break;
10297
10298 CASE_FLT_FN (BUILT_IN_TAN):
10299 return fold_builtin_tan (arg0, type);
10300
10301 CASE_FLT_FN (BUILT_IN_CEXP):
10302 return fold_builtin_cexp (arg0, type);
10303
10304 CASE_FLT_FN (BUILT_IN_CEXPI):
10305 if (validate_arg (arg0, REAL_TYPE))
10306 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
10307 break;
10308
10309 CASE_FLT_FN (BUILT_IN_SINH):
10310 if (validate_arg (arg0, REAL_TYPE))
10311 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
10312 break;
10313
10314 CASE_FLT_FN (BUILT_IN_COSH):
10315 return fold_builtin_cosh (arg0, type, fndecl);
10316
10317 CASE_FLT_FN (BUILT_IN_TANH):
10318 if (validate_arg (arg0, REAL_TYPE))
10319 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
10320 break;
10321
10322 CASE_FLT_FN (BUILT_IN_ERF):
10323 if (validate_arg (arg0, REAL_TYPE))
10324 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
10325 break;
10326
10327 CASE_FLT_FN (BUILT_IN_ERFC):
10328 if (validate_arg (arg0, REAL_TYPE))
10329 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
10330 break;
10331
10332 CASE_FLT_FN (BUILT_IN_TGAMMA):
10333 if (validate_arg (arg0, REAL_TYPE))
10334 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
10335 break;
10336
10337 CASE_FLT_FN (BUILT_IN_EXP):
10338 return fold_builtin_exponent (fndecl, arg0, mpfr_exp);
10339
10340 CASE_FLT_FN (BUILT_IN_EXP2):
10341 return fold_builtin_exponent (fndecl, arg0, mpfr_exp2);
10342
10343 CASE_FLT_FN (BUILT_IN_EXP10):
10344 CASE_FLT_FN (BUILT_IN_POW10):
10345 return fold_builtin_exponent (fndecl, arg0, mpfr_exp10);
10346
10347 CASE_FLT_FN (BUILT_IN_EXPM1):
10348 if (validate_arg (arg0, REAL_TYPE))
10349 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
10350 break;
10351
10352 CASE_FLT_FN (BUILT_IN_LOG):
10353 return fold_builtin_logarithm (fndecl, arg0, mpfr_log);
10354
10355 CASE_FLT_FN (BUILT_IN_LOG2):
10356 return fold_builtin_logarithm (fndecl, arg0, mpfr_log2);
10357
10358 CASE_FLT_FN (BUILT_IN_LOG10):
10359 return fold_builtin_logarithm (fndecl, arg0, mpfr_log10);
10360
10361 CASE_FLT_FN (BUILT_IN_LOG1P):
10362 if (validate_arg (arg0, REAL_TYPE))
10363 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
10364 &dconstm1, NULL, false);
10365 break;
10366
10367 CASE_FLT_FN (BUILT_IN_J0):
10368 if (validate_arg (arg0, REAL_TYPE))
10369 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10370 NULL, NULL, 0);
10371 break;
10372
10373 CASE_FLT_FN (BUILT_IN_J1):
10374 if (validate_arg (arg0, REAL_TYPE))
10375 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10376 NULL, NULL, 0);
10377 break;
10378
10379 CASE_FLT_FN (BUILT_IN_Y0):
10380 if (validate_arg (arg0, REAL_TYPE))
10381 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10382 &dconst0, NULL, false);
10383 break;
10384
10385 CASE_FLT_FN (BUILT_IN_Y1):
10386 if (validate_arg (arg0, REAL_TYPE))
10387 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10388 &dconst0, NULL, false);
10389 break;
10390
10391 CASE_FLT_FN (BUILT_IN_NAN):
10392 case BUILT_IN_NAND32:
10393 case BUILT_IN_NAND64:
10394 case BUILT_IN_NAND128:
10395 return fold_builtin_nan (arg0, type, true);
10396
10397 CASE_FLT_FN (BUILT_IN_NANS):
10398 return fold_builtin_nan (arg0, type, false);
10399
10400 CASE_FLT_FN (BUILT_IN_FLOOR):
10401 return fold_builtin_floor (fndecl, arg0);
10402
10403 CASE_FLT_FN (BUILT_IN_CEIL):
10404 return fold_builtin_ceil (fndecl, arg0);
10405
10406 CASE_FLT_FN (BUILT_IN_TRUNC):
10407 return fold_builtin_trunc (fndecl, arg0);
10408
10409 CASE_FLT_FN (BUILT_IN_ROUND):
10410 return fold_builtin_round (fndecl, arg0);
10411
10412 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10413 CASE_FLT_FN (BUILT_IN_RINT):
10414 return fold_trunc_transparent_mathfn (fndecl, arg0);
10415
10416 CASE_FLT_FN (BUILT_IN_LCEIL):
10417 CASE_FLT_FN (BUILT_IN_LLCEIL):
10418 CASE_FLT_FN (BUILT_IN_LFLOOR):
10419 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10420 CASE_FLT_FN (BUILT_IN_LROUND):
10421 CASE_FLT_FN (BUILT_IN_LLROUND):
10422 return fold_builtin_int_roundingfn (fndecl, arg0);
10423
10424 CASE_FLT_FN (BUILT_IN_LRINT):
10425 CASE_FLT_FN (BUILT_IN_LLRINT):
10426 return fold_fixed_mathfn (fndecl, arg0);
10427
10428 case BUILT_IN_BSWAP32:
10429 case BUILT_IN_BSWAP64:
10430 return fold_builtin_bswap (fndecl, arg0);
10431
10432 CASE_INT_FN (BUILT_IN_FFS):
10433 CASE_INT_FN (BUILT_IN_CLZ):
10434 CASE_INT_FN (BUILT_IN_CTZ):
10435 CASE_INT_FN (BUILT_IN_POPCOUNT):
10436 CASE_INT_FN (BUILT_IN_PARITY):
10437 return fold_builtin_bitop (fndecl, arg0);
10438
10439 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10440 return fold_builtin_signbit (arg0, type);
10441
10442 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10443 return fold_builtin_significand (arg0, type);
10444
10445 CASE_FLT_FN (BUILT_IN_ILOGB):
10446 CASE_FLT_FN (BUILT_IN_LOGB):
10447 return fold_builtin_logb (arg0, type);
10448
10449 case BUILT_IN_ISASCII:
10450 return fold_builtin_isascii (arg0);
10451
10452 case BUILT_IN_TOASCII:
10453 return fold_builtin_toascii (arg0);
10454
10455 case BUILT_IN_ISDIGIT:
10456 return fold_builtin_isdigit (arg0);
10457
10458 CASE_FLT_FN (BUILT_IN_FINITE):
10459 case BUILT_IN_FINITED32:
10460 case BUILT_IN_FINITED64:
10461 case BUILT_IN_FINITED128:
10462 case BUILT_IN_ISFINITE:
10463 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISFINITE);
10464
10465 CASE_FLT_FN (BUILT_IN_ISINF):
10466 case BUILT_IN_ISINFD32:
10467 case BUILT_IN_ISINFD64:
10468 case BUILT_IN_ISINFD128:
10469 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISINF);
10470
10471 case BUILT_IN_ISINF_SIGN:
10472 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISINF_SIGN);
10473
10474 CASE_FLT_FN (BUILT_IN_ISNAN):
10475 case BUILT_IN_ISNAND32:
10476 case BUILT_IN_ISNAND64:
10477 case BUILT_IN_ISNAND128:
10478 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISNAN);
10479
10480 case BUILT_IN_PRINTF:
10481 case BUILT_IN_PRINTF_UNLOCKED:
10482 case BUILT_IN_VPRINTF:
10483 return fold_builtin_printf (fndecl, arg0, NULL_TREE, ignore, fcode);
10484
10485 default:
10486 break;
10487 }
10488
10489 return NULL_TREE;
10490
10491 }
10492
10493 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10494 IGNORE is true if the result of the function call is ignored. This
10495 function returns NULL_TREE if no simplification was possible. */
10496
10497 static tree
10498 fold_builtin_2 (tree fndecl, tree arg0, tree arg1, bool ignore)
10499 {
10500 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10501 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10502
10503 switch (fcode)
10504 {
10505 CASE_FLT_FN (BUILT_IN_JN):
10506 if (validate_arg (arg0, INTEGER_TYPE)
10507 && validate_arg (arg1, REAL_TYPE))
10508 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10509 break;
10510
10511 CASE_FLT_FN (BUILT_IN_YN):
10512 if (validate_arg (arg0, INTEGER_TYPE)
10513 && validate_arg (arg1, REAL_TYPE))
10514 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10515 &dconst0, false);
10516 break;
10517
10518 CASE_FLT_FN (BUILT_IN_DREM):
10519 CASE_FLT_FN (BUILT_IN_REMAINDER):
10520 if (validate_arg (arg0, REAL_TYPE)
10521 && validate_arg(arg1, REAL_TYPE))
10522 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10523 break;
10524
10525 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10526 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10527 if (validate_arg (arg0, REAL_TYPE)
10528 && validate_arg(arg1, POINTER_TYPE))
10529 return do_mpfr_lgamma_r (arg0, arg1, type);
10530 break;
10531
10532 CASE_FLT_FN (BUILT_IN_ATAN2):
10533 if (validate_arg (arg0, REAL_TYPE)
10534 && validate_arg(arg1, REAL_TYPE))
10535 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10536 break;
10537
10538 CASE_FLT_FN (BUILT_IN_FDIM):
10539 if (validate_arg (arg0, REAL_TYPE)
10540 && validate_arg(arg1, REAL_TYPE))
10541 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10542 break;
10543
10544 CASE_FLT_FN (BUILT_IN_HYPOT):
10545 return fold_builtin_hypot (fndecl, arg0, arg1, type);
10546
10547 CASE_FLT_FN (BUILT_IN_LDEXP):
10548 return fold_builtin_load_exponent (arg0, arg1, type, /*ldexp=*/true);
10549 CASE_FLT_FN (BUILT_IN_SCALBN):
10550 CASE_FLT_FN (BUILT_IN_SCALBLN):
10551 return fold_builtin_load_exponent (arg0, arg1, type, /*ldexp=*/false);
10552
10553 CASE_FLT_FN (BUILT_IN_FREXP):
10554 return fold_builtin_frexp (arg0, arg1, type);
10555
10556 CASE_FLT_FN (BUILT_IN_MODF):
10557 return fold_builtin_modf (arg0, arg1, type);
10558
10559 case BUILT_IN_BZERO:
10560 return fold_builtin_bzero (arg0, arg1, ignore);
10561
10562 case BUILT_IN_FPUTS:
10563 return fold_builtin_fputs (arg0, arg1, ignore, false, NULL_TREE);
10564
10565 case BUILT_IN_FPUTS_UNLOCKED:
10566 return fold_builtin_fputs (arg0, arg1, ignore, true, NULL_TREE);
10567
10568 case BUILT_IN_STRSTR:
10569 return fold_builtin_strstr (arg0, arg1, type);
10570
10571 case BUILT_IN_STRCAT:
10572 return fold_builtin_strcat (arg0, arg1);
10573
10574 case BUILT_IN_STRSPN:
10575 return fold_builtin_strspn (arg0, arg1);
10576
10577 case BUILT_IN_STRCSPN:
10578 return fold_builtin_strcspn (arg0, arg1);
10579
10580 case BUILT_IN_STRCHR:
10581 case BUILT_IN_INDEX:
10582 return fold_builtin_strchr (arg0, arg1, type);
10583
10584 case BUILT_IN_STRRCHR:
10585 case BUILT_IN_RINDEX:
10586 return fold_builtin_strrchr (arg0, arg1, type);
10587
10588 case BUILT_IN_STRCPY:
10589 return fold_builtin_strcpy (fndecl, arg0, arg1, NULL_TREE);
10590
10591 case BUILT_IN_STPCPY:
10592 if (ignore)
10593 {
10594 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
10595 if (!fn)
10596 break;
10597
10598 return build_call_expr (fn, 2, arg0, arg1);
10599 }
10600 break;
10601
10602 case BUILT_IN_STRCMP:
10603 return fold_builtin_strcmp (arg0, arg1);
10604
10605 case BUILT_IN_STRPBRK:
10606 return fold_builtin_strpbrk (arg0, arg1, type);
10607
10608 case BUILT_IN_EXPECT:
10609 return fold_builtin_expect (arg0, arg1);
10610
10611 CASE_FLT_FN (BUILT_IN_POW):
10612 return fold_builtin_pow (fndecl, arg0, arg1, type);
10613
10614 CASE_FLT_FN (BUILT_IN_POWI):
10615 return fold_builtin_powi (fndecl, arg0, arg1, type);
10616
10617 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10618 return fold_builtin_copysign (fndecl, arg0, arg1, type);
10619
10620 CASE_FLT_FN (BUILT_IN_FMIN):
10621 return fold_builtin_fmin_fmax (arg0, arg1, type, /*max=*/false);
10622
10623 CASE_FLT_FN (BUILT_IN_FMAX):
10624 return fold_builtin_fmin_fmax (arg0, arg1, type, /*max=*/true);
10625
10626 case BUILT_IN_ISGREATER:
10627 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNLE_EXPR, LE_EXPR);
10628 case BUILT_IN_ISGREATEREQUAL:
10629 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNLT_EXPR, LT_EXPR);
10630 case BUILT_IN_ISLESS:
10631 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNGE_EXPR, GE_EXPR);
10632 case BUILT_IN_ISLESSEQUAL:
10633 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNGT_EXPR, GT_EXPR);
10634 case BUILT_IN_ISLESSGREATER:
10635 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10636 case BUILT_IN_ISUNORDERED:
10637 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNORDERED_EXPR,
10638 NOP_EXPR);
10639
10640 /* We do the folding for va_start in the expander. */
10641 case BUILT_IN_VA_START:
10642 break;
10643
10644 case BUILT_IN_SPRINTF:
10645 return fold_builtin_sprintf (arg0, arg1, NULL_TREE, ignore);
10646
10647 case BUILT_IN_OBJECT_SIZE:
10648 return fold_builtin_object_size (arg0, arg1);
10649
10650 case BUILT_IN_PRINTF:
10651 case BUILT_IN_PRINTF_UNLOCKED:
10652 case BUILT_IN_VPRINTF:
10653 return fold_builtin_printf (fndecl, arg0, arg1, ignore, fcode);
10654
10655 case BUILT_IN_PRINTF_CHK:
10656 case BUILT_IN_VPRINTF_CHK:
10657 if (!validate_arg (arg0, INTEGER_TYPE)
10658 || TREE_SIDE_EFFECTS (arg0))
10659 return NULL_TREE;
10660 else
10661 return fold_builtin_printf (fndecl, arg1, NULL_TREE, ignore, fcode);
10662 break;
10663
10664 case BUILT_IN_FPRINTF:
10665 case BUILT_IN_FPRINTF_UNLOCKED:
10666 case BUILT_IN_VFPRINTF:
10667 return fold_builtin_fprintf (fndecl, arg0, arg1, NULL_TREE,
10668 ignore, fcode);
10669
10670 default:
10671 break;
10672 }
10673 return NULL_TREE;
10674 }
10675
10676 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10677 and ARG2. IGNORE is true if the result of the function call is ignored.
10678 This function returns NULL_TREE if no simplification was possible. */
10679
10680 static tree
10681 fold_builtin_3 (tree fndecl, tree arg0, tree arg1, tree arg2, bool ignore)
10682 {
10683 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10684 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10685 switch (fcode)
10686 {
10687
10688 CASE_FLT_FN (BUILT_IN_SINCOS):
10689 return fold_builtin_sincos (arg0, arg1, arg2);
10690
10691 CASE_FLT_FN (BUILT_IN_FMA):
10692 if (validate_arg (arg0, REAL_TYPE)
10693 && validate_arg(arg1, REAL_TYPE)
10694 && validate_arg(arg2, REAL_TYPE))
10695 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
10696 break;
10697
10698 CASE_FLT_FN (BUILT_IN_REMQUO):
10699 if (validate_arg (arg0, REAL_TYPE)
10700 && validate_arg(arg1, REAL_TYPE)
10701 && validate_arg(arg2, POINTER_TYPE))
10702 return do_mpfr_remquo (arg0, arg1, arg2);
10703 break;
10704
10705 case BUILT_IN_MEMSET:
10706 return fold_builtin_memset (arg0, arg1, arg2, type, ignore);
10707
10708 case BUILT_IN_BCOPY:
10709 return fold_builtin_memory_op (arg1, arg0, arg2, void_type_node, true, /*endp=*/3);
10710
10711 case BUILT_IN_MEMCPY:
10712 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/0);
10713
10714 case BUILT_IN_MEMPCPY:
10715 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/1);
10716
10717 case BUILT_IN_MEMMOVE:
10718 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/3);
10719
10720 case BUILT_IN_STRNCAT:
10721 return fold_builtin_strncat (arg0, arg1, arg2);
10722
10723 case BUILT_IN_STRNCPY:
10724 return fold_builtin_strncpy (fndecl, arg0, arg1, arg2, NULL_TREE);
10725
10726 case BUILT_IN_STRNCMP:
10727 return fold_builtin_strncmp (arg0, arg1, arg2);
10728
10729 case BUILT_IN_MEMCHR:
10730 return fold_builtin_memchr (arg0, arg1, arg2, type);
10731
10732 case BUILT_IN_BCMP:
10733 case BUILT_IN_MEMCMP:
10734 return fold_builtin_memcmp (arg0, arg1, arg2);;
10735
10736 case BUILT_IN_SPRINTF:
10737 return fold_builtin_sprintf (arg0, arg1, arg2, ignore);
10738
10739 case BUILT_IN_STRCPY_CHK:
10740 case BUILT_IN_STPCPY_CHK:
10741 return fold_builtin_stxcpy_chk (fndecl, arg0, arg1, arg2, NULL_TREE,
10742 ignore, fcode);
10743
10744 case BUILT_IN_STRCAT_CHK:
10745 return fold_builtin_strcat_chk (fndecl, arg0, arg1, arg2);
10746
10747 case BUILT_IN_PRINTF_CHK:
10748 case BUILT_IN_VPRINTF_CHK:
10749 if (!validate_arg (arg0, INTEGER_TYPE)
10750 || TREE_SIDE_EFFECTS (arg0))
10751 return NULL_TREE;
10752 else
10753 return fold_builtin_printf (fndecl, arg1, arg2, ignore, fcode);
10754 break;
10755
10756 case BUILT_IN_FPRINTF:
10757 case BUILT_IN_FPRINTF_UNLOCKED:
10758 case BUILT_IN_VFPRINTF:
10759 return fold_builtin_fprintf (fndecl, arg0, arg1, arg2, ignore, fcode);
10760
10761 case BUILT_IN_FPRINTF_CHK:
10762 case BUILT_IN_VFPRINTF_CHK:
10763 if (!validate_arg (arg1, INTEGER_TYPE)
10764 || TREE_SIDE_EFFECTS (arg1))
10765 return NULL_TREE;
10766 else
10767 return fold_builtin_fprintf (fndecl, arg0, arg2, NULL_TREE,
10768 ignore, fcode);
10769
10770 default:
10771 break;
10772 }
10773 return NULL_TREE;
10774 }
10775
10776 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10777 ARG2, and ARG3. IGNORE is true if the result of the function call is
10778 ignored. This function returns NULL_TREE if no simplification was
10779 possible. */
10780
10781 static tree
10782 fold_builtin_4 (tree fndecl, tree arg0, tree arg1, tree arg2, tree arg3,
10783 bool ignore)
10784 {
10785 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10786
10787 switch (fcode)
10788 {
10789 case BUILT_IN_MEMCPY_CHK:
10790 case BUILT_IN_MEMPCPY_CHK:
10791 case BUILT_IN_MEMMOVE_CHK:
10792 case BUILT_IN_MEMSET_CHK:
10793 return fold_builtin_memory_chk (fndecl, arg0, arg1, arg2, arg3,
10794 NULL_TREE, ignore,
10795 DECL_FUNCTION_CODE (fndecl));
10796
10797 case BUILT_IN_STRNCPY_CHK:
10798 return fold_builtin_strncpy_chk (arg0, arg1, arg2, arg3, NULL_TREE);
10799
10800 case BUILT_IN_STRNCAT_CHK:
10801 return fold_builtin_strncat_chk (fndecl, arg0, arg1, arg2, arg3);
10802
10803 case BUILT_IN_FPRINTF_CHK:
10804 case BUILT_IN_VFPRINTF_CHK:
10805 if (!validate_arg (arg1, INTEGER_TYPE)
10806 || TREE_SIDE_EFFECTS (arg1))
10807 return NULL_TREE;
10808 else
10809 return fold_builtin_fprintf (fndecl, arg0, arg2, arg3,
10810 ignore, fcode);
10811 break;
10812
10813 default:
10814 break;
10815 }
10816 return NULL_TREE;
10817 }
10818
10819 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10820 arguments, where NARGS <= 4. IGNORE is true if the result of the
10821 function call is ignored. This function returns NULL_TREE if no
10822 simplification was possible. Note that this only folds builtins with
10823 fixed argument patterns. Foldings that do varargs-to-varargs
10824 transformations, or that match calls with more than 4 arguments,
10825 need to be handled with fold_builtin_varargs instead. */
10826
10827 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10828
10829 static tree
10830 fold_builtin_n (tree fndecl, tree *args, int nargs, bool ignore)
10831 {
10832 tree ret = NULL_TREE;
10833
10834 switch (nargs)
10835 {
10836 case 0:
10837 ret = fold_builtin_0 (fndecl, ignore);
10838 break;
10839 case 1:
10840 ret = fold_builtin_1 (fndecl, args[0], ignore);
10841 break;
10842 case 2:
10843 ret = fold_builtin_2 (fndecl, args[0], args[1], ignore);
10844 break;
10845 case 3:
10846 ret = fold_builtin_3 (fndecl, args[0], args[1], args[2], ignore);
10847 break;
10848 case 4:
10849 ret = fold_builtin_4 (fndecl, args[0], args[1], args[2], args[3],
10850 ignore);
10851 break;
10852 default:
10853 break;
10854 }
10855 if (ret)
10856 {
10857 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10858 TREE_NO_WARNING (ret) = 1;
10859 return ret;
10860 }
10861 return NULL_TREE;
10862 }
10863
10864 /* Builtins with folding operations that operate on "..." arguments
10865 need special handling; we need to store the arguments in a convenient
10866 data structure before attempting any folding. Fortunately there are
10867 only a few builtins that fall into this category. FNDECL is the
10868 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
10869 result of the function call is ignored. */
10870
10871 static tree
10872 fold_builtin_varargs (tree fndecl, tree exp, bool ignore ATTRIBUTE_UNUSED)
10873 {
10874 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10875 tree ret = NULL_TREE;
10876
10877 switch (fcode)
10878 {
10879 case BUILT_IN_SPRINTF_CHK:
10880 case BUILT_IN_VSPRINTF_CHK:
10881 ret = fold_builtin_sprintf_chk (exp, fcode);
10882 break;
10883
10884 case BUILT_IN_SNPRINTF_CHK:
10885 case BUILT_IN_VSNPRINTF_CHK:
10886 ret = fold_builtin_snprintf_chk (exp, NULL_TREE, fcode);
10887 break;
10888
10889 case BUILT_IN_FPCLASSIFY:
10890 ret = fold_builtin_fpclassify (exp);
10891 break;
10892
10893 default:
10894 break;
10895 }
10896 if (ret)
10897 {
10898 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10899 TREE_NO_WARNING (ret) = 1;
10900 return ret;
10901 }
10902 return NULL_TREE;
10903 }
10904
10905 /* Return true if FNDECL shouldn't be folded right now.
10906 If a built-in function has an inline attribute always_inline
10907 wrapper, defer folding it after always_inline functions have
10908 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
10909 might not be performed. */
10910
10911 static bool
10912 avoid_folding_inline_builtin (tree fndecl)
10913 {
10914 return (DECL_DECLARED_INLINE_P (fndecl)
10915 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10916 && cfun
10917 && !cfun->always_inline_functions_inlined
10918 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
10919 }
10920
/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  EXP is the
   CALL_EXPR to fold and IGNORE is true if the value of the call is
   unused.  Returns the folded replacement tree, or NULL_TREE when no
   folding was done.  */

tree
fold_call_expr (tree exp, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = get_callee_fndecl (exp);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
	 yet.  Defer folding until we see all the arguments
	 (after inlining).  */
      && !CALL_EXPR_VA_ARG_PACK (exp))
    {
      int nargs = call_expr_nargs (exp);

      /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
	 instead last argument is __builtin_va_arg_pack ().  Defer folding
	 even in that case, until arguments are finalized.  */
      if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
	{
	  tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
	  if (fndecl2
	      && TREE_CODE (fndecl2) == FUNCTION_DECL
	      && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
	    return NULL_TREE;
	}

      /* Builtins with always_inline wrappers are only folded after
	 inlining, so checks like -D_FORTIFY_SOURCE's still run.  */
      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;

      /* FIXME: Don't use a list in this interface.  */
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	return targetm.fold_builtin (fndecl, CALL_EXPR_ARGS (exp), ignore);
      else
	{
	  /* Try the fixed-arity folders first, then fall back to the
	     varargs folders.  */
	  if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
	    {
	      tree *args = CALL_EXPR_ARGP (exp);
	      ret = fold_builtin_n (fndecl, args, nargs, ignore);
	    }
	  if (!ret)
	    ret = fold_builtin_varargs (fndecl, exp, ignore);
	  if (ret)
	    {
	      /* Propagate location information from original call to
		 expansion of builtin.  Otherwise things like
		 maybe_emit_chk_warning, that operate on the expansion
		 of a builtin, will use the wrong location information.  */
	      if (CAN_HAVE_LOCATION_P (exp) && EXPR_HAS_LOCATION (exp))
		{
		  tree realret = ret;
		  /* fold_builtin_n wraps results in a NOP_EXPR; the
		     location belongs on the expression underneath.  */
		  if (TREE_CODE (ret) == NOP_EXPR)
		    realret = TREE_OPERAND (ret, 0);
		  if (CAN_HAVE_LOCATION_P (realret)
		      && !EXPR_HAS_LOCATION (realret))
		    SET_EXPR_LOCATION (realret, EXPR_LOCATION (exp));
		}
	      return ret;
	    }
	}
    }
  return NULL_TREE;
}
10989
10990 /* Conveniently construct a function call expression. FNDECL names the
10991 function to be called and ARGLIST is a TREE_LIST of arguments. */
10992
10993 tree
10994 build_function_call_expr (tree fndecl, tree arglist)
10995 {
10996 tree fntype = TREE_TYPE (fndecl);
10997 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10998 int n = list_length (arglist);
10999 tree *argarray = (tree *) alloca (n * sizeof (tree));
11000 int i;
11001
11002 for (i = 0; i < n; i++, arglist = TREE_CHAIN (arglist))
11003 argarray[i] = TREE_VALUE (arglist);
11004 return fold_builtin_call_array (TREE_TYPE (fntype), fn, n, argarray);
11005 }
11006
11007 /* Conveniently construct a function call expression. FNDECL names the
11008 function to be called, N is the number of arguments, and the "..."
11009 parameters are the argument expressions. */
11010
11011 tree
11012 build_call_expr (tree fndecl, int n, ...)
11013 {
11014 va_list ap;
11015 tree fntype = TREE_TYPE (fndecl);
11016 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
11017 tree *argarray = (tree *) alloca (n * sizeof (tree));
11018 int i;
11019
11020 va_start (ap, n);
11021 for (i = 0; i < n; i++)
11022 argarray[i] = va_arg (ap, tree);
11023 va_end (ap);
11024 return fold_builtin_call_array (TREE_TYPE (fntype), fn, n, argarray);
11025 }
11026
/* Construct a CALL_EXPR with type TYPE with FN as the function expression.
   N arguments are passed in the array ARGARRAY.  Attempts to fold the
   call; when folding is not possible (or must be deferred) the plain
   CALL_EXPR is returned instead.  */

tree
fold_builtin_call_array (tree type,
			 tree fn,
			 int n,
			 tree *argarray)
{
  tree ret = NULL_TREE;
  int i;
  tree exp;

  if (TREE_CODE (fn) == ADDR_EXPR)
    {
      tree fndecl = TREE_OPERAND (fn, 0);
      if (TREE_CODE (fndecl) == FUNCTION_DECL
	  && DECL_BUILT_IN (fndecl))
	{
	  /* If last argument is __builtin_va_arg_pack (), arguments to this
	     function are not finalized yet.  Defer folding until they are.  */
	  if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
	    {
	      tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
	      if (fndecl2
		  && TREE_CODE (fndecl2) == FUNCTION_DECL
		  && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
		  && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
		return build_call_array (type, fn, n, argarray);
	    }
	  /* Builtins wrapped by an always_inline function are only
	     folded after inlining (see avoid_folding_inline_builtin).  */
	  if (avoid_folding_inline_builtin (fndecl))
	    return build_call_array (type, fn, n, argarray);
	  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	    {
	      /* Machine-dependent builtins go through the target hook,
		 which still takes a TREE_LIST of arguments.  */
	      tree arglist = NULL_TREE;
	      for (i = n - 1; i >= 0; i--)
		arglist = tree_cons (NULL_TREE, argarray[i], arglist);
	      ret = targetm.fold_builtin (fndecl, arglist, false);
	      if (ret)
		return ret;
	      return build_call_array (type, fn, n, argarray);
	    }
	  else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
	    {
	      /* First try the transformations that don't require consing up
		 an exp.  */
	      ret = fold_builtin_n (fndecl, argarray, n, false);
	      if (ret)
		return ret;
	    }

	  /* If we got this far, we need to build an exp.  */
	  exp = build_call_array (type, fn, n, argarray);
	  ret = fold_builtin_varargs (fndecl, exp, false);
	  return ret ? ret : exp;
	}
    }

  return build_call_array (type, fn, n, argarray);
}
11087
/* Construct a new CALL_EXPR using the tail of the argument list of EXP
   along with N new arguments specified as the "..." parameters.  SKIP
   is the number of arguments in EXP to be omitted.  This function is used
   to do varargs-to-varargs transformations.  */

static tree
rewrite_call_expr (tree exp, int skip, tree fndecl, int n, ...)
{
  int oldnargs = call_expr_nargs (exp);
  int nargs = oldnargs - skip + n;
  tree fntype = TREE_TYPE (fndecl);
  tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
  tree *buffer;

  if (n > 0)
    {
      int i, j;
      va_list ap;

      /* Gather the N new leading arguments, then append the surviving
	 tail of EXP's arguments.  */
      buffer = XALLOCAVEC (tree, nargs);
      va_start (ap, n);
      for (i = 0; i < n; i++)
	buffer[i] = va_arg (ap, tree);
      va_end (ap);
      for (j = skip; j < oldnargs; j++, i++)
	buffer[i] = CALL_EXPR_ARG (exp, j);
    }
  else
    /* No new arguments: point straight into EXP's own argument vector
       instead of copying it.  */
    buffer = CALL_EXPR_ARGP (exp) + skip;

  return fold (build_call_array (TREE_TYPE (exp), fn, nargs, buffer));
}
11120
11121 /* Validate a single argument ARG against a tree code CODE representing
11122 a type. */
11123
11124 static bool
11125 validate_arg (const_tree arg, enum tree_code code)
11126 {
11127 if (!arg)
11128 return false;
11129 else if (code == POINTER_TYPE)
11130 return POINTER_TYPE_P (TREE_TYPE (arg));
11131 else if (code == INTEGER_TYPE)
11132 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
11133 return code == TREE_CODE (TREE_TYPE (arg));
11134 }
11135
11136 /* This function validates the types of a function call argument list
11137 against a specified list of tree_codes. If the last specifier is a 0,
11138 that represents an ellipses, otherwise the last specifier must be a
11139 VOID_TYPE.
11140
11141 This is the GIMPLE version of validate_arglist. Eventually we want to
11142 completely convert builtins.c to work from GIMPLEs and the tree based
11143 validate_arglist will then be removed. */
11144
11145 bool
11146 validate_gimple_arglist (const_gimple call, ...)
11147 {
11148 enum tree_code code;
11149 bool res = 0;
11150 va_list ap;
11151 const_tree arg;
11152 size_t i;
11153
11154 va_start (ap, call);
11155 i = 0;
11156
11157 do
11158 {
11159 code = va_arg (ap, enum tree_code);
11160 switch (code)
11161 {
11162 case 0:
11163 /* This signifies an ellipses, any further arguments are all ok. */
11164 res = true;
11165 goto end;
11166 case VOID_TYPE:
11167 /* This signifies an endlink, if no arguments remain, return
11168 true, otherwise return false. */
11169 res = (i == gimple_call_num_args (call));
11170 goto end;
11171 default:
11172 /* If no parameters remain or the parameter's code does not
11173 match the specified code, return false. Otherwise continue
11174 checking any remaining arguments. */
11175 arg = gimple_call_arg (call, i++);
11176 if (!validate_arg (arg, code))
11177 goto end;
11178 break;
11179 }
11180 }
11181 while (1);
11182
11183 /* We need gotos here since we can only have one VA_CLOSE in a
11184 function. */
11185 end: ;
11186 va_end (ap);
11187
11188 return res;
11189 }
11190
11191 /* This function validates the types of a function call argument list
11192 against a specified list of tree_codes. If the last specifier is a 0,
11193 that represents an ellipses, otherwise the last specifier must be a
11194 VOID_TYPE. */
11195
11196 bool
11197 validate_arglist (const_tree callexpr, ...)
11198 {
11199 enum tree_code code;
11200 bool res = 0;
11201 va_list ap;
11202 const_call_expr_arg_iterator iter;
11203 const_tree arg;
11204
11205 va_start (ap, callexpr);
11206 init_const_call_expr_arg_iterator (callexpr, &iter);
11207
11208 do
11209 {
11210 code = va_arg (ap, enum tree_code);
11211 switch (code)
11212 {
11213 case 0:
11214 /* This signifies an ellipses, any further arguments are all ok. */
11215 res = true;
11216 goto end;
11217 case VOID_TYPE:
11218 /* This signifies an endlink, if no arguments remain, return
11219 true, otherwise return false. */
11220 res = !more_const_call_expr_args_p (&iter);
11221 goto end;
11222 default:
11223 /* If no parameters remain or the parameter's code does not
11224 match the specified code, return false. Otherwise continue
11225 checking any remaining arguments. */
11226 arg = next_const_call_expr_arg (&iter);
11227 if (!validate_arg (arg, code))
11228 goto end;
11229 break;
11230 }
11231 }
11232 while (1);
11233
11234 /* We need gotos here since we can only have one VA_CLOSE in a
11235 function. */
11236 end: ;
11237 va_end (ap);
11238
11239 return res;
11240 }
11241
/* Default target-specific builtin expander that does nothing.
   Returning NULL_RTX tells the caller that no machine-specific
   expansion was performed.  NOTE(review): presumably installed as the
   default for the target expand_builtin hook — confirm in the target
   hook definitions.  */

rtx
default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
			rtx target ATTRIBUTE_UNUSED,
			rtx subtarget ATTRIBUTE_UNUSED,
			enum machine_mode mode ATTRIBUTE_UNUSED,
			int ignore ATTRIBUTE_UNUSED)
{
  return NULL_RTX;
}
11253
11254 /* Returns true is EXP represents data that would potentially reside
11255 in a readonly section. */
11256
11257 static bool
11258 readonly_data_expr (tree exp)
11259 {
11260 STRIP_NOPS (exp);
11261
11262 if (TREE_CODE (exp) != ADDR_EXPR)
11263 return false;
11264
11265 exp = get_base_address (TREE_OPERAND (exp, 0));
11266 if (!exp)
11267 return false;
11268
11269 /* Make sure we call decl_readonly_section only for trees it
11270 can handle (since it returns true for everything it doesn't
11271 understand). */
11272 if (TREE_CODE (exp) == STRING_CST
11273 || TREE_CODE (exp) == CONSTRUCTOR
11274 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
11275 return decl_readonly_section (exp, 0);
11276 else
11277 return false;
11278 }
11279
11280 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
11281 to the call, and TYPE is its return type.
11282
11283 Return NULL_TREE if no simplification was possible, otherwise return the
11284 simplified form of the call as a tree.
11285
11286 The simplified form may be a constant or other expression which
11287 computes the same value, but in a more efficient manner (including
11288 calls to other builtin functions).
11289
11290 The call may contain arguments which need to be evaluated, but
11291 which are not useful to determine the result of the call. In
11292 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11293 COMPOUND_EXPR will be an argument which must be evaluated.
11294 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11295 COMPOUND_EXPR in the chain will contain the tree for the simplified
11296 form of the builtin function call. */
11297
11298 static tree
11299 fold_builtin_strstr (tree s1, tree s2, tree type)
11300 {
11301 if (!validate_arg (s1, POINTER_TYPE)
11302 || !validate_arg (s2, POINTER_TYPE))
11303 return NULL_TREE;
11304 else
11305 {
11306 tree fn;
11307 const char *p1, *p2;
11308
11309 p2 = c_getstr (s2);
11310 if (p2 == NULL)
11311 return NULL_TREE;
11312
11313 p1 = c_getstr (s1);
11314 if (p1 != NULL)
11315 {
11316 const char *r = strstr (p1, p2);
11317 tree tem;
11318
11319 if (r == NULL)
11320 return build_int_cst (TREE_TYPE (s1), 0);
11321
11322 /* Return an offset into the constant string argument. */
11323 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11324 s1, size_int (r - p1));
11325 return fold_convert (type, tem);
11326 }
11327
11328 /* The argument is const char *, and the result is char *, so we need
11329 a type conversion here to avoid a warning. */
11330 if (p2[0] == '\0')
11331 return fold_convert (type, s1);
11332
11333 if (p2[1] != '\0')
11334 return NULL_TREE;
11335
11336 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11337 if (!fn)
11338 return NULL_TREE;
11339
11340 /* New argument list transforming strstr(s1, s2) to
11341 strchr(s1, s2[0]). */
11342 return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
11343 }
11344 }
11345
11346 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
11347 the call, and TYPE is its return type.
11348
11349 Return NULL_TREE if no simplification was possible, otherwise return the
11350 simplified form of the call as a tree.
11351
11352 The simplified form may be a constant or other expression which
11353 computes the same value, but in a more efficient manner (including
11354 calls to other builtin functions).
11355
11356 The call may contain arguments which need to be evaluated, but
11357 which are not useful to determine the result of the call. In
11358 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11359 COMPOUND_EXPR will be an argument which must be evaluated.
11360 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11361 COMPOUND_EXPR in the chain will contain the tree for the simplified
11362 form of the builtin function call. */
11363
11364 static tree
11365 fold_builtin_strchr (tree s1, tree s2, tree type)
11366 {
11367 if (!validate_arg (s1, POINTER_TYPE)
11368 || !validate_arg (s2, INTEGER_TYPE))
11369 return NULL_TREE;
11370 else
11371 {
11372 const char *p1;
11373
11374 if (TREE_CODE (s2) != INTEGER_CST)
11375 return NULL_TREE;
11376
11377 p1 = c_getstr (s1);
11378 if (p1 != NULL)
11379 {
11380 char c;
11381 const char *r;
11382 tree tem;
11383
11384 if (target_char_cast (s2, &c))
11385 return NULL_TREE;
11386
11387 r = strchr (p1, c);
11388
11389 if (r == NULL)
11390 return build_int_cst (TREE_TYPE (s1), 0);
11391
11392 /* Return an offset into the constant string argument. */
11393 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11394 s1, size_int (r - p1));
11395 return fold_convert (type, tem);
11396 }
11397 return NULL_TREE;
11398 }
11399 }
11400
11401 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
11402 the call, and TYPE is its return type.
11403
11404 Return NULL_TREE if no simplification was possible, otherwise return the
11405 simplified form of the call as a tree.
11406
11407 The simplified form may be a constant or other expression which
11408 computes the same value, but in a more efficient manner (including
11409 calls to other builtin functions).
11410
11411 The call may contain arguments which need to be evaluated, but
11412 which are not useful to determine the result of the call. In
11413 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11414 COMPOUND_EXPR will be an argument which must be evaluated.
11415 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11416 COMPOUND_EXPR in the chain will contain the tree for the simplified
11417 form of the builtin function call. */
11418
11419 static tree
11420 fold_builtin_strrchr (tree s1, tree s2, tree type)
11421 {
11422 if (!validate_arg (s1, POINTER_TYPE)
11423 || !validate_arg (s2, INTEGER_TYPE))
11424 return NULL_TREE;
11425 else
11426 {
11427 tree fn;
11428 const char *p1;
11429
11430 if (TREE_CODE (s2) != INTEGER_CST)
11431 return NULL_TREE;
11432
11433 p1 = c_getstr (s1);
11434 if (p1 != NULL)
11435 {
11436 char c;
11437 const char *r;
11438 tree tem;
11439
11440 if (target_char_cast (s2, &c))
11441 return NULL_TREE;
11442
11443 r = strrchr (p1, c);
11444
11445 if (r == NULL)
11446 return build_int_cst (TREE_TYPE (s1), 0);
11447
11448 /* Return an offset into the constant string argument. */
11449 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11450 s1, size_int (r - p1));
11451 return fold_convert (type, tem);
11452 }
11453
11454 if (! integer_zerop (s2))
11455 return NULL_TREE;
11456
11457 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11458 if (!fn)
11459 return NULL_TREE;
11460
11461 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11462 return build_call_expr (fn, 2, s1, s2);
11463 }
11464 }
11465
11466 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11467 to the call, and TYPE is its return type.
11468
11469 Return NULL_TREE if no simplification was possible, otherwise return the
11470 simplified form of the call as a tree.
11471
11472 The simplified form may be a constant or other expression which
11473 computes the same value, but in a more efficient manner (including
11474 calls to other builtin functions).
11475
11476 The call may contain arguments which need to be evaluated, but
11477 which are not useful to determine the result of the call. In
11478 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11479 COMPOUND_EXPR will be an argument which must be evaluated.
11480 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11481 COMPOUND_EXPR in the chain will contain the tree for the simplified
11482 form of the builtin function call. */
11483
11484 static tree
11485 fold_builtin_strpbrk (tree s1, tree s2, tree type)
11486 {
11487 if (!validate_arg (s1, POINTER_TYPE)
11488 || !validate_arg (s2, POINTER_TYPE))
11489 return NULL_TREE;
11490 else
11491 {
11492 tree fn;
11493 const char *p1, *p2;
11494
11495 p2 = c_getstr (s2);
11496 if (p2 == NULL)
11497 return NULL_TREE;
11498
11499 p1 = c_getstr (s1);
11500 if (p1 != NULL)
11501 {
11502 const char *r = strpbrk (p1, p2);
11503 tree tem;
11504
11505 if (r == NULL)
11506 return build_int_cst (TREE_TYPE (s1), 0);
11507
11508 /* Return an offset into the constant string argument. */
11509 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11510 s1, size_int (r - p1));
11511 return fold_convert (type, tem);
11512 }
11513
11514 if (p2[0] == '\0')
11515 /* strpbrk(x, "") == NULL.
11516 Evaluate and ignore s1 in case it had side-effects. */
11517 return omit_one_operand (TREE_TYPE (s1), integer_zero_node, s1);
11518
11519 if (p2[1] != '\0')
11520 return NULL_TREE; /* Really call strpbrk. */
11521
11522 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11523 if (!fn)
11524 return NULL_TREE;
11525
11526 /* New argument list transforming strpbrk(s1, s2) to
11527 strchr(s1, s2[0]). */
11528 return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
11529 }
11530 }
11531
11532 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11533 to the call.
11534
11535 Return NULL_TREE if no simplification was possible, otherwise return the
11536 simplified form of the call as a tree.
11537
11538 The simplified form may be a constant or other expression which
11539 computes the same value, but in a more efficient manner (including
11540 calls to other builtin functions).
11541
11542 The call may contain arguments which need to be evaluated, but
11543 which are not useful to determine the result of the call. In
11544 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11545 COMPOUND_EXPR will be an argument which must be evaluated.
11546 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11547 COMPOUND_EXPR in the chain will contain the tree for the simplified
11548 form of the builtin function call. */
11549
11550 static tree
11551 fold_builtin_strcat (tree dst, tree src)
11552 {
11553 if (!validate_arg (dst, POINTER_TYPE)
11554 || !validate_arg (src, POINTER_TYPE))
11555 return NULL_TREE;
11556 else
11557 {
11558 const char *p = c_getstr (src);
11559
11560 /* If the string length is zero, return the dst parameter. */
11561 if (p && *p == '\0')
11562 return dst;
11563
11564 return NULL_TREE;
11565 }
11566 }
11567
11568 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11569 arguments to the call.
11570
11571 Return NULL_TREE if no simplification was possible, otherwise return the
11572 simplified form of the call as a tree.
11573
11574 The simplified form may be a constant or other expression which
11575 computes the same value, but in a more efficient manner (including
11576 calls to other builtin functions).
11577
11578 The call may contain arguments which need to be evaluated, but
11579 which are not useful to determine the result of the call. In
11580 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11581 COMPOUND_EXPR will be an argument which must be evaluated.
11582 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11583 COMPOUND_EXPR in the chain will contain the tree for the simplified
11584 form of the builtin function call. */
11585
11586 static tree
11587 fold_builtin_strncat (tree dst, tree src, tree len)
11588 {
11589 if (!validate_arg (dst, POINTER_TYPE)
11590 || !validate_arg (src, POINTER_TYPE)
11591 || !validate_arg (len, INTEGER_TYPE))
11592 return NULL_TREE;
11593 else
11594 {
11595 const char *p = c_getstr (src);
11596
11597 /* If the requested length is zero, or the src parameter string
11598 length is zero, return the dst parameter. */
11599 if (integer_zerop (len) || (p && *p == '\0'))
11600 return omit_two_operands (TREE_TYPE (dst), dst, src, len);
11601
11602 /* If the requested len is greater than or equal to the string
11603 length, call strcat. */
11604 if (TREE_CODE (len) == INTEGER_CST && p
11605 && compare_tree_int (len, strlen (p)) >= 0)
11606 {
11607 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
11608
11609 /* If the replacement _DECL isn't initialized, don't do the
11610 transformation. */
11611 if (!fn)
11612 return NULL_TREE;
11613
11614 return build_call_expr (fn, 2, dst, src);
11615 }
11616 return NULL_TREE;
11617 }
11618 }
11619
11620 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11621 to the call.
11622
11623 Return NULL_TREE if no simplification was possible, otherwise return the
11624 simplified form of the call as a tree.
11625
11626 The simplified form may be a constant or other expression which
11627 computes the same value, but in a more efficient manner (including
11628 calls to other builtin functions).
11629
11630 The call may contain arguments which need to be evaluated, but
11631 which are not useful to determine the result of the call. In
11632 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11633 COMPOUND_EXPR will be an argument which must be evaluated.
11634 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11635 COMPOUND_EXPR in the chain will contain the tree for the simplified
11636 form of the builtin function call. */
11637
11638 static tree
11639 fold_builtin_strspn (tree s1, tree s2)
11640 {
11641 if (!validate_arg (s1, POINTER_TYPE)
11642 || !validate_arg (s2, POINTER_TYPE))
11643 return NULL_TREE;
11644 else
11645 {
11646 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11647
11648 /* If both arguments are constants, evaluate at compile-time. */
11649 if (p1 && p2)
11650 {
11651 const size_t r = strspn (p1, p2);
11652 return size_int (r);
11653 }
11654
11655 /* If either argument is "", return NULL_TREE. */
11656 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11657 /* Evaluate and ignore both arguments in case either one has
11658 side-effects. */
11659 return omit_two_operands (size_type_node, size_zero_node,
11660 s1, s2);
11661 return NULL_TREE;
11662 }
11663 }
11664
11665 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11666 to the call.
11667
11668 Return NULL_TREE if no simplification was possible, otherwise return the
11669 simplified form of the call as a tree.
11670
11671 The simplified form may be a constant or other expression which
11672 computes the same value, but in a more efficient manner (including
11673 calls to other builtin functions).
11674
11675 The call may contain arguments which need to be evaluated, but
11676 which are not useful to determine the result of the call. In
11677 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11678 COMPOUND_EXPR will be an argument which must be evaluated.
11679 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11680 COMPOUND_EXPR in the chain will contain the tree for the simplified
11681 form of the builtin function call. */
11682
11683 static tree
11684 fold_builtin_strcspn (tree s1, tree s2)
11685 {
11686 if (!validate_arg (s1, POINTER_TYPE)
11687 || !validate_arg (s2, POINTER_TYPE))
11688 return NULL_TREE;
11689 else
11690 {
11691 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11692
11693 /* If both arguments are constants, evaluate at compile-time. */
11694 if (p1 && p2)
11695 {
11696 const size_t r = strcspn (p1, p2);
11697 return size_int (r);
11698 }
11699
11700 /* If the first argument is "", return NULL_TREE. */
11701 if (p1 && *p1 == '\0')
11702 {
11703 /* Evaluate and ignore argument s2 in case it has
11704 side-effects. */
11705 return omit_one_operand (size_type_node,
11706 size_zero_node, s2);
11707 }
11708
11709 /* If the second argument is "", return __builtin_strlen(s1). */
11710 if (p2 && *p2 == '\0')
11711 {
11712 tree fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11713
11714 /* If the replacement _DECL isn't initialized, don't do the
11715 transformation. */
11716 if (!fn)
11717 return NULL_TREE;
11718
11719 return build_call_expr (fn, 1, s1);
11720 }
11721 return NULL_TREE;
11722 }
11723 }
11724
11725 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11726 to the call. IGNORE is true if the value returned
11727 by the builtin will be ignored. UNLOCKED is true is true if this
11728 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
11729 the known length of the string. Return NULL_TREE if no simplification
11730 was possible. */
11731
11732 tree
11733 fold_builtin_fputs (tree arg0, tree arg1, bool ignore, bool unlocked, tree len)
11734 {
11735 /* If we're using an unlocked function, assume the other unlocked
11736 functions exist explicitly. */
11737 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
11738 : implicit_built_in_decls[BUILT_IN_FPUTC];
11739 tree const fn_fwrite = unlocked ? built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
11740 : implicit_built_in_decls[BUILT_IN_FWRITE];
11741
11742 /* If the return value is used, don't do the transformation. */
11743 if (!ignore)
11744 return NULL_TREE;
11745
11746 /* Verify the arguments in the original call. */
11747 if (!validate_arg (arg0, POINTER_TYPE)
11748 || !validate_arg (arg1, POINTER_TYPE))
11749 return NULL_TREE;
11750
11751 if (! len)
11752 len = c_strlen (arg0, 0);
11753
11754 /* Get the length of the string passed to fputs. If the length
11755 can't be determined, punt. */
11756 if (!len
11757 || TREE_CODE (len) != INTEGER_CST)
11758 return NULL_TREE;
11759
11760 switch (compare_tree_int (len, 1))
11761 {
11762 case -1: /* length is 0, delete the call entirely . */
11763 return omit_one_operand (integer_type_node, integer_zero_node, arg1);;
11764
11765 case 0: /* length is 1, call fputc. */
11766 {
11767 const char *p = c_getstr (arg0);
11768
11769 if (p != NULL)
11770 {
11771 if (fn_fputc)
11772 return build_call_expr (fn_fputc, 2,
11773 build_int_cst (NULL_TREE, p[0]), arg1);
11774 else
11775 return NULL_TREE;
11776 }
11777 }
11778 /* FALLTHROUGH */
11779 case 1: /* length is greater than 1, call fwrite. */
11780 {
11781 /* If optimizing for size keep fputs. */
11782 if (optimize_function_for_size_p (cfun))
11783 return NULL_TREE;
11784 /* New argument list transforming fputs(string, stream) to
11785 fwrite(string, 1, len, stream). */
11786 if (fn_fwrite)
11787 return build_call_expr (fn_fwrite, 4, arg0, size_one_node, len, arg1);
11788 else
11789 return NULL_TREE;
11790 }
11791 default:
11792 gcc_unreachable ();
11793 }
11794 return NULL_TREE;
11795 }
11796
11797 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
11798 produced. False otherwise. This is done so that we don't output the error
11799 or warning twice or three times. */
11800
11801 bool
11802 fold_builtin_next_arg (tree exp, bool va_start_p)
11803 {
11804 tree fntype = TREE_TYPE (current_function_decl);
11805 int nargs = call_expr_nargs (exp);
11806 tree arg;
11807
11808 if (TYPE_ARG_TYPES (fntype) == 0
11809 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
11810 == void_type_node))
11811 {
11812 error ("%<va_start%> used in function with fixed args");
11813 return true;
11814 }
11815
11816 if (va_start_p)
11817 {
11818 if (va_start_p && (nargs != 2))
11819 {
11820 error ("wrong number of arguments to function %<va_start%>");
11821 return true;
11822 }
11823 arg = CALL_EXPR_ARG (exp, 1);
11824 }
11825 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11826 when we checked the arguments and if needed issued a warning. */
11827 else
11828 {
11829 if (nargs == 0)
11830 {
11831 /* Evidently an out of date version of <stdarg.h>; can't validate
11832 va_start's second argument, but can still work as intended. */
11833 warning (0, "%<__builtin_next_arg%> called without an argument");
11834 return true;
11835 }
11836 else if (nargs > 1)
11837 {
11838 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11839 return true;
11840 }
11841 arg = CALL_EXPR_ARG (exp, 0);
11842 }
11843
11844 if (TREE_CODE (arg) == SSA_NAME)
11845 arg = SSA_NAME_VAR (arg);
11846
11847 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11848 or __builtin_next_arg (0) the first time we see it, after checking
11849 the arguments and if needed issuing a warning. */
11850 if (!integer_zerop (arg))
11851 {
11852 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11853
11854 /* Strip off all nops for the sake of the comparison. This
11855 is not quite the same as STRIP_NOPS. It does more.
11856 We must also strip off INDIRECT_EXPR for C++ reference
11857 parameters. */
11858 while (CONVERT_EXPR_P (arg)
11859 || TREE_CODE (arg) == INDIRECT_REF)
11860 arg = TREE_OPERAND (arg, 0);
11861 if (arg != last_parm)
11862 {
11863 /* FIXME: Sometimes with the tree optimizers we can get the
11864 not the last argument even though the user used the last
11865 argument. We just warn and set the arg to be the last
11866 argument so that we will get wrong-code because of
11867 it. */
11868 warning (0, "second parameter of %<va_start%> not last named argument");
11869 }
11870
11871 /* Undefined by C99 7.15.1.4p4 (va_start):
11872 "If the parameter parmN is declared with the register storage
11873 class, with a function or array type, or with a type that is
11874 not compatible with the type that results after application of
11875 the default argument promotions, the behavior is undefined."
11876 */
11877 else if (DECL_REGISTER (arg))
11878 warning (0, "undefined behaviour when second parameter of "
11879 "%<va_start%> is declared with %<register%> storage");
11880
11881 /* We want to verify the second parameter just once before the tree
11882 optimizers are run and then avoid keeping it in the tree,
11883 as otherwise we could warn even for correct code like:
11884 void foo (int i, ...)
11885 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11886 if (va_start_p)
11887 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11888 else
11889 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11890 }
11891 return false;
11892 }
11893
11894
11895 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
11896 ORIG may be null if this is a 2-argument call. We don't attempt to
11897 simplify calls with more than 3 arguments.
11898
11899 Return NULL_TREE if no simplification was possible, otherwise return the
11900 simplified form of the call as a tree. If IGNORED is true, it means that
11901 the caller does not use the returned value of the function. */
11902
11903 static tree
11904 fold_builtin_sprintf (tree dest, tree fmt, tree orig, int ignored)
11905 {
11906 tree call, retval;
11907 const char *fmt_str = NULL;
11908
11909 /* Verify the required arguments in the original call. We deal with two
11910 types of sprintf() calls: 'sprintf (str, fmt)' and
11911 'sprintf (dest, "%s", orig)'. */
11912 if (!validate_arg (dest, POINTER_TYPE)
11913 || !validate_arg (fmt, POINTER_TYPE))
11914 return NULL_TREE;
11915 if (orig && !validate_arg (orig, POINTER_TYPE))
11916 return NULL_TREE;
11917
11918 /* Check whether the format is a literal string constant. */
11919 fmt_str = c_getstr (fmt);
11920 if (fmt_str == NULL)
11921 return NULL_TREE;
11922
11923 call = NULL_TREE;
11924 retval = NULL_TREE;
11925
11926 if (!init_target_chars ())
11927 return NULL_TREE;
11928
11929 /* If the format doesn't contain % args or %%, use strcpy. */
11930 if (strchr (fmt_str, target_percent) == NULL)
11931 {
11932 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11933
11934 if (!fn)
11935 return NULL_TREE;
11936
11937 /* Don't optimize sprintf (buf, "abc", ptr++). */
11938 if (orig)
11939 return NULL_TREE;
11940
11941 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
11942 'format' is known to contain no % formats. */
11943 call = build_call_expr (fn, 2, dest, fmt);
11944 if (!ignored)
11945 retval = build_int_cst (NULL_TREE, strlen (fmt_str));
11946 }
11947
11948 /* If the format is "%s", use strcpy if the result isn't used. */
11949 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
11950 {
11951 tree fn;
11952 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11953
11954 if (!fn)
11955 return NULL_TREE;
11956
11957 /* Don't crash on sprintf (str1, "%s"). */
11958 if (!orig)
11959 return NULL_TREE;
11960
11961 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
11962 if (!ignored)
11963 {
11964 retval = c_strlen (orig, 1);
11965 if (!retval || TREE_CODE (retval) != INTEGER_CST)
11966 return NULL_TREE;
11967 }
11968 call = build_call_expr (fn, 2, dest, orig);
11969 }
11970
11971 if (call && retval)
11972 {
11973 retval = fold_convert
11974 (TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
11975 retval);
11976 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
11977 }
11978 else
11979 return call;
11980 }
11981
11982 /* Expand a call EXP to __builtin_object_size. */
11983
11984 rtx
11985 expand_builtin_object_size (tree exp)
11986 {
11987 tree ost;
11988 int object_size_type;
11989 tree fndecl = get_callee_fndecl (exp);
11990
11991 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11992 {
11993 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11994 exp, fndecl);
11995 expand_builtin_trap ();
11996 return const0_rtx;
11997 }
11998
11999 ost = CALL_EXPR_ARG (exp, 1);
12000 STRIP_NOPS (ost);
12001
12002 if (TREE_CODE (ost) != INTEGER_CST
12003 || tree_int_cst_sgn (ost) < 0
12004 || compare_tree_int (ost, 3) > 0)
12005 {
12006 error ("%Klast argument of %D is not integer constant between 0 and 3",
12007 exp, fndecl);
12008 expand_builtin_trap ();
12009 return const0_rtx;
12010 }
12011
12012 object_size_type = tree_low_cst (ost, 0);
12013
12014 return object_size_type < 2 ? constm1_rtx : const0_rtx;
12015 }
12016
12017 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12018 FCODE is the BUILT_IN_* to use.
12019 Return NULL_RTX if we failed; the caller should emit a normal call,
12020 otherwise try to get the result in TARGET, if convenient (and in
12021 mode MODE if that's convenient). */
12022
12023 static rtx
12024 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
12025 enum built_in_function fcode)
12026 {
12027 tree dest, src, len, size;
12028
12029 if (!validate_arglist (exp,
12030 POINTER_TYPE,
12031 fcode == BUILT_IN_MEMSET_CHK
12032 ? INTEGER_TYPE : POINTER_TYPE,
12033 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
12034 return NULL_RTX;
12035
12036 dest = CALL_EXPR_ARG (exp, 0);
12037 src = CALL_EXPR_ARG (exp, 1);
12038 len = CALL_EXPR_ARG (exp, 2);
12039 size = CALL_EXPR_ARG (exp, 3);
12040
12041 if (! host_integerp (size, 1))
12042 return NULL_RTX;
12043
12044 if (host_integerp (len, 1) || integer_all_onesp (size))
12045 {
12046 tree fn;
12047
12048 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
12049 {
12050 warning_at (tree_nonartificial_location (exp),
12051 0, "%Kcall to %D will always overflow destination buffer",
12052 exp, get_callee_fndecl (exp));
12053 return NULL_RTX;
12054 }
12055
12056 fn = NULL_TREE;
12057 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12058 mem{cpy,pcpy,move,set} is available. */
12059 switch (fcode)
12060 {
12061 case BUILT_IN_MEMCPY_CHK:
12062 fn = built_in_decls[BUILT_IN_MEMCPY];
12063 break;
12064 case BUILT_IN_MEMPCPY_CHK:
12065 fn = built_in_decls[BUILT_IN_MEMPCPY];
12066 break;
12067 case BUILT_IN_MEMMOVE_CHK:
12068 fn = built_in_decls[BUILT_IN_MEMMOVE];
12069 break;
12070 case BUILT_IN_MEMSET_CHK:
12071 fn = built_in_decls[BUILT_IN_MEMSET];
12072 break;
12073 default:
12074 break;
12075 }
12076
12077 if (! fn)
12078 return NULL_RTX;
12079
12080 fn = build_call_expr (fn, 3, dest, src, len);
12081 STRIP_TYPE_NOPS (fn);
12082 while (TREE_CODE (fn) == COMPOUND_EXPR)
12083 {
12084 expand_expr (TREE_OPERAND (fn, 0), const0_rtx, VOIDmode,
12085 EXPAND_NORMAL);
12086 fn = TREE_OPERAND (fn, 1);
12087 }
12088 if (TREE_CODE (fn) == CALL_EXPR)
12089 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12090 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12091 }
12092 else if (fcode == BUILT_IN_MEMSET_CHK)
12093 return NULL_RTX;
12094 else
12095 {
12096 unsigned int dest_align
12097 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
12098
12099 /* If DEST is not a pointer type, call the normal function. */
12100 if (dest_align == 0)
12101 return NULL_RTX;
12102
12103 /* If SRC and DEST are the same (and not volatile), do nothing. */
12104 if (operand_equal_p (src, dest, 0))
12105 {
12106 tree expr;
12107
12108 if (fcode != BUILT_IN_MEMPCPY_CHK)
12109 {
12110 /* Evaluate and ignore LEN in case it has side-effects. */
12111 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
12112 return expand_expr (dest, target, mode, EXPAND_NORMAL);
12113 }
12114
12115 expr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
12116 return expand_expr (expr, target, mode, EXPAND_NORMAL);
12117 }
12118
12119 /* __memmove_chk special case. */
12120 if (fcode == BUILT_IN_MEMMOVE_CHK)
12121 {
12122 unsigned int src_align
12123 = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
12124
12125 if (src_align == 0)
12126 return NULL_RTX;
12127
12128 /* If src is categorized for a readonly section we can use
12129 normal __memcpy_chk. */
12130 if (readonly_data_expr (src))
12131 {
12132 tree fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12133 if (!fn)
12134 return NULL_RTX;
12135 fn = build_call_expr (fn, 4, dest, src, len, size);
12136 STRIP_TYPE_NOPS (fn);
12137 while (TREE_CODE (fn) == COMPOUND_EXPR)
12138 {
12139 expand_expr (TREE_OPERAND (fn, 0), const0_rtx, VOIDmode,
12140 EXPAND_NORMAL);
12141 fn = TREE_OPERAND (fn, 1);
12142 }
12143 if (TREE_CODE (fn) == CALL_EXPR)
12144 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12145 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12146 }
12147 }
12148 return NULL_RTX;
12149 }
12150 }
12151
/* Emit warning if a buffer overflow is detected at compile time.
   EXP is the call to one of the object-size-checking builtins; FCODE
   identifies which one, which determines where the length and object
   size sit in the argument list.  */

static void
maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
{
  int is_strlen = 0;
  tree len, size;
  location_t loc = tree_nonartificial_location (exp);

  /* Locate the "amount written" argument LEN and the destination
     object size SIZE for this particular builtin.  */
  switch (fcode)
    {
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
    /* For __strcat_chk the warning will be emitted only if overflowing
       by at least strlen (dest) + 1 bytes.  */
    case BUILT_IN_STRCAT_CHK:
      len = CALL_EXPR_ARG (exp, 1);
      size = CALL_EXPR_ARG (exp, 2);
      /* LEN here is a string whose length must still be computed.  */
      is_strlen = 1;
      break;
    case BUILT_IN_STRNCAT_CHK:
    case BUILT_IN_STRNCPY_CHK:
      len = CALL_EXPR_ARG (exp, 2);
      size = CALL_EXPR_ARG (exp, 3);
      break;
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      len = CALL_EXPR_ARG (exp, 1);
      size = CALL_EXPR_ARG (exp, 3);
      break;
    default:
      gcc_unreachable ();
    }

  if (!len || !size)
    return;

  /* SIZE of (size_t) -1 means the object size is unknown: no check.  */
  if (! host_integerp (size, 1) || integer_all_onesp (size))
    return;

  if (is_strlen)
    {
      /* LEN is the source string: warn only when its length is a known
	 constant that is >= SIZE.  */
      len = c_strlen (len, 1);
      if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
	return;
    }
  else if (fcode == BUILT_IN_STRNCAT_CHK)
    {
      tree src = CALL_EXPR_ARG (exp, 1);
      if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
	return;
      src = c_strlen (src, 1);
      if (! src || ! host_integerp (src, 1))
	{
	  /* Bound >= SIZE but the source length is unknown: the call
	     only *might* overflow, so use a weaker warning.  */
	  warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
		      exp, get_callee_fndecl (exp));
	  return;
	}
      else if (tree_int_cst_lt (src, size))
	return;
    }
  else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
    return;

  warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
	      exp, get_callee_fndecl (exp));
}
12219
/* Emit warning if a buffer overflow is detected at compile time
   in __sprintf_chk/__vsprintf_chk calls.  EXP is the call expression;
   FCODE distinguishes the two builtins.  */

static void
maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
{
  tree dest, size, len, fmt, flag;
  const char *fmt_str;
  int nargs = call_expr_nargs (exp);

  /* Verify the required arguments in the original call:
     __sprintf_chk (dest, flag, size, fmt, ...).  */

  if (nargs < 4)
    return;
  dest = CALL_EXPR_ARG (exp, 0);
  flag = CALL_EXPR_ARG (exp, 1);
  size = CALL_EXPR_ARG (exp, 2);
  fmt = CALL_EXPR_ARG (exp, 3);

  /* SIZE of (size_t) -1 means unknown object size: nothing to check.  */
  if (! host_integerp (size, 1) || integer_all_onesp (size))
    return;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return;

  if (!init_target_chars ())
    return;

  /* If the format doesn't contain % args or %%, we know its size.  */
  if (strchr (fmt_str, target_percent) == 0)
    len = build_int_cstu (size_type_node, strlen (fmt_str));
  /* If the format is "%s" and first ... argument is a string literal,
     we know it too.  */
  else if (fcode == BUILT_IN_SPRINTF_CHK
	   && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree arg;

      /* The string operand of "%s" is argument 4 (only present in the
	 non-v variant).  */
      if (nargs < 5)
	return;
      arg = CALL_EXPR_ARG (exp, 4);
      if (! POINTER_TYPE_P (TREE_TYPE (arg)))
	return;

      len = c_strlen (arg, 1);
      if (!len || ! host_integerp (len, 1))
	return;
    }
  else
    return;

  /* LEN is what sprintf will write, excluding the terminating NUL, so
     overflow is certain whenever LEN >= SIZE.  */
  if (! tree_int_cst_lt (len, size))
    warning_at (tree_nonartificial_location (exp),
		0, "%Kcall to %D will always overflow destination buffer",
		exp, get_callee_fndecl (exp));
}
12278
12279 /* Emit warning if a free is called with address of a variable. */
12280
12281 static void
12282 maybe_emit_free_warning (tree exp)
12283 {
12284 tree arg = CALL_EXPR_ARG (exp, 0);
12285
12286 STRIP_NOPS (arg);
12287 if (TREE_CODE (arg) != ADDR_EXPR)
12288 return;
12289
12290 arg = get_base_address (TREE_OPERAND (arg, 0));
12291 if (arg == NULL || INDIRECT_REF_P (arg))
12292 return;
12293
12294 if (SSA_VAR_P (arg))
12295 warning_at (tree_nonartificial_location (exp),
12296 0, "%Kattempt to free a non-heap object %qD", exp, arg);
12297 else
12298 warning_at (tree_nonartificial_location (exp),
12299 0, "%Kattempt to free a non-heap object", exp);
12300 }
12301
/* Fold a call to __builtin_object_size with arguments PTR and OST,
   if possible.  OST is the object-size type, an integer constant
   in [0, 3].  Returns a size_t INTEGER_CST or NULL_TREE when the
   size cannot (yet) be determined.  */

tree
fold_builtin_object_size (tree ptr, tree ost)
{
  tree ret = NULL_TREE;
  int object_size_type;

  if (!validate_arg (ptr, POINTER_TYPE)
      || !validate_arg (ost, INTEGER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (ost);

  /* OST must be an integer constant 0..3, otherwise leave the call
     alone (expand_builtin_object_size will diagnose it).  */
  if (TREE_CODE (ost) != INTEGER_CST
      || tree_int_cst_sgn (ost) < 0
      || compare_tree_int (ost, 3) > 0)
    return NULL_TREE;

  object_size_type = tree_low_cst (ost, 0);

  /* __builtin_object_size doesn't evaluate side-effects in its arguments;
     if there are any side-effects, it returns (size_t) -1 for types 0 and 1
     and (size_t) 0 for types 2 and 3.  */
  if (TREE_SIDE_EFFECTS (ptr))
    return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);

  if (TREE_CODE (ptr) == ADDR_EXPR)
    /* A direct address: the size can be computed right away.  */
    ret = build_int_cstu (size_type_node,
			  compute_builtin_object_size (ptr, object_size_type));

  else if (TREE_CODE (ptr) == SSA_NAME)
    {
      unsigned HOST_WIDE_INT bytes;

      /* If object size is not known yet, delay folding until
	 later.  Maybe subsequent passes will help determining
	 it.  */
      bytes = compute_builtin_object_size (ptr, object_size_type);
      /* The "unknown" answer is -1 for types 0/1 and 0 for types 2/3;
	 only fold when we got a real size.  */
      if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2
					     ? -1 : 0))
	ret = build_int_cstu (size_type_node, bytes);
    }

  if (ret)
    {
      /* Drop the result if it doesn't fit in size_t (fit_double_type
	 returns nonzero on overflow).  */
      unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (ret);
      HOST_WIDE_INT high = TREE_INT_CST_HIGH (ret);
      if (fit_double_type (low, high, &low, &high, TREE_TYPE (ret)))
	ret = NULL_TREE;
    }

  return ret;
}
12357
/* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   DEST, SRC, LEN, and SIZE are the arguments to the call.
   IGNORE is true, if return value can be ignored.  FCODE is the BUILT_IN_*
   code of the builtin.  If MAXLEN is not NULL, it is maximum length
   passed as third argument.  */

tree
fold_builtin_memory_chk (tree fndecl,
			 tree dest, tree src, tree len, tree size,
			 tree maxlen, bool ignore,
			 enum built_in_function fcode)
{
  tree fn;

  /* __memset_chk's second argument is the integer fill value, the
     others take a source pointer.  */
  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (src,
			(fcode == BUILT_IN_MEMSET_CHK
			 ? INTEGER_TYPE : POINTER_TYPE))
      || !validate_arg (len, INTEGER_TYPE)
      || !validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;

  /* If SRC and DEST are the same (and not volatile), return DEST
     (resp. DEST+LEN for __mempcpy_chk).  */
  if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
    {
      if (fcode != BUILT_IN_MEMPCPY_CHK)
	/* Keep LEN around in case it has side effects.  */
	return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
      else
	{
	  tree temp = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
	  return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), temp);
	}
    }

  /* The object size must be a compile-time constant to reason about.  */
  if (! host_integerp (size, 1))
    return NULL_TREE;

  /* SIZE of (size_t) -1 means "unknown": the check always passes.  */
  if (! integer_all_onesp (size))
    {
      if (! host_integerp (len, 1))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
	    {
	      if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
		{
		  /* (void) __mempcpy_chk () can be optimized into
		     (void) __memcpy_chk ().  */
		  fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
		  if (!fn)
		    return NULL_TREE;

		  return build_call_expr (fn, 4, dest, src, len, size);
		}
	      return NULL_TREE;
	    }
	}
      else
	maxlen = len;

      /* The (maximum) length must fit in the object.  */
      if (tree_int_cst_lt (size, maxlen))
	return NULL_TREE;
    }

  fn = NULL_TREE;
  /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
     mem{cpy,pcpy,move,set} is available.  */
  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
      fn = built_in_decls[BUILT_IN_MEMCPY];
      break;
    case BUILT_IN_MEMPCPY_CHK:
      fn = built_in_decls[BUILT_IN_MEMPCPY];
      break;
    case BUILT_IN_MEMMOVE_CHK:
      fn = built_in_decls[BUILT_IN_MEMMOVE];
      break;
    case BUILT_IN_MEMSET_CHK:
      fn = built_in_decls[BUILT_IN_MEMSET];
      break;
    default:
      break;
    }

  if (!fn)
    return NULL_TREE;

  /* The check is statically satisfied: call the unchecked variant.  */
  return build_call_expr (fn, 3, dest, src, len);
}
12451
/* Fold a call to the __st[rp]cpy_chk builtin.
   DEST, SRC, and SIZE are the arguments to the call.
   IGNORE is true if return value can be ignored.  FCODE is the BUILT_IN_*
   code of the builtin.  If MAXLEN is not NULL, it is maximum length of
   strings passed as second argument.  */

tree
fold_builtin_stxcpy_chk (tree fndecl, tree dest, tree src, tree size,
			 tree maxlen, bool ignore,
			 enum built_in_function fcode)
{
  tree len, fn;

  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (src, POINTER_TYPE)
      || !validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
    return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);

  /* The object size must be a compile-time constant to reason about.  */
  if (! host_integerp (size, 1))
    return NULL_TREE;

  /* SIZE of (size_t) -1 means "unknown": the check always passes.  */
  if (! integer_all_onesp (size))
    {
      len = c_strlen (src, 1);
      if (! len || ! host_integerp (len, 1))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
	    {
	      if (fcode == BUILT_IN_STPCPY_CHK)
		{
		  if (! ignore)
		    return NULL_TREE;

		  /* If return value of __stpcpy_chk is ignored,
		     optimize into __strcpy_chk.  */
		  fn = built_in_decls[BUILT_IN_STRCPY_CHK];
		  if (!fn)
		    return NULL_TREE;

		  return build_call_expr (fn, 3, dest, src, size);
		}

	      if (! len || TREE_SIDE_EFFECTS (len))
		return NULL_TREE;

	      /* If c_strlen returned something, but not a constant,
		 transform __strcpy_chk into __memcpy_chk.  */
	      fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
	      if (!fn)
		return NULL_TREE;

	      /* Copy strlen (SRC) + 1 bytes, including the NUL.  */
	      len = size_binop (PLUS_EXPR, len, ssize_int (1));
	      return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
				   build_call_expr (fn, 4,
						    dest, src, len, size));
	    }
	}
      else
	maxlen = len;

      /* The string (or its bound) must be strictly shorter than SIZE
	 so the NUL fits too.  */
      if (! tree_int_cst_lt (maxlen, size))
	return NULL_TREE;
    }

  /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available.  */
  fn = built_in_decls[fcode == BUILT_IN_STPCPY_CHK
		      ? BUILT_IN_STPCPY : BUILT_IN_STRCPY];
  if (!fn)
    return NULL_TREE;

  return build_call_expr (fn, 2, dest, src);
}
12531
12532 /* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
12533 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12534 length passed as third argument. */
12535
12536 tree
12537 fold_builtin_strncpy_chk (tree dest, tree src, tree len, tree size,
12538 tree maxlen)
12539 {
12540 tree fn;
12541
12542 if (!validate_arg (dest, POINTER_TYPE)
12543 || !validate_arg (src, POINTER_TYPE)
12544 || !validate_arg (len, INTEGER_TYPE)
12545 || !validate_arg (size, INTEGER_TYPE))
12546 return NULL_TREE;
12547
12548 if (! host_integerp (size, 1))
12549 return NULL_TREE;
12550
12551 if (! integer_all_onesp (size))
12552 {
12553 if (! host_integerp (len, 1))
12554 {
12555 /* If LEN is not constant, try MAXLEN too.
12556 For MAXLEN only allow optimizing into non-_ocs function
12557 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12558 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12559 return NULL_TREE;
12560 }
12561 else
12562 maxlen = len;
12563
12564 if (tree_int_cst_lt (size, maxlen))
12565 return NULL_TREE;
12566 }
12567
12568 /* If __builtin_strncpy_chk is used, assume strncpy is available. */
12569 fn = built_in_decls[BUILT_IN_STRNCPY];
12570 if (!fn)
12571 return NULL_TREE;
12572
12573 return build_call_expr (fn, 3, dest, src, len);
12574 }
12575
12576 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12577 are the arguments to the call. */
12578
12579 static tree
12580 fold_builtin_strcat_chk (tree fndecl, tree dest, tree src, tree size)
12581 {
12582 tree fn;
12583 const char *p;
12584
12585 if (!validate_arg (dest, POINTER_TYPE)
12586 || !validate_arg (src, POINTER_TYPE)
12587 || !validate_arg (size, INTEGER_TYPE))
12588 return NULL_TREE;
12589
12590 p = c_getstr (src);
12591 /* If the SRC parameter is "", return DEST. */
12592 if (p && *p == '\0')
12593 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12594
12595 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
12596 return NULL_TREE;
12597
12598 /* If __builtin_strcat_chk is used, assume strcat is available. */
12599 fn = built_in_decls[BUILT_IN_STRCAT];
12600 if (!fn)
12601 return NULL_TREE;
12602
12603 return build_call_expr (fn, 2, dest, src);
12604 }
12605
12606 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12607 LEN, and SIZE. */
12608
12609 static tree
12610 fold_builtin_strncat_chk (tree fndecl,
12611 tree dest, tree src, tree len, tree size)
12612 {
12613 tree fn;
12614 const char *p;
12615
12616 if (!validate_arg (dest, POINTER_TYPE)
12617 || !validate_arg (src, POINTER_TYPE)
12618 || !validate_arg (size, INTEGER_TYPE)
12619 || !validate_arg (size, INTEGER_TYPE))
12620 return NULL_TREE;
12621
12622 p = c_getstr (src);
12623 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
12624 if (p && *p == '\0')
12625 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12626 else if (integer_zerop (len))
12627 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12628
12629 if (! host_integerp (size, 1))
12630 return NULL_TREE;
12631
12632 if (! integer_all_onesp (size))
12633 {
12634 tree src_len = c_strlen (src, 1);
12635 if (src_len
12636 && host_integerp (src_len, 1)
12637 && host_integerp (len, 1)
12638 && ! tree_int_cst_lt (len, src_len))
12639 {
12640 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
12641 fn = built_in_decls[BUILT_IN_STRCAT_CHK];
12642 if (!fn)
12643 return NULL_TREE;
12644
12645 return build_call_expr (fn, 3, dest, src, size);
12646 }
12647 return NULL_TREE;
12648 }
12649
12650 /* If __builtin_strncat_chk is used, assume strncat is available. */
12651 fn = built_in_decls[BUILT_IN_STRNCAT];
12652 if (!fn)
12653 return NULL_TREE;
12654
12655 return build_call_expr (fn, 3, dest, src, len);
12656 }
12657
/* Fold a call EXP to __{,v}sprintf_chk.  Return NULL_TREE if
   a normal call should be emitted rather than expanding the function
   inline.  FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK.  */

static tree
fold_builtin_sprintf_chk (tree exp, enum built_in_function fcode)
{
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;
  int nargs = call_expr_nargs (exp);

  /* Verify the required arguments in the original call:
     __sprintf_chk (dest, flag, size, fmt, ...).  */
  if (nargs < 4)
    return NULL_TREE;
  dest = CALL_EXPR_ARG (exp, 0);
  if (!validate_arg (dest, POINTER_TYPE))
    return NULL_TREE;
  flag = CALL_EXPR_ARG (exp, 1);
  if (!validate_arg (flag, INTEGER_TYPE))
    return NULL_TREE;
  size = CALL_EXPR_ARG (exp, 2);
  if (!validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;
  fmt = CALL_EXPR_ARG (exp, 3);
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  /* The object size must be a compile-time constant to reason about.  */
  if (! host_integerp (size, 1))
    return NULL_TREE;

  /* LEN, when determined below, is the number of bytes the call will
     write excluding the terminating NUL.  */
  len = NULL_TREE;

  if (!init_target_chars ())
    return NULL_TREE;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str != NULL)
    {
      /* If the format doesn't contain % args or %%, we know the size.  */
      if (strchr (fmt_str, target_percent) == 0)
	{
	  /* For plain sprintf_chk there must be no excess arguments.  */
	  if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
	    len = build_int_cstu (size_type_node, strlen (fmt_str));
	}
      /* If the format is "%s" and first ... argument is a string literal,
	 we know the size too.  */
      else if (fcode == BUILT_IN_SPRINTF_CHK
	       && strcmp (fmt_str, target_percent_s) == 0)
	{
	  tree arg;

	  if (nargs == 5)
	    {
	      arg = CALL_EXPR_ARG (exp, 4);
	      if (validate_arg (arg, POINTER_TYPE))
		{
		  len = c_strlen (arg, 1);
		  if (! len || ! host_integerp (len, 1))
		    len = NULL_TREE;
		}
	    }
	}
    }

  /* Unless SIZE is the "unknown" marker (size_t) -1, the known write
     length must fit strictly below SIZE (leaving room for the NUL).  */
  if (! integer_all_onesp (size))
    {
      if (! len || ! tree_int_cst_lt (len, size))
	return NULL_TREE;
    }

  /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      if (fmt_str == NULL)
	return NULL_TREE;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return NULL_TREE;
    }

  /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available.  */
  fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
		      ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
  if (!fn)
    return NULL_TREE;

  /* Rebuild the call, dropping the flag and size arguments.  */
  return rewrite_call_expr (exp, 4, fn, 2, dest, fmt);
}
12748
/* Fold a call EXP to {,v}snprintf.  Return NULL_TREE if
   a normal call should be emitted rather than expanding the function
   inline.  FCODE is either BUILT_IN_SNPRINTF_CHK or
   BUILT_IN_VSNPRINTF_CHK.  If MAXLEN is not NULL, it is maximum length
   passed as second argument.  */

tree
fold_builtin_snprintf_chk (tree exp, tree maxlen,
			   enum built_in_function fcode)
{
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;

  /* Verify the required arguments in the original call:
     __snprintf_chk (dest, len, flag, size, fmt, ...).  */
  if (call_expr_nargs (exp) < 5)
    return NULL_TREE;
  dest = CALL_EXPR_ARG (exp, 0);
  if (!validate_arg (dest, POINTER_TYPE))
    return NULL_TREE;
  len = CALL_EXPR_ARG (exp, 1);
  if (!validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;
  flag = CALL_EXPR_ARG (exp, 2);
  if (!validate_arg (flag, INTEGER_TYPE))
    return NULL_TREE;
  size = CALL_EXPR_ARG (exp, 3);
  if (!validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;
  fmt = CALL_EXPR_ARG (exp, 4);
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  /* The object size must be a compile-time constant to reason about.  */
  if (! host_integerp (size, 1))
    return NULL_TREE;

  /* SIZE of (size_t) -1 means "unknown": the check always passes.  */
  if (! integer_all_onesp (size))
    {
      if (! host_integerp (len, 1))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
	    return NULL_TREE;
	}
      else
	maxlen = len;

      /* snprintf writes at most MAXLEN bytes; they must fit in SIZE.  */
      if (tree_int_cst_lt (size, maxlen))
	return NULL_TREE;
    }

  if (!init_target_chars ())
    return NULL_TREE;

  /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      fmt_str = c_getstr (fmt);
      if (fmt_str == NULL)
	return NULL_TREE;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return NULL_TREE;
    }

  /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
     available.  */
  fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
		      ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
  if (!fn)
    return NULL_TREE;

  /* Rebuild the call, dropping the flag and size arguments.  */
  return rewrite_call_expr (exp, 5, fn, 3, dest, len, fmt);
}
12825
12826 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
12827 FMT and ARG are the arguments to the call; we don't fold cases with
12828 more than 2 arguments, and ARG may be null if this is a 1-argument case.
12829
12830 Return NULL_TREE if no simplification was possible, otherwise return the
12831 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12832 code of the function to be simplified. */
12833
static tree
fold_builtin_printf (tree fndecl, tree fmt, tree arg, bool ignore,
		     enum built_in_function fcode)
{
  tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (! ignore)
    return NULL_TREE;

  /* Verify the required arguments in the original call.  */
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return NULL_TREE;

  if (fcode == BUILT_IN_PRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_putchar = built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED];
      fn_puts = built_in_decls[BUILT_IN_PUTS_UNLOCKED];
    }
  else
    {
      fn_putchar = implicit_built_in_decls[BUILT_IN_PUTCHAR];
      fn_puts = implicit_built_in_decls[BUILT_IN_PUTS];
    }

  /* Format characters below are compared in the target character set;
     give up if it could not be initialized.  */
  if (!init_target_chars ())
    return NULL_TREE;

  /* Case 1: the format is exactly "%s", or contains no '%' at all.  */
  if (strcmp (fmt_str, target_percent_s) == 0
      || strchr (fmt_str, target_percent) == NULL)
    {
      const char *str;

      if (strcmp (fmt_str, target_percent_s) == 0)
	{
	  /* The va_list variants carry no inspectable ARG for "%s".  */
	  if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
	    return NULL_TREE;

	  if (!arg || !validate_arg (arg, POINTER_TYPE))
	    return NULL_TREE;

	  /* Only fold when the "%s" argument is itself a literal.  */
	  str = c_getstr (arg);
	  if (str == NULL)
	    return NULL_TREE;
	}
      else
	{
	  /* The format specifier doesn't contain any '%' characters.  */
	  if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
	      && arg)
	    return NULL_TREE;
	  str = fmt_str;
	}

      /* If the string was "", printf does nothing.  */
      if (str[0] == '\0')
	return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);

      /* If the string has length of 1, call putchar.  */
      if (str[1] == '\0')
	{
	  /* Given printf("c"), (where c is any one character,)
	     convert "c"[0] to an int and pass that to the replacement
	     function.  */
	  newarg = build_int_cst (NULL_TREE, str[0]);
	  if (fn_putchar)
	    call = build_call_expr (fn_putchar, 1, newarg);
	}
      else
	{
	  /* If the string was "string\n", call puts("string").  */
	  size_t len = strlen (str);
	  /* The cast avoids plain-char signedness issues when comparing
	     against the target's newline character.  */
	  if ((unsigned char)str[len - 1] == target_newline)
	    {
	      /* Create a NUL-terminated string that's one char shorter
		 than the original, stripping off the trailing '\n'.  */
	      char *newstr = XALLOCAVEC (char, len);
	      memcpy (newstr, str, len - 1);
	      newstr[len - 1] = 0;

	      newarg = build_string_literal (len, newstr);
	      if (fn_puts)
		call = build_call_expr (fn_puts, 1, newarg);
	    }
	  else
	    /* We'd like to arrange to call fputs(string,stdout) here,
	       but we need stdout and don't have a way to get it yet.  */
	    return NULL_TREE;
	}
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
    return NULL_TREE;

  /* If the format specifier was "%s\n", call __builtin_puts(arg).  */
  else if (strcmp (fmt_str, target_percent_s_newline) == 0)
    {
      if (!arg || !validate_arg (arg, POINTER_TYPE))
	return NULL_TREE;
      if (fn_puts)
	call = build_call_expr (fn_puts, 1, arg);
    }

  /* If the format specifier was "%c", call __builtin_putchar(arg).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg || !validate_arg (arg, INTEGER_TYPE))
	return NULL_TREE;
      if (fn_putchar)
	call = build_call_expr (fn_putchar, 1, arg);
    }

  /* CALL stays NULL_TREE when the replacement decl was unavailable;
     emit the original call in that case.  */
  if (!call)
    return NULL_TREE;

  return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), call);
}
12960
12961 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
12962 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
12963 more than 3 arguments, and ARG may be null in the 2-argument case.
12964
12965 Return NULL_TREE if no simplification was possible, otherwise return the
12966 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12967 code of the function to be simplified. */
12968
static tree
fold_builtin_fprintf (tree fndecl, tree fp, tree fmt, tree arg, bool ignore,
		      enum built_in_function fcode)
{
  tree fn_fputc, fn_fputs, call = NULL_TREE;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (! ignore)
    return NULL_TREE;

  /* Verify the required arguments in the original call.  */
  if (!validate_arg (fp, POINTER_TYPE))
    return NULL_TREE;
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return NULL_TREE;

  if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_fputc = built_in_decls[BUILT_IN_FPUTC_UNLOCKED];
      fn_fputs = built_in_decls[BUILT_IN_FPUTS_UNLOCKED];
    }
  else
    {
      fn_fputc = implicit_built_in_decls[BUILT_IN_FPUTC];
      fn_fputs = implicit_built_in_decls[BUILT_IN_FPUTS];
    }

  /* Format characters below are compared in the target charset.  */
  if (!init_target_chars ())
    return NULL_TREE;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      /* A trailing argument with a %-free format is only legitimate
	 for the va_list variants.  */
      if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
	  && arg)
	return NULL_TREE;

      /* If the format specifier was "", fprintf does nothing.  */
      if (fmt_str[0] == '\0')
	{
	  /* If FP has side-effects, just wait until gimplification is
	     done.  */
	  if (TREE_SIDE_EFFECTS (fp))
	    return NULL_TREE;

	  return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
	}

      /* When "string" doesn't contain %, replace all cases of
	 fprintf (fp, string) with fputs (string, fp).  The fputs
	 builtin will take care of special cases like length == 1.  */
      if (fn_fputs)
	call = build_call_expr (fn_fputs, 2, fmt, fp);
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
    return NULL_TREE;

  /* If the format specifier was "%s", call __builtin_fputs (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_s) == 0)
    {
      if (!arg || !validate_arg (arg, POINTER_TYPE))
	return NULL_TREE;
      if (fn_fputs)
	call = build_call_expr (fn_fputs, 2, arg, fp);
    }

  /* If the format specifier was "%c", call __builtin_fputc (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg || !validate_arg (arg, INTEGER_TYPE))
	return NULL_TREE;
      if (fn_fputc)
	call = build_call_expr (fn_fputc, 2, arg, fp);
    }

  /* CALL is still NULL_TREE if the replacement decl was unavailable.  */
  if (!call)
    return NULL_TREE;
  return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), call);
}
13058
13059 /* Initialize format string characters in the target charset. */
13060
13061 static bool
13062 init_target_chars (void)
13063 {
13064 static bool init;
13065 if (!init)
13066 {
13067 target_newline = lang_hooks.to_target_charset ('\n');
13068 target_percent = lang_hooks.to_target_charset ('%');
13069 target_c = lang_hooks.to_target_charset ('c');
13070 target_s = lang_hooks.to_target_charset ('s');
13071 if (target_newline == 0 || target_percent == 0 || target_c == 0
13072 || target_s == 0)
13073 return false;
13074
13075 target_percent_c[0] = target_percent;
13076 target_percent_c[1] = target_c;
13077 target_percent_c[2] = '\0';
13078
13079 target_percent_s[0] = target_percent;
13080 target_percent_s[1] = target_s;
13081 target_percent_s[2] = '\0';
13082
13083 target_percent_s_newline[0] = target_percent;
13084 target_percent_s_newline[1] = target_s;
13085 target_percent_s_newline[2] = target_newline;
13086 target_percent_s_newline[3] = '\0';
13087
13088 init = true;
13089 }
13090 return true;
13091 }
13092
13093 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
13094 and no overflow/underflow occurred. INEXACT is true if M was not
13095 exactly calculated. TYPE is the tree type for the result. This
13096 function assumes that you cleared the MPFR flags and then
13097 calculated M to see if anything subsequently set a flag prior to
13098 entering this function. Return NULL_TREE if any checks fail. */
13099
13100 static tree
13101 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
13102 {
13103 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13104 overflow/underflow occurred. If -frounding-math, proceed iff the
13105 result of calling FUNC was exact. */
13106 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
13107 && (!flag_rounding_math || !inexact))
13108 {
13109 REAL_VALUE_TYPE rr;
13110
13111 real_from_mpfr (&rr, m, type, GMP_RNDN);
13112 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
13113 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13114 but the mpft_t is not, then we underflowed in the
13115 conversion. */
13116 if (real_isfinite (&rr)
13117 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
13118 {
13119 REAL_VALUE_TYPE rmode;
13120
13121 real_convert (&rmode, TYPE_MODE (type), &rr);
13122 /* Proceed iff the specified mode can hold the value. */
13123 if (real_identical (&rmode, &rr))
13124 return build_real (type, rmode);
13125 }
13126 }
13127 return NULL_TREE;
13128 }
13129
13130 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
13131 FUNC on it and return the resulting value as a tree with type TYPE.
13132 If MIN and/or MAX are not NULL, then the supplied ARG must be
13133 within those bounds. If INCLUSIVE is true, then MIN/MAX are
13134 acceptable values, otherwise they are not. The mpfr precision is
13135 set to the precision of TYPE. We assume that function FUNC returns
13136 zero if the result could be calculated exactly within the requested
13137 precision. */
13138
13139 static tree
13140 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
13141 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
13142 bool inclusive)
13143 {
13144 tree result = NULL_TREE;
13145
13146 STRIP_NOPS (arg);
13147
13148 /* To proceed, MPFR must exactly represent the target floating point
13149 format, which only happens when the target base equals two. */
13150 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13151 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
13152 {
13153 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13154
13155 if (real_isfinite (ra)
13156 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
13157 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
13158 {
13159 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13160 const int prec = fmt->p;
13161 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13162 int inexact;
13163 mpfr_t m;
13164
13165 mpfr_init2 (m, prec);
13166 mpfr_from_real (m, ra, GMP_RNDN);
13167 mpfr_clear_flags ();
13168 inexact = func (m, m, rnd);
13169 result = do_mpfr_ckconv (m, type, inexact);
13170 mpfr_clear (m);
13171 }
13172 }
13173
13174 return result;
13175 }
13176
13177 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
13178 FUNC on it and return the resulting value as a tree with type TYPE.
13179 The mpfr precision is set to the precision of TYPE. We assume that
13180 function FUNC returns zero if the result could be calculated
13181 exactly within the requested precision. */
13182
13183 static tree
13184 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
13185 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13186 {
13187 tree result = NULL_TREE;
13188
13189 STRIP_NOPS (arg1);
13190 STRIP_NOPS (arg2);
13191
13192 /* To proceed, MPFR must exactly represent the target floating point
13193 format, which only happens when the target base equals two. */
13194 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13195 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13196 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13197 {
13198 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13199 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13200
13201 if (real_isfinite (ra1) && real_isfinite (ra2))
13202 {
13203 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13204 const int prec = fmt->p;
13205 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13206 int inexact;
13207 mpfr_t m1, m2;
13208
13209 mpfr_inits2 (prec, m1, m2, NULL);
13210 mpfr_from_real (m1, ra1, GMP_RNDN);
13211 mpfr_from_real (m2, ra2, GMP_RNDN);
13212 mpfr_clear_flags ();
13213 inexact = func (m1, m1, m2, rnd);
13214 result = do_mpfr_ckconv (m1, type, inexact);
13215 mpfr_clears (m1, m2, NULL);
13216 }
13217 }
13218
13219 return result;
13220 }
13221
13222 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
13223 FUNC on it and return the resulting value as a tree with type TYPE.
13224 The mpfr precision is set to the precision of TYPE. We assume that
13225 function FUNC returns zero if the result could be calculated
13226 exactly within the requested precision. */
13227
13228 static tree
13229 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
13230 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13231 {
13232 tree result = NULL_TREE;
13233
13234 STRIP_NOPS (arg1);
13235 STRIP_NOPS (arg2);
13236 STRIP_NOPS (arg3);
13237
13238 /* To proceed, MPFR must exactly represent the target floating point
13239 format, which only happens when the target base equals two. */
13240 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13241 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13242 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
13243 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
13244 {
13245 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13246 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13247 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
13248
13249 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
13250 {
13251 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13252 const int prec = fmt->p;
13253 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13254 int inexact;
13255 mpfr_t m1, m2, m3;
13256
13257 mpfr_inits2 (prec, m1, m2, m3, NULL);
13258 mpfr_from_real (m1, ra1, GMP_RNDN);
13259 mpfr_from_real (m2, ra2, GMP_RNDN);
13260 mpfr_from_real (m3, ra3, GMP_RNDN);
13261 mpfr_clear_flags ();
13262 inexact = func (m1, m1, m2, m3, rnd);
13263 result = do_mpfr_ckconv (m1, type, inexact);
13264 mpfr_clears (m1, m2, m3, NULL);
13265 }
13266 }
13267
13268 return result;
13269 }
13270
13271 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
13272 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
13273 If ARG_SINP and ARG_COSP are NULL then the result is returned
13274 as a complex value.
13275 The type is taken from the type of ARG and is used for setting the
13276 precision of the calculation and results. */
13277
static tree
do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
{
  tree const type = TREE_TYPE (arg);
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST
      && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);

      if (real_isfinite (ra))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  tree result_s, result_c;
	  int inexact;
	  mpfr_t m, ms, mc;

	  /* Compute both values with a single mpfr_sin_cos call; the
	     one INEXACT flag covers both results.  */
	  mpfr_inits2 (prec, m, ms, mc, NULL);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  mpfr_clear_flags ();
	  inexact = mpfr_sin_cos (ms, mc, m, rnd);
	  result_s = do_mpfr_ckconv (ms, type, inexact);
	  result_c = do_mpfr_ckconv (mc, type, inexact);
	  mpfr_clears (m, ms, mc, NULL);
	  /* Fold only if BOTH values are exactly representable.  */
	  if (result_s && result_c)
	    {
	      /* If we are to return in a complex value do so.  */
	      if (!arg_sinp && !arg_cosp)
		return build_complex (build_complex_type (type),
				      result_c, result_s);

	      /* Dereference the sin/cos pointer arguments.  */
	      arg_sinp = build_fold_indirect_ref (arg_sinp);
	      arg_cosp = build_fold_indirect_ref (arg_cosp);
	      /* Proceed if valid pointer type were passed in.  */
	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
		  && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
		{
		  /* Set the values; the stores must be flagged as
		     side effects so they are not optimized away.  */
		  result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
					  result_s);
		  TREE_SIDE_EFFECTS (result_s) = 1;
		  result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
					  result_c);
		  TREE_SIDE_EFFECTS (result_c) = 1;
		  /* Combine the assignments into a compound expr.  */
		  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						    result_s, result_c));
		}
	    }
	}
    }
  return result;
}
13340
13341 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
13342 two-argument mpfr order N Bessel function FUNC on them and return
13343 the resulting value as a tree with type TYPE. The mpfr precision
13344 is set to the precision of TYPE. We assume that function FUNC
13345 returns zero if the result could be calculated exactly within the
13346 requested precision. */
13347 static tree
13348 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
13349 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
13350 const REAL_VALUE_TYPE *min, bool inclusive)
13351 {
13352 tree result = NULL_TREE;
13353
13354 STRIP_NOPS (arg1);
13355 STRIP_NOPS (arg2);
13356
13357 /* To proceed, MPFR must exactly represent the target floating point
13358 format, which only happens when the target base equals two. */
13359 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13360 && host_integerp (arg1, 0)
13361 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13362 {
13363 const HOST_WIDE_INT n = tree_low_cst(arg1, 0);
13364 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
13365
13366 if (n == (long)n
13367 && real_isfinite (ra)
13368 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
13369 {
13370 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13371 const int prec = fmt->p;
13372 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13373 int inexact;
13374 mpfr_t m;
13375
13376 mpfr_init2 (m, prec);
13377 mpfr_from_real (m, ra, GMP_RNDN);
13378 mpfr_clear_flags ();
13379 inexact = func (m, n, m, rnd);
13380 result = do_mpfr_ckconv (m, type, inexact);
13381 mpfr_clear (m);
13382 }
13383 }
13384
13385 return result;
13386 }
13387
13388 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
13389 the pointer *(ARG_QUO) and return the result. The type is taken
13390 from the type of ARG0 and is used for setting the precision of the
13391 calculation and results. */
13392
static tree
do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
{
  tree const type = TREE_TYPE (arg0);
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
    {
      const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
      const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);

      if (real_isfinite (ra0) && real_isfinite (ra1))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  tree result_rem;
	  long integer_quo;
	  mpfr_t m0, m1;

	  mpfr_inits2 (prec, m0, m1, NULL);
	  mpfr_from_real (m0, ra0, GMP_RNDN);
	  mpfr_from_real (m1, ra1, GMP_RNDN);
	  mpfr_clear_flags ();
	  mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
	  /* Remquo is independent of the rounding mode, so pass
	     inexact=0 to do_mpfr_ckconv().  */
	  result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
	  mpfr_clears (m0, m1, NULL);
	  if (result_rem)
	    {
	      /* MPFR calculates quo in the host's long so it may
		 return more bits in quo than the target int can hold
		 if sizeof(host long) > sizeof(target int).  This can
		 happen even for native compilers in LP64 mode.  In
		 these cases, modulo the quo value with the largest
		 number that the target int can hold while leaving one
		 bit for the sign.  */
	      if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
		integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));

	      /* Dereference the quo pointer argument.  */
	      arg_quo = build_fold_indirect_ref (arg_quo);
	      /* Proceed iff a valid pointer type was passed in.  */
	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
		{
		  /* Set the value; flag the store as a side effect so
		     it is not optimized away.  */
		  tree result_quo = fold_build2 (MODIFY_EXPR,
						 TREE_TYPE (arg_quo), arg_quo,
						 build_int_cst (NULL, integer_quo));
		  TREE_SIDE_EFFECTS (result_quo) = 1;
		  /* Combine the quo assignment with the rem.  */
		  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						    result_quo, result_rem));
		}
	    }
	}
    }
  return result;
}
13460
13461 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
13462 resulting value as a tree with type TYPE. The mpfr precision is
13463 set to the precision of TYPE. We assume that this mpfr function
13464 returns zero if the result could be calculated exactly within the
13465 requested precision. In addition, the integer pointer represented
13466 by ARG_SG will be dereferenced and set to the appropriate signgam
13467 (-1,1) value. */
13468
static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
	 negative integer.  */
      if (real_isfinite (ra)
	  && ra->cl != rvc_zero
	  && !(real_isneg(ra) && real_isinteger(ra, TYPE_MODE (type))))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  int inexact, sg;
	  mpfr_t m;
	  tree result_lg;

	  /* mpfr_lgamma also yields the sign of gamma(ARG) in SG.  */
	  mpfr_init2 (m, prec);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  mpfr_clear_flags ();
	  inexact = mpfr_lgamma (m, &sg, m, rnd);
	  result_lg = do_mpfr_ckconv (m, type, inexact);
	  mpfr_clear (m);
	  if (result_lg)
	    {
	      tree result_sg;

	      /* Dereference the arg_sg pointer argument.  */
	      arg_sg = build_fold_indirect_ref (arg_sg);
	      /* Assign the signgam value into *arg_sg.  */
	      result_sg = fold_build2 (MODIFY_EXPR,
				       TREE_TYPE (arg_sg), arg_sg,
				       build_int_cst (NULL, sg));
	      /* Flag the store as a side effect so it is kept.  */
	      TREE_SIDE_EFFECTS (result_sg) = 1;
	      /* Combine the signgam assignment with the lgamma result.  */
	      result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						result_sg, result_lg));
	    }
	}
    }

  return result;
}
13525
13526 /* FIXME tuples.
13527 The functions below provide an alternate interface for folding
13528 builtin function calls presented as GIMPLE_CALL statements rather
13529 than as CALL_EXPRs. The folded result is still expressed as a
13530 tree. There is too much code duplication in the handling of
13531 varargs functions, and a more intrusive re-factoring would permit
13532 better sharing of code between the tree and statement-based
13533 versions of these functions. */
13534
13535 /* Construct a new CALL_EXPR using the tail of the argument list of STMT
13536 along with N new arguments specified as the "..." parameters. SKIP
13537 is the number of arguments in STMT to be omitted. This function is used
13538 to do varargs-to-varargs transformations. */
13539
13540 static tree
13541 gimple_rewrite_call_expr (gimple stmt, int skip, tree fndecl, int n, ...)
13542 {
13543 int oldnargs = gimple_call_num_args (stmt);
13544 int nargs = oldnargs - skip + n;
13545 tree fntype = TREE_TYPE (fndecl);
13546 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
13547 tree *buffer;
13548 int i, j;
13549 va_list ap;
13550
13551 buffer = XALLOCAVEC (tree, nargs);
13552 va_start (ap, n);
13553 for (i = 0; i < n; i++)
13554 buffer[i] = va_arg (ap, tree);
13555 va_end (ap);
13556 for (j = skip; j < oldnargs; j++, i++)
13557 buffer[i] = gimple_call_arg (stmt, j);
13558
13559 return fold (build_call_array (TREE_TYPE (fntype), fn, nargs, buffer));
13560 }
13561
13562 /* Fold a call STMT to __{,v}sprintf_chk. Return NULL_TREE if
13563 a normal call should be emitted rather than expanding the function
13564 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
13565
static tree
gimple_fold_builtin_sprintf_chk (gimple stmt, enum built_in_function fcode)
{
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;
  int nargs = gimple_call_num_args (stmt);

  /* Verify the required arguments in the original call.  */
  if (nargs < 4)
    return NULL_TREE;
  dest = gimple_call_arg (stmt, 0);
  if (!validate_arg (dest, POINTER_TYPE))
    return NULL_TREE;
  flag = gimple_call_arg (stmt, 1);
  if (!validate_arg (flag, INTEGER_TYPE))
    return NULL_TREE;
  size = gimple_call_arg (stmt, 2);
  if (!validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;
  fmt = gimple_call_arg (stmt, 3);
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  /* The destination object size must be a compile-time constant.  */
  if (! host_integerp (size, 1))
    return NULL_TREE;

  /* LEN, when computable, is the number of bytes the call will write
     (not counting the terminating NUL).  */
  len = NULL_TREE;

  if (!init_target_chars ())
    return NULL_TREE;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str != NULL)
    {
      /* If the format doesn't contain % args or %%, we know the size.  */
      if (strchr (fmt_str, target_percent) == 0)
	{
	  if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
	    len = build_int_cstu (size_type_node, strlen (fmt_str));
	}
      /* If the format is "%s" and first ... argument is a string literal,
	 we know the size too.  */
      else if (fcode == BUILT_IN_SPRINTF_CHK
	       && strcmp (fmt_str, target_percent_s) == 0)
	{
	  tree arg;

	  if (nargs == 5)
	    {
	      arg = gimple_call_arg (stmt, 4);
	      if (validate_arg (arg, POINTER_TYPE))
		{
		  len = c_strlen (arg, 1);
		  if (! len || ! host_integerp (len, 1))
		    len = NULL_TREE;
		}
	    }
	}
    }

  /* An all-ones SIZE means the object size is unknown; otherwise the
     output must provably fit (LEN strictly less than SIZE leaves room
     for the NUL).  */
  if (! integer_all_onesp (size))
    {
      if (! len || ! tree_int_cst_lt (len, size))
	return NULL_TREE;
    }

  /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      if (fmt_str == NULL)
	return NULL_TREE;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return NULL_TREE;
    }

  /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available.  */
  fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
		      ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
  if (!fn)
    return NULL_TREE;

  /* Rewrite the call, dropping the FLAG and SIZE arguments.  */
  return gimple_rewrite_call_expr (stmt, 4, fn, 2, dest, fmt);
}
13652
13653 /* Fold a call STMT to {,v}snprintf. Return NULL_TREE if
13654 a normal call should be emitted rather than expanding the function
13655 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13656 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13657 passed as second argument. */
13658
tree
gimple_fold_builtin_snprintf_chk (gimple stmt, tree maxlen,
                                  enum built_in_function fcode)
{
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;

  /* Verify the required arguments in the original call.  */
  if (gimple_call_num_args (stmt) < 5)
    return NULL_TREE;
  dest = gimple_call_arg (stmt, 0);
  if (!validate_arg (dest, POINTER_TYPE))
    return NULL_TREE;
  len = gimple_call_arg (stmt, 1);
  if (!validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;
  flag = gimple_call_arg (stmt, 2);
  if (!validate_arg (flag, INTEGER_TYPE))
    return NULL_TREE;
  size = gimple_call_arg (stmt, 3);
  if (!validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;
  fmt = gimple_call_arg (stmt, 4);
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  /* The destination object size must be a compile-time constant.  */
  if (! host_integerp (size, 1))
    return NULL_TREE;

  /* An all-ones SIZE means the object size is unknown; otherwise
     verify the buffer is provably large enough.  */
  if (! integer_all_onesp (size))
    {
      if (! host_integerp (len, 1))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
	    return NULL_TREE;
	}
      else
	maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
	return NULL_TREE;
    }

  if (!init_target_chars ())
    return NULL_TREE;

  /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      fmt_str = c_getstr (fmt);
      if (fmt_str == NULL)
	return NULL_TREE;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return NULL_TREE;
    }

  /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
     available.  */
  fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
		      ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
  if (!fn)
    return NULL_TREE;

  /* Rewrite the call, dropping the FLAG and SIZE arguments.  */
  return gimple_rewrite_call_expr (stmt, 5, fn, 3, dest, len, fmt);
}
13729
13730 /* Builtins with folding operations that operate on "..." arguments
13731 need special handling; we need to store the arguments in a convenient
13732 data structure before attempting any folding. Fortunately there are
13733 only a few builtins that fall into this category. FNDECL is the
13734 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
13735 result of the function call is ignored. */
13736
13737 static tree
13738 gimple_fold_builtin_varargs (tree fndecl, gimple stmt, bool ignore ATTRIBUTE_UNUSED)
13739 {
13740 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
13741 tree ret = NULL_TREE;
13742
13743 switch (fcode)
13744 {
13745 case BUILT_IN_SPRINTF_CHK:
13746 case BUILT_IN_VSPRINTF_CHK:
13747 ret = gimple_fold_builtin_sprintf_chk (stmt, fcode);
13748 break;
13749
13750 case BUILT_IN_SNPRINTF_CHK:
13751 case BUILT_IN_VSNPRINTF_CHK:
13752 ret = gimple_fold_builtin_snprintf_chk (stmt, NULL_TREE, fcode);
13753
13754 default:
13755 break;
13756 }
13757 if (ret)
13758 {
13759 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
13760 TREE_NO_WARNING (ret) = 1;
13761 return ret;
13762 }
13763 return NULL_TREE;
13764 }
13765
13766 /* A wrapper function for builtin folding that prevents warnings for
13767 "statement without effect" and the like, caused by removing the
13768 call node earlier than the warning is generated. */
13769
13770 tree
13771 fold_call_stmt (gimple stmt, bool ignore)
13772 {
13773 tree ret = NULL_TREE;
13774 tree fndecl = gimple_call_fndecl (stmt);
13775 if (fndecl
13776 && TREE_CODE (fndecl) == FUNCTION_DECL
13777 && DECL_BUILT_IN (fndecl)
13778 && !gimple_call_va_arg_pack_p (stmt))
13779 {
13780 int nargs = gimple_call_num_args (stmt);
13781
13782 if (avoid_folding_inline_builtin (fndecl))
13783 return NULL_TREE;
13784 /* FIXME: Don't use a list in this interface. */
13785 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
13786 {
13787 tree arglist = NULL_TREE;
13788 int i;
13789 for (i = nargs - 1; i >= 0; i--)
13790 arglist = tree_cons (NULL_TREE, gimple_call_arg (stmt, i), arglist);
13791 return targetm.fold_builtin (fndecl, arglist, ignore);
13792 }
13793 else
13794 {
13795 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
13796 {
13797 tree args[MAX_ARGS_TO_FOLD_BUILTIN];
13798 int i;
13799 for (i = 0; i < nargs; i++)
13800 args[i] = gimple_call_arg (stmt, i);
13801 ret = fold_builtin_n (fndecl, args, nargs, ignore);
13802 }
13803 if (!ret)
13804 ret = gimple_fold_builtin_varargs (fndecl, stmt, ignore);
13805 if (ret)
13806 {
13807 /* Propagate location information from original call to
13808 expansion of builtin. Otherwise things like
13809 maybe_emit_chk_warning, that operate on the expansion
13810 of a builtin, will use the wrong location information. */
13811 if (gimple_has_location (stmt))
13812 {
13813 tree realret = ret;
13814 if (TREE_CODE (ret) == NOP_EXPR)
13815 realret = TREE_OPERAND (ret, 0);
13816 if (CAN_HAVE_LOCATION_P (realret)
13817 && !EXPR_HAS_LOCATION (realret))
13818 SET_EXPR_LOCATION (realret, gimple_location (stmt));
13819 return realret;
13820 }
13821 return ret;
13822 }
13823 }
13824 }
13825 return NULL_TREE;
13826 }