tree-ssa-sccvn.c (get_or_alloc_constant_value_id): Allocate a new entry only if needed.
[gcc.git] / gcc / builtins.c
1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
4 Free Software Foundation, Inc.
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "real.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "gimple.h"
31 #include "flags.h"
32 #include "regs.h"
33 #include "hard-reg-set.h"
34 #include "except.h"
35 #include "function.h"
36 #include "insn-config.h"
37 #include "expr.h"
38 #include "optabs.h"
39 #include "libfuncs.h"
40 #include "recog.h"
41 #include "output.h"
42 #include "typeclass.h"
43 #include "toplev.h"
44 #include "predict.h"
45 #include "tm_p.h"
46 #include "target.h"
47 #include "langhooks.h"
48 #include "basic-block.h"
49 #include "tree-mudflap.h"
50 #include "tree-flow.h"
51 #include "value-prof.h"
52 #include "diagnostic.h"
53
54 #ifndef SLOW_UNALIGNED_ACCESS
55 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
56 #endif
57
58 #ifndef PAD_VARARGS_DOWN
59 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
60 #endif
61 static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));
62
63 /* Define the names of the builtin function types and codes. */
64 const char *const built_in_class_names[4]
65 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
66
67 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
68 const char * built_in_names[(int) END_BUILTINS] =
69 {
70 #include "builtins.def"
71 };
72 #undef DEF_BUILTIN
73
74 /* Setup an array of _DECL trees, make sure each element is
75 initialized to NULL_TREE. */
76 tree built_in_decls[(int) END_BUILTINS];
77 /* Declarations used when constructing the builtin implicitly in the compiler.
78 It may be NULL_TREE when this is invalid (for instance runtime is not
79 required to implement the function call in all cases). */
80 tree implicit_built_in_decls[(int) END_BUILTINS];
81
82 static const char *c_getstr (tree);
83 static rtx c_readstr (const char *, enum machine_mode);
84 static int target_char_cast (tree, char *);
85 static rtx get_memory_rtx (tree, tree);
86 static int apply_args_size (void);
87 static int apply_result_size (void);
88 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
89 static rtx result_vector (int, rtx);
90 #endif
91 static void expand_builtin_update_setjmp_buf (rtx);
92 static void expand_builtin_prefetch (tree);
93 static rtx expand_builtin_apply_args (void);
94 static rtx expand_builtin_apply_args_1 (void);
95 static rtx expand_builtin_apply (rtx, rtx, rtx);
96 static void expand_builtin_return (rtx);
97 static enum type_class type_to_class (tree);
98 static rtx expand_builtin_classify_type (tree);
99 static void expand_errno_check (tree, rtx);
100 static rtx expand_builtin_mathfn (tree, rtx, rtx);
101 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
102 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
103 static rtx expand_builtin_interclass_mathfn (tree, rtx, rtx);
104 static rtx expand_builtin_sincos (tree);
105 static rtx expand_builtin_cexpi (tree, rtx, rtx);
106 static rtx expand_builtin_int_roundingfn (tree, rtx);
107 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
108 static rtx expand_builtin_args_info (tree);
109 static rtx expand_builtin_next_arg (void);
110 static rtx expand_builtin_va_start (tree);
111 static rtx expand_builtin_va_end (tree);
112 static rtx expand_builtin_va_copy (tree);
113 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
114 static rtx expand_builtin_strcmp (tree, rtx);
115 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
116 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
117 static rtx expand_builtin_memcpy (tree, rtx);
118 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
119 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
120 enum machine_mode, int);
121 static rtx expand_builtin_strcpy (tree, rtx);
122 static rtx expand_builtin_strcpy_args (tree, tree, rtx);
123 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
124 static rtx expand_builtin_strncpy (tree, rtx);
125 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
126 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
127 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
128 static rtx expand_builtin_bzero (tree);
129 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
130 static rtx expand_builtin_alloca (tree, rtx);
131 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
132 static rtx expand_builtin_frame_address (tree, tree);
133 static tree stabilize_va_list_loc (location_t, tree, int);
134 static rtx expand_builtin_expect (tree, rtx);
135 static tree fold_builtin_constant_p (tree);
136 static tree fold_builtin_expect (location_t, tree, tree);
137 static tree fold_builtin_classify_type (tree);
138 static tree fold_builtin_strlen (location_t, tree);
139 static tree fold_builtin_inf (location_t, tree, int);
140 static tree fold_builtin_nan (tree, tree, int);
141 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
142 static bool validate_arg (const_tree, enum tree_code code);
143 static bool integer_valued_real_p (tree);
144 static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
145 static bool readonly_data_expr (tree);
146 static rtx expand_builtin_fabs (tree, rtx, rtx);
147 static rtx expand_builtin_signbit (tree, rtx);
148 static tree fold_builtin_sqrt (location_t, tree, tree);
149 static tree fold_builtin_cbrt (location_t, tree, tree);
150 static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
151 static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
152 static tree fold_builtin_cos (location_t, tree, tree, tree);
153 static tree fold_builtin_cosh (location_t, tree, tree, tree);
154 static tree fold_builtin_tan (tree, tree);
155 static tree fold_builtin_trunc (location_t, tree, tree);
156 static tree fold_builtin_floor (location_t, tree, tree);
157 static tree fold_builtin_ceil (location_t, tree, tree);
158 static tree fold_builtin_round (location_t, tree, tree);
159 static tree fold_builtin_int_roundingfn (location_t, tree, tree);
160 static tree fold_builtin_bitop (tree, tree);
161 static tree fold_builtin_memory_op (location_t, tree, tree, tree, tree, bool, int);
162 static tree fold_builtin_strchr (location_t, tree, tree, tree);
163 static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
164 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
165 static tree fold_builtin_strcmp (location_t, tree, tree);
166 static tree fold_builtin_strncmp (location_t, tree, tree, tree);
167 static tree fold_builtin_signbit (location_t, tree, tree);
168 static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
169 static tree fold_builtin_isascii (location_t, tree);
170 static tree fold_builtin_toascii (location_t, tree);
171 static tree fold_builtin_isdigit (location_t, tree);
172 static tree fold_builtin_fabs (location_t, tree, tree);
173 static tree fold_builtin_abs (location_t, tree, tree);
174 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
175 enum tree_code);
176 static tree fold_builtin_n (location_t, tree, tree *, int, bool);
177 static tree fold_builtin_0 (location_t, tree, bool);
178 static tree fold_builtin_1 (location_t, tree, tree, bool);
179 static tree fold_builtin_2 (location_t, tree, tree, tree, bool);
180 static tree fold_builtin_3 (location_t, tree, tree, tree, tree, bool);
181 static tree fold_builtin_4 (location_t, tree, tree, tree, tree, tree, bool);
182 static tree fold_builtin_varargs (location_t, tree, tree, bool);
183
184 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
185 static tree fold_builtin_strstr (location_t, tree, tree, tree);
186 static tree fold_builtin_strrchr (location_t, tree, tree, tree);
187 static tree fold_builtin_strcat (location_t, tree, tree);
188 static tree fold_builtin_strncat (location_t, tree, tree, tree);
189 static tree fold_builtin_strspn (location_t, tree, tree);
190 static tree fold_builtin_strcspn (location_t, tree, tree);
191 static tree fold_builtin_sprintf (location_t, tree, tree, tree, int);
192
193 static rtx expand_builtin_object_size (tree);
194 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
195 enum built_in_function);
196 static void maybe_emit_chk_warning (tree, enum built_in_function);
197 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
198 static void maybe_emit_free_warning (tree);
199 static tree fold_builtin_object_size (tree, tree);
200 static tree fold_builtin_strcat_chk (location_t, tree, tree, tree, tree);
201 static tree fold_builtin_strncat_chk (location_t, tree, tree, tree, tree, tree);
202 static tree fold_builtin_sprintf_chk (location_t, tree, enum built_in_function);
203 static tree fold_builtin_printf (location_t, tree, tree, tree, bool, enum built_in_function);
204 static tree fold_builtin_fprintf (location_t, tree, tree, tree, tree, bool,
205 enum built_in_function);
206 static bool init_target_chars (void);
207
208 static unsigned HOST_WIDE_INT target_newline;
209 static unsigned HOST_WIDE_INT target_percent;
210 static unsigned HOST_WIDE_INT target_c;
211 static unsigned HOST_WIDE_INT target_s;
212 static char target_percent_c[3];
213 static char target_percent_s[3];
214 static char target_percent_s_newline[4];
215 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
216 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
217 static tree do_mpfr_arg2 (tree, tree, tree,
218 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
219 static tree do_mpfr_arg3 (tree, tree, tree, tree,
220 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
221 static tree do_mpfr_sincos (tree, tree, tree);
222 static tree do_mpfr_bessel_n (tree, tree, tree,
223 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
224 const REAL_VALUE_TYPE *, bool);
225 static tree do_mpfr_remquo (tree, tree, tree);
226 static tree do_mpfr_lgamma_r (tree, tree, tree);
227
/* Return true if NAME is spelled like a built-in, i.e. carries the
   reserved "__builtin_" or "__sync_" prefix.  */

bool
is_builtin_name (const char *name)
{
  static const char builtin_prefix[] = "__builtin_";
  static const char sync_prefix[] = "__sync_";

  return (strncmp (name, builtin_prefix, sizeof builtin_prefix - 1) == 0
	  || strncmp (name, sync_prefix, sizeof sync_prefix - 1) == 0);
}
239
240
241 /* Return true if DECL is a function symbol representing a built-in. */
242
243 bool
244 is_builtin_fn (tree decl)
245 {
246 return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
247 }
248
249
250 /* Return true if NODE should be considered for inline expansion regardless
251 of the optimization level. This means whenever a function is invoked with
252 its "internal" name, which normally contains the prefix "__builtin". */
253
254 static bool
255 called_as_built_in (tree node)
256 {
257 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
258 we want the name used to call the function, not the name it
259 will have. */
260 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
261 return is_builtin_name (name);
262 }
263
/* Return the alignment in bits of EXP, an object.
   Don't return more than MAX_ALIGN no matter what; ALIGN is the initial
   guessed alignment, e.g. from type alignment.  */

int
get_object_alignment (tree exp, unsigned int align, unsigned int max_align)
{
  /* INNER tracks the alignment guaranteed by the component reference
     structure (bit position and variable offsets); it only ever
     decreases from MAX_ALIGN.  */
  unsigned int inner;

  inner = max_align;
  if (handled_component_p (exp))
    {
      HOST_WIDE_INT bitsize, bitpos;
      tree offset;
      enum machine_mode mode;
      int unsignedp, volatilep;

      /* Strip the component reference down to its base object,
	 collecting the constant bit position and any variable byte
	 offset on the way.  */
      exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
				 &mode, &unsignedp, &volatilep, true);
      /* BITPOS & -BITPOS isolates the lowest set bit, i.e. the largest
	 power of two dividing the bit position.  */
      if (bitpos)
	inner = MIN (inner, (unsigned) (bitpos & -bitpos));
      /* Walk a (possibly nested PLUS_EXPR) sum of offset terms, and for
	 each term keep only the alignment it provably preserves.  */
      while (offset)
	{
	  tree next_offset;

	  if (TREE_CODE (offset) == PLUS_EXPR)
	    {
	      next_offset = TREE_OPERAND (offset, 0);
	      offset = TREE_OPERAND (offset, 1);
	    }
	  else
	    next_offset = NULL;
	  if (host_integerp (offset, 1))
	    {
	      /* Any overflow in calculating offset_bits won't change
		 the alignment.  */
	      unsigned offset_bits
		= ((unsigned) tree_low_cst (offset, 1) * BITS_PER_UNIT);

	      if (offset_bits)
		inner = MIN (inner, (offset_bits & -offset_bits));
	    }
	  else if (TREE_CODE (offset) == MULT_EXPR
		   && host_integerp (TREE_OPERAND (offset, 1), 1))
	    {
	      /* A variable offset scaled by a constant factor is at
		 least as aligned as the factor itself.  Any overflow in
		 calculating offset_factor won't change the alignment.  */
	      unsigned offset_factor
		= ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
		   * BITS_PER_UNIT);

	      if (offset_factor)
		inner = MIN (inner, (offset_factor & -offset_factor));
	    }
	  else
	    {
	      /* Unanalyzable term: all bets are off beyond byte
		 alignment.  */
	      inner = MIN (inner, BITS_PER_UNIT);
	      break;
	    }
	  offset = next_offset;
	}
    }
  /* A CONST_DECL stands for its (constant) initializer.  */
  if (TREE_CODE (exp) == CONST_DECL)
    exp = DECL_INITIAL (exp);
  if (DECL_P (exp)
      && TREE_CODE (exp) != LABEL_DECL)
    align = MIN (inner, DECL_ALIGN (exp));
#ifdef CONSTANT_ALIGNMENT
  else if (CONSTANT_CLASS_P (exp))
    align = MIN (inner, (unsigned)CONSTANT_ALIGNMENT (exp, align));
#endif
  else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR
	   || TREE_CODE (exp) == INDIRECT_REF)
    align = MIN (TYPE_ALIGN (TREE_TYPE (exp)), inner);
  else
    align = MIN (align, inner);
  return MIN (align, max_align);
}
342
343 /* Returns true iff we can trust that alignment information has been
344 calculated properly. */
345
346 bool
347 can_trust_pointer_alignment (void)
348 {
349 /* We rely on TER to compute accurate alignment information. */
350 return (optimize && flag_tree_ter);
351 }
352
/* Return the alignment in bits of EXP, a pointer valued expression.
   But don't return more than MAX_ALIGN no matter what.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

int
get_pointer_alignment (tree exp, unsigned int max_align)
{
  unsigned int align, inner;

  /* If alignment info is not trustworthy, claim no known alignment.  */
  if (!can_trust_pointer_alignment ())
    return 0;

  if (!POINTER_TYPE_P (TREE_TYPE (exp)))
    return 0;

  /* Initial guess: the alignment of the pointed-to type.  */
  align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
  align = MIN (align, max_align);

  /* Peel the expression one level per iteration until we hit something
     we cannot analyze further.  */
  while (1)
    {
      switch (TREE_CODE (exp))
	{
	CASE_CONVERT:
	  /* Look through pointer conversions; a conversion from a
	     non-pointer type ends the analysis.  */
	  exp = TREE_OPERAND (exp, 0);
	  if (! POINTER_TYPE_P (TREE_TYPE (exp)))
	    return align;

	  inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
	  align = MIN (inner, max_align);
	  break;

	case POINTER_PLUS_EXPR:
	  /* If sum of pointer + int, restrict our maximum alignment to that
	     imposed by the integer.  If not, we can't do any better than
	     ALIGN.  */
	  if (! host_integerp (TREE_OPERAND (exp, 1), 1))
	    return align;

	  /* Halve MAX_ALIGN until the constant addend is a multiple of
	     it, since only then is the alignment preserved.  */
	  while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
		  & (max_align / BITS_PER_UNIT - 1))
		 != 0)
	    max_align >>= 1;

	  exp = TREE_OPERAND (exp, 0);
	  break;

	case ADDR_EXPR:
	  /* See what we are pointing at and look at its alignment.  */
	  return get_object_alignment (TREE_OPERAND (exp, 0), align, max_align);

	default:
	  return align;
	}
    }
}
412
/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  tree offset_node;
  HOST_WIDE_INT offset;
  int max;
  const char *ptr;

  STRIP_NOPS (src);
  /* For a conditional expression, recurse on both arms; if they agree
     on a constant length we can use it no matter which arm is taken.  */
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  /* A COMPOUND_EXPR yields its second operand, so its length is the
     length of that operand.  */
  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  src = string_constant (src, &offset_node);
  if (src == 0)
    return NULL_TREE;

  /* MAX is the last valid index into the array (the slot that should
     hold the terminating NUL).  */
  max = TREE_STRING_LENGTH (src) - 1;
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      int i;

      for (i = 0; i < max; i++)
	if (ptr[i] == 0)
	  return NULL_TREE;

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  This would perhaps not be valid if we were dealing
	 with named arrays in addition to literal string constants.  */

      return size_diffop_loc (input_location, size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.
     -1 marks an offset that does not fit and is handled as out of
     bounds below.  */
  if (offset_node == 0)
    offset = 0;
  else if (! host_integerp (offset_node, 0))
    offset = -1;
  else
    offset = tree_low_cst (offset_node, 0);

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (! TREE_NO_WARNING (src))
	{
	  warning (0, "offset outside bounds of constant string");
	  TREE_NO_WARNING (src) = 1;
	}
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return ssize_int (strlen (ptr + offset));
}
511
512 /* Return a char pointer for a C string if it is a string constant
513 or sum of string constant and integer constant. */
514
515 static const char *
516 c_getstr (tree src)
517 {
518 tree offset_node;
519
520 src = string_constant (src, &offset_node);
521 if (src == 0)
522 return 0;
523
524 if (offset_node == 0)
525 return TREE_STRING_POINTER (src);
526 else if (!host_integerp (offset_node, 1)
527 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
528 return 0;
529
530 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
531 }
532
/* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, enum machine_mode mode)
{
  /* C holds the low (c[0]) and high (c[1]) halves of the target value,
     each one HOST_WIDE_INT wide.  */
  HOST_WIDE_INT c[2];
  HOST_WIDE_INT ch;
  unsigned int i, j;

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);

  c[0] = 0;
  c[1] = 0;
  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      /* Map host byte index I to target bit position J, accounting for
	 the target's byte order within words and word order within the
	 value.  */
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
	/* Mixed endianness: flip the byte position within its word.  */
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;
      gcc_assert (j <= 2 * HOST_BITS_PER_WIDE_INT);

      /* CH doubles as a "still inside the string" flag: once the
	 terminating NUL has been read it stays zero, so the remaining
	 target bytes are filled with zeros.  */
      if (ch)
	ch = (unsigned char) str[i];
      c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }
  return immed_double_const (c[0], c[1], mode);
}
565
566 /* Cast a target constant CST to target CHAR and if that value fits into
567 host char type, return zero and put that value into variable pointed to by
568 P. */
569
570 static int
571 target_char_cast (tree cst, char *p)
572 {
573 unsigned HOST_WIDE_INT val, hostval;
574
575 if (!host_integerp (cst, 1)
576 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
577 return 1;
578
579 val = tree_low_cst (cst, 1);
580 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
581 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
582
583 hostval = val;
584 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
585 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
586
587 if (val != hostval)
588 return 1;
589
590 *p = hostval;
591 return 0;
592 }
593
594 /* Similar to save_expr, but assumes that arbitrary code is not executed
595 in between the multiple evaluations. In particular, we assume that a
596 non-addressable local variable will not be modified. */
597
598 static tree
599 builtin_save_expr (tree exp)
600 {
601 if (TREE_ADDRESSABLE (exp) == 0
602 && (TREE_CODE (exp) == PARM_DECL
603 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp))))
604 return exp;
605
606 return save_expr (exp);
607 }
608
/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE, which is either
   BUILT_IN_FRAME_ADDRESS or BUILT_IN_RETURN_ADDRESS).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;

  /* Establish the starting frame address.  A target may provide an
     explicit expression for the innermost frame; otherwise derive it
     from a frame pointer register.  */
#ifdef INITIAL_FRAME_ADDRESS_RTX
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
#else
  rtx tem;

  /* For a zero count with __builtin_return_address, we don't care what
     frame address we return, because target-specific definitions will
     override us.  Therefore frame pointer elimination is OK, and using
     the soft frame pointer is OK.

     For a nonzero count, or a zero count with __builtin_frame_address,
     we require a stable offset from the current frame pointer to the
     previous one, so we must use the hard frame pointer, and
     we must disable frame pointer elimination.  */
  if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    tem = frame_pointer_rtx;
  else
    {
      tem = hard_frame_pointer_rtx;

      /* Tell reload not to eliminate the frame pointer.  */
      crtl->accesses_prior_frames = 1;
    }
#endif

  /* Some machines need special handling before we can access
     arbitrary frames.  For example, on the SPARC, we must first flush
     all register windows to the stack.  */
#ifdef SETUP_FRAME_ADDRESSES
  if (count > 0)
    SETUP_FRAME_ADDRESSES ();
#endif

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
  if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;
#endif

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
#ifdef DYNAMIC_CHAIN_ADDRESS
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      /* Keep the loaded chain pointer in a register for the next hop.  */
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
#ifdef FRAME_ADDR_RTX
    return FRAME_ADDR_RTX (tem);
#else
    return tem;
#endif

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  /* Default: the return address lives one word above the frame
     address.  */
  tem = memory_address (Pmode,
			plus_constant (tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
692
/* Alias set used for setjmp buffer.  -1 means "not yet allocated"; a
   fresh alias set is created lazily on first use.  */
static alias_set_type setjmp_alias_set = -1;
695
696 /* Construct the leading half of a __builtin_setjmp call. Control will
697 return to RECEIVER_LABEL. This is also called directly by the SJLJ
698 exception handling code. */
699
700 void
701 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
702 {
703 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
704 rtx stack_save;
705 rtx mem;
706
707 if (setjmp_alias_set == -1)
708 setjmp_alias_set = new_alias_set ();
709
710 buf_addr = convert_memory_address (Pmode, buf_addr);
711
712 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
713
714 /* We store the frame pointer and the address of receiver_label in
715 the buffer and use the rest of it for the stack save area, which
716 is machine-dependent. */
717
718 mem = gen_rtx_MEM (Pmode, buf_addr);
719 set_mem_alias_set (mem, setjmp_alias_set);
720 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
721
722 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
723 set_mem_alias_set (mem, setjmp_alias_set);
724
725 emit_move_insn (validize_mem (mem),
726 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
727
728 stack_save = gen_rtx_MEM (sa_mode,
729 plus_constant (buf_addr,
730 2 * GET_MODE_SIZE (Pmode)));
731 set_mem_alias_set (stack_save, setjmp_alias_set);
732 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
733
734 /* If there is further processing to do, do it. */
735 #ifdef HAVE_builtin_setjmp_setup
736 if (HAVE_builtin_setjmp_setup)
737 emit_insn (gen_builtin_setjmp_setup (buf_addr));
738 #endif
739
740 /* Tell optimize_save_area_alloca that extra work is going to
741 need to go on during alloca. */
742 cfun->calls_setjmp = 1;
743
744 /* We have a nonlocal label. */
745 cfun->has_nonlocal_label = 1;
746 }
747
/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.

   Emits the code executed when control arrives at RECEIVER_LABEL via a
   longjmp: restore the frame pointer (and argument pointer if needed)
   and let the target run any receiver-specific fixups.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
{
  rtx chain;

  /* Clobber the FP when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = targetm.calls.static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  Skipped when the target's nonlocal_goto
     pattern handles the restore itself.  */
#ifdef HAVE_nonlocal_goto
  if (! HAVE_nonlocal_goto)
#endif
    {
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
      /* This might change the hard frame pointer in ways that aren't
	 apparent to early optimization passes, so force a clobber.  */
      emit_clobber (hard_frame_pointer_rtx);
    }

#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      /* If the argument pointer is eliminable in favor of the hard
	 frame pointer, no restore is needed; scan the target's
	 elimination table for that pair.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }
#endif

  /* Let the target emit receiver-specific code; prefer the setjmp
     receiver pattern, then the nonlocal-goto receiver, else nothing.  */
#ifdef HAVE_builtin_setjmp_receiver
  if (HAVE_builtin_setjmp_receiver)
    emit_insn (gen_builtin_setjmp_receiver (receiver_label));
  else
#endif
#ifdef HAVE_nonlocal_goto_receiver
    if (HAVE_nonlocal_goto_receiver)
      emit_insn (gen_nonlocal_goto_receiver ());
    else
#endif
      { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}
818
/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.

   BUF_ADDR is the jmp_buf laid out by expand_builtin_setjmp_setup
   (frame pointer, receiver label, stack save area); VALUE must be
   const1_rtx.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack, insn, last;
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to current
     function  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  /* Remember the last insn before expansion so the scan below can
     verify it never walks past what we emitted here.  */
  last = get_last_insn ();
#ifdef HAVE_builtin_longjmp
  if (HAVE_builtin_longjmp)
    emit_insn (gen_builtin_longjmp (buf_addr));
  else
#endif
    {
      /* Load the three saved words back out of the jmp_buf.  */
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
#ifdef HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
      else
#endif
	{
	  /* Copy the label out before clobbering the frame, since LAB
	     is a memory reference relative to the old frame.  */
	  lab = copy_to_reg (lab);

	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}
906
/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area (a two-word block holding the saved
   frame pointer followed by the saved stack pointer).  Emits the transfer
   of control and returns const0_rtx; returns NULL_RTX on a malformed
   argument list.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp, insn;

  /* Both arguments must be pointers.  */
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it
     was based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  /* First word of the save area is the frame pointer, second is the
     stack pointer (in the target's nonlocal save-area mode).  */
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

#ifdef HAVE_nonlocal_goto
  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (HAVE_nonlocal_goto)
    emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
#endif
    {
      r_label = copy_to_reg (r_label);

      /* Clobber memory and the frame pointer so nothing is cached
	 across the control transfer.  */
      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.
	 This sets the actual hard register used for the frame pointer
	 to the location of the function's incoming static chain info.
	 The non-local goto handler will then adjust it to contain the
	 proper value and reload the argument pointer, if needed.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp, NULL_RTX);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  Stop at an intervening CALL, if any.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}
991
/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to correspond to the current
   stack pointer.  BUF_ADDR is the (already expanded) address of the
   buffer.  */

static void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  enum machine_mode sa_mode = Pmode;
  rtx stack_save;


  /* Pick the mode in which the stack pointer was saved: prefer the
     save_stack_nonlocal insn's operand mode, but an explicit
     STACK_SAVEAREA_MODE definition overrides it.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    sa_mode = insn_data[(int) CODE_FOR_save_stack_nonlocal].operand[0].mode;
#endif
#ifdef STACK_SAVEAREA_MODE
  sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
#endif

  /* The saved stack pointer lives two pointer-words into the buffer,
     after the frame pointer and label slots.  */
  stack_save
    = gen_rtx_MEM (sa_mode,
		   memory_address
		   (sa_mode,
		    plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));

#ifdef HAVE_setjmp
  if (HAVE_setjmp)
    emit_insn (gen_setjmp ());
#endif

  emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
}
1025
1026 /* Expand a call to __builtin_prefetch. For a target that does not support
1027 data prefetch, evaluate the memory address argument in case it has side
1028 effects. */
1029
1030 static void
1031 expand_builtin_prefetch (tree exp)
1032 {
1033 tree arg0, arg1, arg2;
1034 int nargs;
1035 rtx op0, op1, op2;
1036
1037 if (!validate_arglist (exp, POINTER_TYPE, 0))
1038 return;
1039
1040 arg0 = CALL_EXPR_ARG (exp, 0);
1041
1042 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1043 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1044 locality). */
1045 nargs = call_expr_nargs (exp);
1046 if (nargs > 1)
1047 arg1 = CALL_EXPR_ARG (exp, 1);
1048 else
1049 arg1 = integer_zero_node;
1050 if (nargs > 2)
1051 arg2 = CALL_EXPR_ARG (exp, 2);
1052 else
1053 arg2 = build_int_cst (NULL_TREE, 3);
1054
1055 /* Argument 0 is an address. */
1056 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1057
1058 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1059 if (TREE_CODE (arg1) != INTEGER_CST)
1060 {
1061 error ("second argument to %<__builtin_prefetch%> must be a constant");
1062 arg1 = integer_zero_node;
1063 }
1064 op1 = expand_normal (arg1);
1065 /* Argument 1 must be either zero or one. */
1066 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1067 {
1068 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1069 " using zero");
1070 op1 = const0_rtx;
1071 }
1072
1073 /* Argument 2 (locality) must be a compile-time constant int. */
1074 if (TREE_CODE (arg2) != INTEGER_CST)
1075 {
1076 error ("third argument to %<__builtin_prefetch%> must be a constant");
1077 arg2 = integer_zero_node;
1078 }
1079 op2 = expand_normal (arg2);
1080 /* Argument 2 must be 0, 1, 2, or 3. */
1081 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1082 {
1083 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1084 op2 = const0_rtx;
1085 }
1086
1087 #ifdef HAVE_prefetch
1088 if (HAVE_prefetch)
1089 {
1090 if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
1091 (op0,
1092 insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
1093 || (GET_MODE (op0) != Pmode))
1094 {
1095 op0 = convert_memory_address (Pmode, op0);
1096 op0 = force_reg (Pmode, op0);
1097 }
1098 emit_insn (gen_prefetch (op0, op1, op2));
1099 }
1100 #endif
1101
1102 /* Don't do anything with direct references to volatile memory, but
1103 generate code to handle other side effects. */
1104 if (!MEM_P (op0) && side_effects_p (op0))
1105 emit_insn (op0);
1106 }
1107
/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  The returned MEM has BLKmode, a zero alias set and
   no size, but carries attributes derived from EXP where possible.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;
  HOST_WIDE_INT off;

  /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
     from its expression, for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     If it is an ADDR_EXPR, use the operand.  Otherwise, dereference it if
     we can.  First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Peel a constant positive offset from &OBJ + CST so attributes can be
     derived from OBJ itself; OFF remembers the displacement.  */
  off = 0;
  if (TREE_CODE (exp) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
      && host_integerp (TREE_OPERAND (exp, 1), 0)
      && (off = tree_low_cst (TREE_OPERAND (exp, 1), 0)) > 0)
    exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  else if (TREE_CODE (exp) == ADDR_EXPR)
    exp = TREE_OPERAND (exp, 0);
  else if (POINTER_TYPE_P (TREE_TYPE (exp)))
    exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
  else
    exp = NULL;

  /* Honor attributes derived from exp, except for the alias set
     (as builtin stringops may alias with anything) and the size
     (as stringops may access multiple array elements).  */
  if (exp)
    {
      set_mem_attributes (mem, exp, 0);

      if (off)
	mem = adjust_automodify_address_nv (mem, BLKmode, NULL, off);

      /* Allow the string and memory builtins to overflow from one
	 field into another, see http://gcc.gnu.org/PR23561.
	 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
	 memory accessed by the string or memory builtin will fit
	 within the field.  */
      if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
	{
	  tree mem_expr = MEM_EXPR (mem);
	  /* OFFSET/LENGTH track the access extent; -1 means unknown.  */
	  HOST_WIDE_INT offset = -1, length = -1;
	  tree inner = exp;

	  /* Strip wrappers to reach the innermost COMPONENT_REF.  */
	  while (TREE_CODE (inner) == ARRAY_REF
		 || CONVERT_EXPR_P (inner)
		 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
		 || TREE_CODE (inner) == SAVE_EXPR)
	    inner = TREE_OPERAND (inner, 0);

	  gcc_assert (TREE_CODE (inner) == COMPONENT_REF);

	  if (MEM_OFFSET (mem)
	      && CONST_INT_P (MEM_OFFSET (mem)))
	    offset = INTVAL (MEM_OFFSET (mem));

	  if (offset >= 0 && len && host_integerp (len, 0))
	    length = tree_low_cst (len, 0);

	  /* Walk outward through the COMPONENT_REF chain, dropping
	     components until the access provably fits in a field.  */
	  while (TREE_CODE (inner) == COMPONENT_REF)
	    {
	      tree field = TREE_OPERAND (inner, 1);
	      gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
	      gcc_assert (field == TREE_OPERAND (mem_expr, 1));

	      /* Bitfields are generally not byte-addressable.  */
	      gcc_assert (!DECL_BIT_FIELD (field)
			  || ((tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
			       % BITS_PER_UNIT) == 0
			      && host_integerp (DECL_SIZE (field), 0)
			      && (TREE_INT_CST_LOW (DECL_SIZE (field))
				  % BITS_PER_UNIT) == 0));

	      /* If we can prove that the memory starting at XEXP (mem, 0) and
		 ending at XEXP (mem, 0) + LENGTH will fit into this field, we
		 can keep the COMPONENT_REF in MEM_EXPR.  But be careful with
		 fields without DECL_SIZE_UNIT like flexible array members.  */
	      if (length >= 0
		  && DECL_SIZE_UNIT (field)
		  && host_integerp (DECL_SIZE_UNIT (field), 0))
		{
		  HOST_WIDE_INT size
		    = TREE_INT_CST_LOW (DECL_SIZE_UNIT (field));
		  /* The first two comparisons also guard the sum against
		     overflow.  */
		  if (offset <= size
		      && length <= size
		      && offset + length <= size)
		    break;
		}

	      if (offset >= 0
		  && host_integerp (DECL_FIELD_OFFSET (field), 0))
		offset += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
			  + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
			    / BITS_PER_UNIT;
	      else
		{
		  offset = -1;
		  length = -1;
		}

	      mem_expr = TREE_OPERAND (mem_expr, 0);
	      inner = TREE_OPERAND (inner, 0);
	    }

	  if (mem_expr == NULL)
	    offset = -1;
	  if (mem_expr != MEM_EXPR (mem))
	    {
	      set_mem_expr (mem, mem_expr);
	      set_mem_offset (mem, offset >= 0 ? GEN_INT (offset) : NULL_RTX);
	    }
	}
      set_mem_alias_set (mem, 0);
      set_mem_size (mem, NULL_RTX);
    }

  return mem;
}
1243 \f
/* Built-in functions to perform an untyped call and return.  The two
   tables below are filled in lazily the first time apply_args_size or
   apply_result_size is called.  */

/* For each register that may be used for calling a function, this
   gives a mode used to copy the register's value.  VOIDmode indicates
   the register is not used for calling a function.  If the machine
   has register windows, this gives only the outbound registers.
   INCOMING_REGNO gives the corresponding inbound register.  */
static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];

/* For each register that may be used for returning values, this gives
   a mode used to copy the register's value.  VOIDmode indicates the
   register is not used for returning values.  If the machine has
   register windows, this gives only the outbound registers.
   INCOMING_REGNO gives the corresponding inbound register.  */
static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
1259
1260 /* Return the size required for the block returned by __builtin_apply_args,
1261 and initialize apply_args_mode. */
1262
1263 static int
1264 apply_args_size (void)
1265 {
1266 static int size = -1;
1267 int align;
1268 unsigned int regno;
1269 enum machine_mode mode;
1270
1271 /* The values computed by this function never change. */
1272 if (size < 0)
1273 {
1274 /* The first value is the incoming arg-pointer. */
1275 size = GET_MODE_SIZE (Pmode);
1276
1277 /* The second value is the structure value address unless this is
1278 passed as an "invisible" first argument. */
1279 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1280 size += GET_MODE_SIZE (Pmode);
1281
1282 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1283 if (FUNCTION_ARG_REGNO_P (regno))
1284 {
1285 mode = reg_raw_mode[regno];
1286
1287 gcc_assert (mode != VOIDmode);
1288
1289 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1290 if (size % align != 0)
1291 size = CEIL (size, align) * align;
1292 size += GET_MODE_SIZE (mode);
1293 apply_args_mode[regno] = mode;
1294 }
1295 else
1296 {
1297 apply_args_mode[regno] = VOIDmode;
1298 }
1299 }
1300 return size;
1301 }
1302
1303 /* Return the size required for the block returned by __builtin_apply,
1304 and initialize apply_result_mode. */
1305
1306 static int
1307 apply_result_size (void)
1308 {
1309 static int size = -1;
1310 int align, regno;
1311 enum machine_mode mode;
1312
1313 /* The values computed by this function never change. */
1314 if (size < 0)
1315 {
1316 size = 0;
1317
1318 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1319 if (FUNCTION_VALUE_REGNO_P (regno))
1320 {
1321 mode = reg_raw_mode[regno];
1322
1323 gcc_assert (mode != VOIDmode);
1324
1325 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1326 if (size % align != 0)
1327 size = CEIL (size, align) * align;
1328 size += GET_MODE_SIZE (mode);
1329 apply_result_mode[regno] = mode;
1330 }
1331 else
1332 apply_result_mode[regno] = VOIDmode;
1333
1334 /* Allow targets that use untyped_call and untyped_return to override
1335 the size so that machine-specific information can be stored here. */
1336 #ifdef APPLY_RESULT_SIZE
1337 size = APPLY_RESULT_SIZE;
1338 #endif
1339 }
1340 return size;
1341 }
1342
1343 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1344 /* Create a vector describing the result block RESULT. If SAVEP is true,
1345 the result block is used to save the values; otherwise it is used to
1346 restore the values. */
1347
1348 static rtx
1349 result_vector (int savep, rtx result)
1350 {
1351 int regno, size, align, nelts;
1352 enum machine_mode mode;
1353 rtx reg, mem;
1354 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1355
1356 size = nelts = 0;
1357 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1358 if ((mode = apply_result_mode[regno]) != VOIDmode)
1359 {
1360 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1361 if (size % align != 0)
1362 size = CEIL (size, align) * align;
1363 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1364 mem = adjust_address (result, mode, size);
1365 savevec[nelts++] = (savep
1366 ? gen_rtx_SET (VOIDmode, mem, reg)
1367 : gen_rtx_SET (VOIDmode, reg, mem));
1368 size += GET_MODE_SIZE (mode);
1369 }
1370 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1371 }
1372 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1373
/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  Allocates a stack
   block laid out as: arg pointer, optional structure value address,
   then each incoming argument register (per apply_args_mode), and
   returns the address of that block in a register.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  enum machine_mode mode;
  rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.
     The offsets here must mirror apply_args_size's layout.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
#ifdef STACK_GROWS_DOWNWARD
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  tem
    = force_operand (plus_constant (tem, crtl->args.pretend_args_size),
		     NULL_RTX);
#endif
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
		      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}
1434
/* __builtin_apply_args returns block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;
    rtx seq;

    /* Expand into a detached sequence so the insns can be relocated.  */
    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
	&& REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}
1479
/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.
   FUNCTION is the callee address, ARGUMENTS the address of a block
   built by __builtin_apply_args, and ARGSIZE the number of bytes of
   stack arguments to copy.  Returns the address (in ptr_mode) of a
   block holding the saved return-value registers.  */

static rtx
expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
{
  int size, align, regno;
  enum machine_mode mode;
  rtx incoming_args, result, reg, dest, src, call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;
  rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);

  arguments = convert_memory_address (Pmode, arguments);

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
#ifndef STACK_GROWS_DOWNWARD
  incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
				       incoming_args, 0, OPTAB_LIB_WIDEN);
#endif

  /* Push a new argument block and copy the arguments.  Do not allow
     the (potential) memcpy call below to interfere with our stack
     manipulations.  */
  do_pending_stack_adjust ();
  NO_DEFER_POP;

  /* Save the stack with nonlocal if available.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
  else
#endif
    emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);

  /* Allocate a block of memory onto the stack and copy the memory
     arguments to the outgoing arguments address.  */
  allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);

  /* Set DRAP flag to true, even though allocate_dynamic_stack_space
     may have already set current_function_calls_alloca to true.
     current_function_calls_alloca won't be set if argsize is zero,
     so we have to guarantee need_drap is true here.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  dest = virtual_outgoing_args_rtx;
#ifndef STACK_GROWS_DOWNWARD
  if (CONST_INT_P (argsize))
    dest = plus_constant (dest, -INTVAL (argsize));
  else
    dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
#endif
  dest = gen_rtx_MEM (BLKmode, dest);
  set_mem_align (dest, PARM_BOUNDARY);
  src = gen_rtx_MEM (BLKmode, incoming_args);
  set_mem_align (src, PARM_BOUNDARY);
  emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);

  /* Refer to the argument block.  */
  apply_args_size ();
  arguments = gen_rtx_MEM (BLKmode, arguments);
  set_mem_align (arguments, PARM_BOUNDARY);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.
     Offsets must mirror apply_args_size's layout.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, regno);
	emit_move_insn (reg, adjust_address (arguments, mode, size));
	use_reg (&call_fusage, reg);
	size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value, adjust_address (arguments, Pmode, size));
      emit_move_insn (struct_value, value);
      if (REG_P (struct_value))
	use_reg (&call_fusage, struct_value);
      size += GET_MODE_SIZE (Pmode);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
#ifdef HAVE_untyped_call
  if (HAVE_untyped_call)
    emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
				      result, result_vector (1, result)));
  else
#endif
#ifdef HAVE_call_value
  if (HAVE_call_value)
    {
      rtx valreg = 0;

      /* Locate the unique return register.  It is not possible to
	 express a call that sets more than one return register using
	 call_value; use untyped_call for that.  In fact, untyped_call
	 only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if ((mode = apply_result_mode[regno]) != VOIDmode)
	  {
	    gcc_assert (!valreg); /* HAVE_untyped_call required.  */

	    valreg = gen_rtx_REG (mode, regno);
	  }

      emit_call_insn (GEN_CALL_VALUE (valreg,
				      gen_rtx_MEM (FUNCTION_MODE, function),
				      const0_rtx, NULL_RTX, const0_rtx));

      emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
    }
  else
#endif
    gcc_unreachable ();

  /* Find the CALL insn we just emitted, and attach the register usage
     information.  */
  call_insn = last_call_insn ();
  add_function_usage_to (call_insn, call_fusage);

  /* Restore the stack.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
  else
#endif
    emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);

  OK_DEFER_POP;

  /* Return the address of the result block.  */
  result = copy_addr_to_reg (XEXP (result, 0));
  return convert_memory_address (ptr_mode, result);
}
1643
/* Perform an untyped return.  RESULT is the address of a block saved by
   __builtin_apply; its registers are reloaded and the function returns
   directly to its caller.  */

static void
expand_builtin_return (rtx result)
{
  int size, align, regno;
  enum machine_mode mode;
  rtx reg;
  rtx call_fusage = 0;

  result = convert_memory_address (Pmode, result);

  /* Make sure apply_result_mode is initialized.  */
  apply_result_size ();
  result = gen_rtx_MEM (BLKmode, result);

#ifdef HAVE_untyped_return
  if (HAVE_untyped_return)
    {
      emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
      emit_barrier ();
      return;
    }
#endif

  /* Restore the return value and note that each value is used.
     Offsets must mirror apply_result_size's layout.  */
  size = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
	emit_move_insn (reg, adjust_address (result, mode, size));

	/* Collect the USEs in a side sequence so they can all be
	   emitted together just before the return.  */
	push_to_sequence (call_fusage);
	emit_use (reg);
	call_fusage = get_insns ();
	end_sequence ();
	size += GET_MODE_SIZE (mode);
      }

  /* Put the USE insns before the return.  */
  emit_insn (call_fusage);

  /* Return whatever values was restored by jumping directly to the end
     of the function.  */
  expand_naked_return ();
}
1693
1694 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1695
1696 static enum type_class
1697 type_to_class (tree type)
1698 {
1699 switch (TREE_CODE (type))
1700 {
1701 case VOID_TYPE: return void_type_class;
1702 case INTEGER_TYPE: return integer_type_class;
1703 case ENUMERAL_TYPE: return enumeral_type_class;
1704 case BOOLEAN_TYPE: return boolean_type_class;
1705 case POINTER_TYPE: return pointer_type_class;
1706 case REFERENCE_TYPE: return reference_type_class;
1707 case OFFSET_TYPE: return offset_type_class;
1708 case REAL_TYPE: return real_type_class;
1709 case COMPLEX_TYPE: return complex_type_class;
1710 case FUNCTION_TYPE: return function_type_class;
1711 case METHOD_TYPE: return method_type_class;
1712 case RECORD_TYPE: return record_type_class;
1713 case UNION_TYPE:
1714 case QUAL_UNION_TYPE: return union_type_class;
1715 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1716 ? string_type_class : array_type_class);
1717 case LANG_TYPE: return lang_type_class;
1718 default: return no_type_class;
1719 }
1720 }
1721
1722 /* Expand a call EXP to __builtin_classify_type. */
1723
1724 static rtx
1725 expand_builtin_classify_type (tree exp)
1726 {
1727 if (call_expr_nargs (exp))
1728 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1729 return GEN_INT (no_type_class);
1730 }
1731
/* This helper macro, meant to be used in mathfn_built_in below,
   determines which among a set of three builtin math functions is
   appropriate for a given type mode.  The `F' and `L' cases are
   automatically generated from the `double' case.  It expands to
   switch cases that set the locals FCODE, FCODEF and FCODEL, which
   must be in scope at the point of use.  */
#define CASE_MATHFN(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
    fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
    fcodel = BUILT_IN_MATHFN##L ; break;
/* Similar to above, but appends _R after any F/L suffix, for the
   reentrant variants (e.g. lgamma_r).  */
#define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
    fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
    fcodel = BUILT_IN_MATHFN##L_R ; break;
1745
/* Return mathematic function equivalent to FN but operating directly
   on TYPE, if available.  If IMPLICIT is true find the function in
   implicit_built_in_decls[], otherwise use built_in_decls[].  If we
   can't do the conversion, return zero.  TYPE must be a main variant
   of double, float or long double for a non-NULL result.  */

static tree
mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit)
{
  tree const *const fn_arr
    = implicit ? implicit_built_in_decls : built_in_decls;
  enum built_in_function fcode, fcodef, fcodel;

  /* Each CASE_MATHFN expands to the three suffixed cases and records
     the double/float/long-double codes in fcode/fcodef/fcodel.  */
  switch (fn)
    {
      CASE_MATHFN (BUILT_IN_ACOS)
      CASE_MATHFN (BUILT_IN_ACOSH)
      CASE_MATHFN (BUILT_IN_ASIN)
      CASE_MATHFN (BUILT_IN_ASINH)
      CASE_MATHFN (BUILT_IN_ATAN)
      CASE_MATHFN (BUILT_IN_ATAN2)
      CASE_MATHFN (BUILT_IN_ATANH)
      CASE_MATHFN (BUILT_IN_CBRT)
      CASE_MATHFN (BUILT_IN_CEIL)
      CASE_MATHFN (BUILT_IN_CEXPI)
      CASE_MATHFN (BUILT_IN_COPYSIGN)
      CASE_MATHFN (BUILT_IN_COS)
      CASE_MATHFN (BUILT_IN_COSH)
      CASE_MATHFN (BUILT_IN_DREM)
      CASE_MATHFN (BUILT_IN_ERF)
      CASE_MATHFN (BUILT_IN_ERFC)
      CASE_MATHFN (BUILT_IN_EXP)
      CASE_MATHFN (BUILT_IN_EXP10)
      CASE_MATHFN (BUILT_IN_EXP2)
      CASE_MATHFN (BUILT_IN_EXPM1)
      CASE_MATHFN (BUILT_IN_FABS)
      CASE_MATHFN (BUILT_IN_FDIM)
      CASE_MATHFN (BUILT_IN_FLOOR)
      CASE_MATHFN (BUILT_IN_FMA)
      CASE_MATHFN (BUILT_IN_FMAX)
      CASE_MATHFN (BUILT_IN_FMIN)
      CASE_MATHFN (BUILT_IN_FMOD)
      CASE_MATHFN (BUILT_IN_FREXP)
      CASE_MATHFN (BUILT_IN_GAMMA)
      CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
      CASE_MATHFN (BUILT_IN_HUGE_VAL)
      CASE_MATHFN (BUILT_IN_HYPOT)
      CASE_MATHFN (BUILT_IN_ILOGB)
      CASE_MATHFN (BUILT_IN_INF)
      CASE_MATHFN (BUILT_IN_ISINF)
      CASE_MATHFN (BUILT_IN_J0)
      CASE_MATHFN (BUILT_IN_J1)
      CASE_MATHFN (BUILT_IN_JN)
      CASE_MATHFN (BUILT_IN_LCEIL)
      CASE_MATHFN (BUILT_IN_LDEXP)
      CASE_MATHFN (BUILT_IN_LFLOOR)
      CASE_MATHFN (BUILT_IN_LGAMMA)
      CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
      CASE_MATHFN (BUILT_IN_LLCEIL)
      CASE_MATHFN (BUILT_IN_LLFLOOR)
      CASE_MATHFN (BUILT_IN_LLRINT)
      CASE_MATHFN (BUILT_IN_LLROUND)
      CASE_MATHFN (BUILT_IN_LOG)
      CASE_MATHFN (BUILT_IN_LOG10)
      CASE_MATHFN (BUILT_IN_LOG1P)
      CASE_MATHFN (BUILT_IN_LOG2)
      CASE_MATHFN (BUILT_IN_LOGB)
      CASE_MATHFN (BUILT_IN_LRINT)
      CASE_MATHFN (BUILT_IN_LROUND)
      CASE_MATHFN (BUILT_IN_MODF)
      CASE_MATHFN (BUILT_IN_NAN)
      CASE_MATHFN (BUILT_IN_NANS)
      CASE_MATHFN (BUILT_IN_NEARBYINT)
      CASE_MATHFN (BUILT_IN_NEXTAFTER)
      CASE_MATHFN (BUILT_IN_NEXTTOWARD)
      CASE_MATHFN (BUILT_IN_POW)
      CASE_MATHFN (BUILT_IN_POWI)
      CASE_MATHFN (BUILT_IN_POW10)
      CASE_MATHFN (BUILT_IN_REMAINDER)
      CASE_MATHFN (BUILT_IN_REMQUO)
      CASE_MATHFN (BUILT_IN_RINT)
      CASE_MATHFN (BUILT_IN_ROUND)
      CASE_MATHFN (BUILT_IN_SCALB)
      CASE_MATHFN (BUILT_IN_SCALBLN)
      CASE_MATHFN (BUILT_IN_SCALBN)
      CASE_MATHFN (BUILT_IN_SIGNBIT)
      CASE_MATHFN (BUILT_IN_SIGNIFICAND)
      CASE_MATHFN (BUILT_IN_SIN)
      CASE_MATHFN (BUILT_IN_SINCOS)
      CASE_MATHFN (BUILT_IN_SINH)
      CASE_MATHFN (BUILT_IN_SQRT)
      CASE_MATHFN (BUILT_IN_TAN)
      CASE_MATHFN (BUILT_IN_TANH)
      CASE_MATHFN (BUILT_IN_TGAMMA)
      CASE_MATHFN (BUILT_IN_TRUNC)
      CASE_MATHFN (BUILT_IN_Y0)
      CASE_MATHFN (BUILT_IN_Y1)
      CASE_MATHFN (BUILT_IN_YN)

      default:
	return NULL_TREE;
      }

  /* Select the variant matching TYPE.  */
  if (TYPE_MAIN_VARIANT (type) == double_type_node)
    return fn_arr[fcode];
  else if (TYPE_MAIN_VARIANT (type) == float_type_node)
    return fn_arr[fcodef];
  else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
    return fn_arr[fcodel];
  else
    return NULL_TREE;
}
1857
1858 /* Like mathfn_built_in_1(), but always use the implicit array. */
1859
1860 tree
1861 mathfn_built_in (tree type, enum built_in_function fn)
1862 {
1863 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1864 }
1865
1866 /* If errno must be maintained, expand the RTL to check if the result,
1867 TARGET, of a built-in function call, EXP, is NaN, and if so set
1868 errno to EDOM. */
1869
static void
expand_errno_check (tree exp, rtx target)
{
  /* Label reached when the result is not NaN, i.e. errno need not be
     touched.  */
  rtx lab = gen_label_rtx ();

  /* Test the result; if it is NaN, set errno=EDOM because
     the argument was not in the domain.  A self-comparison with EQ
     succeeds exactly when TARGET is not a NaN.  */
  do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
			   NULL_RTX, NULL_RTX, lab);

#ifdef TARGET_EDOM
  /* If this built-in doesn't throw an exception, set errno directly.  */
  if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
    {
#ifdef GEN_ERRNO_RTX
      rtx errno_rtx = GEN_ERRNO_RTX;
#else
      rtx errno_rtx
	  = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
#endif
      /* Store EDOM into errno and skip the library-call fallback.  */
      emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
      emit_label (lab);
      return;
    }
#endif

  /* Make sure the library call isn't expanded as a tail call.  */
  CALL_EXPR_TAILCALL (exp) = 0;

  /* We can't set errno=EDOM directly; let the library call do it.
     Pop the arguments right away in case the call gets deleted.  */
  NO_DEFER_POP;
  expand_call (exp, target, 0);
  OK_DEFER_POP;
  emit_label (lab);
}
1906
1907 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1908 Return NULL_RTX if a normal call should be emitted rather than expanding
1909 the function in-line. EXP is the expression that is a call to the builtin
1910 function; if convenient, the result should be placed in TARGET.
1911 SUBTARGET may be used as the target for computing one of EXP's operands. */
1912
static rtx
expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, insns;
  tree fndecl = get_callee_fndecl (exp);
  enum machine_mode mode;
  bool errno_set = false;
  tree arg;

  /* Punt to a normal call unless EXP takes exactly one floating point
     argument.  */
  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  /* Pick the optab for this builtin, and record whether a NaN result
     requires setting errno to EDOM.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SQRT):
      /* sqrt only sets errno for a negative argument; skip the check
	 when the argument is provably nonnegative.  */
      errno_set = ! tree_expr_nonnegative_p (arg);
      builtin_optab = sqrt_optab;
      break;
    CASE_FLT_FN (BUILT_IN_EXP):
      errno_set = true; builtin_optab = exp_optab; break;
    CASE_FLT_FN (BUILT_IN_EXP10):
    CASE_FLT_FN (BUILT_IN_POW10):
      errno_set = true; builtin_optab = exp10_optab; break;
    CASE_FLT_FN (BUILT_IN_EXP2):
      errno_set = true; builtin_optab = exp2_optab; break;
    CASE_FLT_FN (BUILT_IN_EXPM1):
      errno_set = true; builtin_optab = expm1_optab; break;
    CASE_FLT_FN (BUILT_IN_LOGB):
      errno_set = true; builtin_optab = logb_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG):
      errno_set = true; builtin_optab = log_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG10):
      errno_set = true; builtin_optab = log10_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG2):
      errno_set = true; builtin_optab = log2_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG1P):
      errno_set = true; builtin_optab = log1p_optab; break;
    CASE_FLT_FN (BUILT_IN_ASIN):
      builtin_optab = asin_optab; break;
    CASE_FLT_FN (BUILT_IN_ACOS):
      builtin_optab = acos_optab; break;
    CASE_FLT_FN (BUILT_IN_TAN):
      builtin_optab = tan_optab; break;
    CASE_FLT_FN (BUILT_IN_ATAN):
      builtin_optab = atan_optab; break;
    CASE_FLT_FN (BUILT_IN_FLOOR):
      builtin_optab = floor_optab; break;
    CASE_FLT_FN (BUILT_IN_CEIL):
      builtin_optab = ceil_optab; break;
    CASE_FLT_FN (BUILT_IN_TRUNC):
      builtin_optab = btrunc_optab; break;
    CASE_FLT_FN (BUILT_IN_ROUND):
      builtin_optab = round_optab; break;
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
      builtin_optab = nearbyint_optab;
      if (flag_trapping_math)
	break;
      /* Else fallthrough and expand as rint.  */
    CASE_FLT_FN (BUILT_IN_RINT):
      builtin_optab = rint_optab; break;
    CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
      builtin_optab = significand_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* errno handling is only needed when -fno-math-errno is not in
     effect and the mode can represent NaNs at all.  */
  if (! flag_errno_math || ! HONOR_NANS (mode))
    errno_set = false;

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
    {
      target = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      /* Compute into TARGET.
	 Set TARGET to wherever the result comes back.  */
      target = expand_unop (mode, builtin_optab, op0, target, 0);

      if (target != 0)
	{
	  if (errno_set)
	    expand_errno_check (exp, target);

	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return target;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call to the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  /* Emit a library call; TARGET == const0_rtx means the value is
     unused.  */
  return expand_call (exp, target, target == const0_rtx);
}
2026
2027 /* Expand a call to the builtin binary math functions (pow and atan2).
2028 Return NULL_RTX if a normal call should be emitted rather than expanding the
2029 function in-line. EXP is the expression that is a call to the builtin
2030 function; if convenient, the result should be placed in TARGET.
2031 SUBTARGET may be used as the target for computing one of EXP's
2032 operands. */
2033
static rtx
expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, op1, insns;
  /* Most of these builtins take two REAL_TYPE arguments; the scalbn
     family takes an integer second argument.  */
  int op1_type = REAL_TYPE;
  tree fndecl = get_callee_fndecl (exp);
  tree arg0, arg1;
  enum machine_mode mode;
  bool errno_set = true;

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
    CASE_FLT_FN (BUILT_IN_LDEXP):
      op1_type = INTEGER_TYPE;
      /* Fall through to the common exit.  */
    default:
      break;
    }

  /* Punt to a normal call if EXP's arguments do not match the
     expected types.  */
  if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);

  /* Pick the optab for this builtin.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_POW):
      builtin_optab = pow_optab; break;
    CASE_FLT_FN (BUILT_IN_ATAN2):
      builtin_optab = atan2_optab; break;
    CASE_FLT_FN (BUILT_IN_SCALB):
      /* scalb semantics only match the optab for radix-2 formats.  */
      if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
	return 0;
      builtin_optab = scalb_optab; break;
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
      if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
	return 0;
      /* Fall through... */
    CASE_FLT_FN (BUILT_IN_LDEXP):
      builtin_optab = ldexp_optab; break;
    CASE_FLT_FN (BUILT_IN_FMOD):
      builtin_optab = fmod_optab; break;
    CASE_FLT_FN (BUILT_IN_REMAINDER):
    CASE_FLT_FN (BUILT_IN_DREM):
      builtin_optab = remainder_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
    return NULL_RTX;

  target = gen_reg_rtx (mode);

  /* errno handling is only needed when -fno-math-errno is not in
     effect and the mode can represent NaNs at all.  */
  if (! flag_errno_math || ! HONOR_NANS (mode))
    errno_set = false;

  /* Always stabilize the argument list.  */
  CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
  CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);

  op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
  op1 = expand_normal (arg1);

  start_sequence ();

  /* Compute into TARGET.
     Set TARGET to wherever the result comes back.  */
  target = expand_binop (mode, builtin_optab, op0, op1,
			 target, 0, OPTAB_DIRECT);

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call to the library function
     with the stabilized argument list.  */
  if (target == 0)
    {
      end_sequence ();
      return expand_call (exp, target, target == const0_rtx);
    }

  if (errno_set)
    expand_errno_check (exp, target);

  /* Output the entire sequence.  */
  insns = get_insns ();
  end_sequence ();
  emit_insn (insns);

  return target;
}
2132
2133 /* Expand a call to the builtin sin and cos math functions.
2134 Return NULL_RTX if a normal call should be emitted rather than expanding the
2135 function in-line. EXP is the expression that is a call to the builtin
2136 function; if convenient, the result should be placed in TARGET.
2137 SUBTARGET may be used as the target for computing one of EXP's
2138 operands. */
2139
static rtx
expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, insns;
  tree fndecl = get_callee_fndecl (exp);
  enum machine_mode mode;
  tree arg;

  /* Punt to a normal call unless EXP takes exactly one floating point
     argument.  */
  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  /* Prefer the combined sincos optab for both sin and cos.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_COS):
      builtin_optab = sincos_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Check if sincos insn is available, otherwise fallback
     to sin or cos insn.  */
  if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_FLT_FN (BUILT_IN_SIN):
	builtin_optab = sin_optab; break;
      CASE_FLT_FN (BUILT_IN_COS):
	builtin_optab = cos_optab; break;
      default:
	gcc_unreachable ();
      }

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
    {
      target = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      /* Compute into TARGET.
	 Set TARGET to wherever the result comes back.  */
      if (builtin_optab == sincos_optab)
	{
	  int result;

	  /* sincos produces two values; request only the one this
	     builtin needs and discard the other.  */
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    CASE_FLT_FN (BUILT_IN_SIN):
	      result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_COS):
	      result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
	      break;
	    default:
	      gcc_unreachable ();
	    }
	  gcc_assert (result);
	}
      else
	{
	  target = expand_unop (mode, builtin_optab, op0, target, 0);
	}

      if (target != 0)
	{
	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return target;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call to the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  /* Emit a library call; TARGET == const0_rtx means the value is
     unused.  */
  target = expand_call (exp, target, target == const0_rtx);

  return target;
}
2236
2237 /* Given an interclass math builtin decl FNDECL and it's argument ARG
2238 return an RTL instruction code that implements the functionality.
2239 If that isn't possible or available return CODE_FOR_nothing. */
2240
2241 static enum insn_code
2242 interclass_mathfn_icode (tree arg, tree fndecl)
2243 {
2244 bool errno_set = false;
2245 optab builtin_optab = 0;
2246 enum machine_mode mode;
2247
2248 switch (DECL_FUNCTION_CODE (fndecl))
2249 {
2250 CASE_FLT_FN (BUILT_IN_ILOGB):
2251 errno_set = true; builtin_optab = ilogb_optab; break;
2252 CASE_FLT_FN (BUILT_IN_ISINF):
2253 builtin_optab = isinf_optab; break;
2254 case BUILT_IN_ISNORMAL:
2255 case BUILT_IN_ISFINITE:
2256 CASE_FLT_FN (BUILT_IN_FINITE):
2257 case BUILT_IN_FINITED32:
2258 case BUILT_IN_FINITED64:
2259 case BUILT_IN_FINITED128:
2260 case BUILT_IN_ISINFD32:
2261 case BUILT_IN_ISINFD64:
2262 case BUILT_IN_ISINFD128:
2263 /* These builtins have no optabs (yet). */
2264 break;
2265 default:
2266 gcc_unreachable ();
2267 }
2268
2269 /* There's no easy way to detect the case we need to set EDOM. */
2270 if (flag_errno_math && errno_set)
2271 return CODE_FOR_nothing;
2272
2273 /* Optab mode depends on the mode of the input argument. */
2274 mode = TYPE_MODE (TREE_TYPE (arg));
2275
2276 if (builtin_optab)
2277 return optab_handler (builtin_optab, mode)->insn_code;
2278 return CODE_FOR_nothing;
2279 }
2280
2281 /* Expand a call to one of the builtin math functions that operate on
2282 floating point argument and output an integer result (ilogb, isinf,
2283 isnan, etc).
2284 Return 0 if a normal call should be emitted rather than expanding the
2285 function in-line. EXP is the expression that is a call to the builtin
2286 function; if convenient, the result should be placed in TARGET.
2287 SUBTARGET may be used as the target for computing one of EXP's operands. */
2288
static rtx
expand_builtin_interclass_mathfn (tree exp, rtx target, rtx subtarget)
{
  enum insn_code icode = CODE_FOR_nothing;
  rtx op0;
  tree fndecl = get_callee_fndecl (exp);
  enum machine_mode mode;
  tree arg;

  /* Punt to a normal call unless EXP takes exactly one floating point
     argument.  */
  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  icode = interclass_mathfn_icode (arg, fndecl);
  mode = TYPE_MODE (TREE_TYPE (arg));

  if (icode != CODE_FOR_nothing)
    {
      /* Make a suitable register to place result in.  */
      if (!target
	  || GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
	target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

      gcc_assert (insn_data[icode].operand[0].predicate
		  (target, GET_MODE (target)));

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      /* The insn operates in the argument's mode; convert if the
	 expansion produced a different mode.  */
      if (mode != GET_MODE (op0))
	op0 = convert_to_mode (mode, op0, 0);

      /* Compute into TARGET.
	 Set TARGET to wherever the result comes back.  */
      emit_unop_insn (icode, target, op0, UNKNOWN);
      return target;
    }

  return NULL_RTX;
}
2333
2334 /* Expand a call to the builtin sincos math function.
2335 Return NULL_RTX if a normal call should be emitted rather than expanding the
2336 function in-line. EXP is the expression that is a call to the builtin
2337 function. */
2338
static rtx
expand_builtin_sincos (tree exp)
{
  rtx op0, op1, op2, target1, target2;
  enum machine_mode mode;
  tree arg, sinp, cosp;
  int result;
  location_t loc = EXPR_LOCATION (exp);

  /* sincos takes the angle plus two output pointers.  */
  if (!validate_arglist (exp, REAL_TYPE,
			 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  sinp = CALL_EXPR_ARG (exp, 1);
  cosp = CALL_EXPR_ARG (exp, 2);

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Check if sincos insn is available, otherwise emit the call.  */
  if (optab_handler (sincos_optab, mode)->insn_code == CODE_FOR_nothing)
    return NULL_RTX;

  target1 = gen_reg_rtx (mode);
  target2 = gen_reg_rtx (mode);

  op0 = expand_normal (arg);
  /* Expand *sinp and *cosp as the destinations for the results.  */
  op1 = expand_normal (build_fold_indirect_ref_loc (loc, sinp));
  op2 = expand_normal (build_fold_indirect_ref_loc (loc, cosp));

  /* Compute into target1 and target2.
     Set TARGET to wherever the result comes back.  */
  result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
  gcc_assert (result);

  /* Move target1 and target2 to the memory locations indicated
     by op1 and op2.  */
  emit_move_insn (op1, target1);
  emit_move_insn (op2, target2);

  /* sincos returns void; const0_rtx just signals success to the
     caller.  */
  return const0_rtx;
}
2382
2383 /* Expand a call to the internal cexpi builtin to the sincos math function.
2384 EXP is the expression that is a call to the builtin function; if convenient,
2385 the result should be placed in TARGET. SUBTARGET may be used as the target
2386 for computing one of EXP's operands. */
2387
static rtx
expand_builtin_cexpi (tree exp, rtx target, rtx subtarget)
{
  tree fndecl = get_callee_fndecl (exp);
  tree arg, type;
  enum machine_mode mode;
  rtx op0, op1, op2;
  location_t loc = EXPR_LOCATION (exp);

  /* cexpi takes a single floating point argument.  */
  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Try expanding via a sincos optab, fall back to emitting a libcall
     to sincos or cexp.  We are sure we have sincos or cexp because cexpi
     is only generated from sincos, cexp or if we have either of them.  */
  if (optab_handler (sincos_optab, mode)->insn_code != CODE_FOR_nothing)
    {
      /* op1 receives sin, op2 receives cos.  */
      op1 = gen_reg_rtx (mode);
      op2 = gen_reg_rtx (mode);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      /* Compute into op1 and op2.  */
      expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
    }
  else if (TARGET_HAS_SINCOS)
    {
      tree call, fn = NULL_TREE;
      tree top1, top2;
      rtx op1a, op2a;

      /* Pick the sincos variant matching this cexpi's precision.  */
      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	fn = built_in_decls[BUILT_IN_SINCOSF];
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	fn = built_in_decls[BUILT_IN_SINCOS];
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	fn = built_in_decls[BUILT_IN_SINCOSL];
      else
	gcc_unreachable ();

      /* Stack temporaries to receive the sin and cos results, and
	 tree-level pointers to them to pass to sincos.  */
      op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
      op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
      op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
      op2a = copy_to_mode_reg (Pmode, XEXP (op2, 0));
      top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
      top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);

      /* Make sure not to fold the sincos call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
				      call, 3, arg, top1, top2));
    }
  else
    {
      tree call, fn = NULL_TREE, narg;
      tree ctype = build_complex_type (type);

      /* Pick the cexp variant matching this cexpi's precision.  */
      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	fn = built_in_decls[BUILT_IN_CEXPF];
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	fn = built_in_decls[BUILT_IN_CEXP];
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	fn = built_in_decls[BUILT_IN_CEXPL];
      else
	gcc_unreachable ();

      /* If we don't have a decl for cexp create one.  This is the
	 friendliest fallback if the user calls __builtin_cexpi
	 without full target C99 function support.  */
      if (fn == NULL_TREE)
	{
	  tree fntype;
	  const char *name = NULL;

	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	    name = "cexpf";
	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	    name = "cexp";
	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	    name = "cexpl";

	  fntype = build_function_type_list (ctype, ctype, NULL_TREE);
	  fn = build_fn_decl (name, fntype);
	}

      /* cexpi(x) == cexp(0 + x*i); build the complex argument.  */
      narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
			      build_real (type, dconst0), arg);

      /* Make sure not to fold the cexp call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      return expand_expr (build_call_nary (ctype, call, 1, narg),
			  target, VOIDmode, EXPAND_NORMAL);
    }

  /* Now build the proper return type: cos in the real part, sin in
     the imaginary part.  */
  return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
			      make_tree (TREE_TYPE (arg), op2),
			      make_tree (TREE_TYPE (arg), op1)),
		      target, VOIDmode, EXPAND_NORMAL);
}
2492
/* Conveniently construct a function call expression.  FNDECL names the
   function to be called, N is the number of arguments, and the "..."
   parameters are the argument expressions.  Unlike build_call_expr
   this doesn't fold the call, hence it will always return a CALL_EXPR.  */
2497
static tree
build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
{
  va_list ap;
  tree fntype = TREE_TYPE (fndecl);
  /* Take FNDECL's address to form the CALL_EXPR's function operand.  */
  tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);

  /* Collect the N variadic argument trees into an unfolded call.  */
  va_start (ap, n);
  fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
  va_end (ap);
  SET_EXPR_LOCATION (fn, loc);
  return fn;
}
/* Convenience wrapper supplying UNKNOWN_LOCATION.  */
#define build_call_nofold(...) \
  build_call_nofold_loc (UNKNOWN_LOCATION, __VA_ARGS__)
2513
2514 /* Expand a call to one of the builtin rounding functions gcc defines
2515 as an extension (lfloor and lceil). As these are gcc extensions we
2516 do not need to worry about setting errno to EDOM.
2517 If expanding via optab fails, lower expression to (int)(floor(x)).
2518 EXP is the expression that is a call to the builtin function;
2519 if convenient, the result should be placed in TARGET. */
2520
static rtx
expand_builtin_int_roundingfn (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0, insns, tmp;
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fallback_fn;
  tree fallback_fndecl;
  enum machine_mode mode;
  tree arg;

  /* These builtins always take one floating point argument.  */
  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    gcc_unreachable ();

  arg = CALL_EXPR_ARG (exp, 0);

  /* Pick the conversion optab, and the floating point rounding
     builtin to lower to if the optab is unavailable.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_LCEIL):
    CASE_FLT_FN (BUILT_IN_LLCEIL):
      builtin_optab = lceil_optab;
      fallback_fn = BUILT_IN_CEIL;
      break;

    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      builtin_optab = lfloor_optab;
      fallback_fn = BUILT_IN_FLOOR;
      break;

    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  target = gen_reg_rtx (mode);

  /* Wrap the computation of the argument in a SAVE_EXPR, as we may
     need to expand the argument again.  This way, we will not perform
     side-effects more than once.  */
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

  op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

  start_sequence ();

  /* Compute into TARGET.  */
  if (expand_sfix_optab (target, op0, builtin_optab))
    {
      /* Output the entire sequence.  */
      insns = get_insns ();
      end_sequence ();
      emit_insn (insns);
      return target;
    }

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns).  */
  end_sequence ();

  /* Fall back to floating point rounding optab.  */
  fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);

  /* For non-C99 targets we may end up without a fallback fndecl here
     if the user called __builtin_lfloor directly.  In this case emit
     a call to the floor/ceil variants nevertheless.  This should result
     in the best user experience for not full C99 targets.  */
  if (fallback_fndecl == NULL_TREE)
    {
      tree fntype;
      const char *name = NULL;

      /* Choose the libm function name matching the precision.  */
      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_LCEIL:
	case BUILT_IN_LLCEIL:
	  name = "ceil";
	  break;
	case BUILT_IN_LCEILF:
	case BUILT_IN_LLCEILF:
	  name = "ceilf";
	  break;
	case BUILT_IN_LCEILL:
	case BUILT_IN_LLCEILL:
	  name = "ceill";
	  break;
	case BUILT_IN_LFLOOR:
	case BUILT_IN_LLFLOOR:
	  name = "floor";
	  break;
	case BUILT_IN_LFLOORF:
	case BUILT_IN_LLFLOORF:
	  name = "floorf";
	  break;
	case BUILT_IN_LFLOORL:
	case BUILT_IN_LLFLOORL:
	  name = "floorl";
	  break;
	default:
	  gcc_unreachable ();
	}

      fntype = build_function_type_list (TREE_TYPE (arg),
					 TREE_TYPE (arg), NULL_TREE);
      fallback_fndecl = build_fn_decl (name, fntype);
    }

  /* Lower to floor(x)/ceil(x) followed by a float-to-int conversion.  */
  exp = build_call_nofold (fallback_fndecl, 1, arg);

  tmp = expand_normal (exp);

  /* Truncate the result of floating point optab to integer
     via expand_fix ().  */
  target = gen_reg_rtx (mode);
  expand_fix (target, tmp, 0);

  return target;
}
2641
2642 /* Expand a call to one of the builtin math functions doing integer
2643 conversion (lrint).
2644 Return 0 if a normal call should be emitted rather than expanding the
2645 function in-line. EXP is the expression that is a call to the builtin
2646 function; if convenient, the result should be placed in TARGET. */
2647
static rtx
expand_builtin_int_roundingfn_2 (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0, insns;
  tree fndecl = get_callee_fndecl (exp);
  tree arg;
  enum machine_mode mode;

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math)
    return NULL_RTX;

  /* These builtins always take one floating point argument.  */
  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    gcc_unreachable ();

  arg = CALL_EXPR_ARG (exp, 0);

  /* Pick the conversion optab for this builtin.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_LLRINT):
      builtin_optab = lrint_optab; break;
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_LLROUND):
      builtin_optab = lround_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  target = gen_reg_rtx (mode);

  /* Wrap the computation of the argument in a SAVE_EXPR, as we may
     need to expand the argument again.  This way, we will not perform
     side-effects more than once.  */
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

  op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

  start_sequence ();

  if (expand_sfix_optab (target, op0, builtin_optab))
    {
      /* Output the entire sequence.  */
      insns = get_insns ();
      end_sequence ();
      emit_insn (insns);
      return target;
    }

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call to the library function
     with the stabilized argument list.  */
  end_sequence ();

  /* TARGET == const0_rtx means the value is unused.  */
  target = expand_call (exp, target, target == const0_rtx);

  return target;
}
2710
2711 /* To evaluate powi(x,n), the floating point value x raised to the
2712 constant integer exponent n, we use a hybrid algorithm that
2713 combines the "window method" with look-up tables. For an
2714 introduction to exponentiation algorithms and "addition chains",
2715 see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
2716 "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
2717 3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
2718 Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998. */
2719
2720 /* Provide a default value for POWI_MAX_MULTS, the maximum number of
2721 multiplications to inline before calling the system library's pow
2722 function. powi(x,n) requires at worst 2*bits(n)-2 multiplications,
2723 so this default never requires calling pow, powf or powl. */
2724
2725 #ifndef POWI_MAX_MULTS
2726 #define POWI_MAX_MULTS (2*HOST_BITS_PER_WIDE_INT-2)
2727 #endif
2728
2729 /* The size of the "optimal power tree" lookup table. All
2730 exponents less than this value are simply looked up in the
2731 powi_table below. This threshold is also used to size the
2732 cache of pseudo registers that hold intermediate results. */
2733 #define POWI_TABLE_SIZE 256
2734
2735 /* The size, in bits of the window, used in the "window method"
2736 exponentiation algorithm. This is equivalent to a radix of
2737 (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method". */
2738 #define POWI_WINDOW_SIZE 3
2739
2740 /* The following table is an efficient representation of an
2741 "optimal power tree". For each value, i, the corresponding
2742 value, j, in the table states than an optimal evaluation
2743 sequence for calculating pow(x,i) can be found by evaluating
2744 pow(x,j)*pow(x,i-j). An optimal power tree for the first
2745 100 integers is given in Knuth's "Seminumerical algorithms". */
2746
static const unsigned char powi_table[POWI_TABLE_SIZE] =
  {
    /* Entry i holds a split point j: pow(x,i) is evaluated optimally
       as pow(x,j) * pow(x,i-j).  */
      0,   1,   1,   2,   2,   3,   3,   4,  /*   0 -   7 */
      4,   6,   5,   6,   6,  10,   7,   9,  /*   8 -  15 */
      8,  16,   9,  16,  10,  12,  11,  13,  /*  16 -  23 */
     12,  17,  13,  18,  14,  24,  15,  26,  /*  24 -  31 */
     16,  17,  17,  19,  18,  33,  19,  26,  /*  32 -  39 */
     20,  25,  21,  40,  22,  27,  23,  44,  /*  40 -  47 */
     24,  32,  25,  34,  26,  29,  27,  44,  /*  48 -  55 */
     28,  31,  29,  34,  30,  60,  31,  36,  /*  56 -  63 */
     32,  64,  33,  34,  34,  46,  35,  37,  /*  64 -  71 */
     36,  65,  37,  50,  38,  48,  39,  69,  /*  72 -  79 */
     40,  49,  41,  43,  42,  51,  43,  58,  /*  80 -  87 */
     44,  64,  45,  47,  46,  59,  47,  76,  /*  88 -  95 */
     48,  65,  49,  66,  50,  67,  51,  66,  /*  96 - 103 */
     52,  70,  53,  74,  54, 104,  55,  74,  /* 104 - 111 */
     56,  64,  57,  69,  58,  78,  59,  68,  /* 112 - 119 */
     60,  61,  61,  80,  62,  75,  63,  68,  /* 120 - 127 */
     64,  65,  65, 128,  66, 129,  67,  90,  /* 128 - 135 */
     68,  73,  69, 131,  70,  94,  71,  88,  /* 136 - 143 */
     72, 128,  73,  98,  74, 132,  75, 121,  /* 144 - 151 */
     76, 102,  77, 124,  78, 132,  79, 106,  /* 152 - 159 */
     80,  97,  81, 160,  82,  99,  83, 134,  /* 160 - 167 */
     84,  86,  85,  95,  86, 160,  87, 100,  /* 168 - 175 */
     88, 113,  89,  98,  90, 107,  91, 122,  /* 176 - 183 */
     92, 111,  93, 102,  94, 126,  95, 150,  /* 184 - 191 */
     96, 128,  97, 130,  98, 133,  99, 195,  /* 192 - 199 */
    100, 128, 101, 123, 102, 164, 103, 138,  /* 200 - 207 */
    104, 145, 105, 146, 106, 109, 107, 149,  /* 208 - 215 */
    108, 200, 109, 146, 110, 170, 111, 157,  /* 216 - 223 */
    112, 128, 113, 130, 114, 182, 115, 132,  /* 224 - 231 */
    116, 200, 117, 132, 118, 158, 119, 206,  /* 232 - 239 */
    120, 240, 121, 162, 122, 147, 123, 152,  /* 240 - 247 */
    124, 166, 125, 214, 126, 138, 127, 153,  /* 248 - 255 */
  };
2782
2783
2784 /* Return the number of multiplications required to calculate
2785 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2786 subroutine of powi_cost. CACHE is an array indicating
2787 which exponents have already been calculated. */
2788
2789 static int
2790 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
2791 {
2792 /* If we've already calculated this exponent, then this evaluation
2793 doesn't require any additional multiplications. */
2794 if (cache[n])
2795 return 0;
2796
2797 cache[n] = true;
2798 return powi_lookup_cost (n - powi_table[n], cache)
2799 + powi_lookup_cost (powi_table[n], cache) + 1;
2800 }
2801
2802 /* Return the number of multiplications required to calculate
2803 powi(x,n) for an arbitrary x, given the exponent N. This
2804 function needs to be kept in sync with expand_powi below. */
2805
2806 static int
2807 powi_cost (HOST_WIDE_INT n)
2808 {
2809 bool cache[POWI_TABLE_SIZE];
2810 unsigned HOST_WIDE_INT digit;
2811 unsigned HOST_WIDE_INT val;
2812 int result;
2813
2814 if (n == 0)
2815 return 0;
2816
2817 /* Ignore the reciprocal when calculating the cost. */
2818 val = (n < 0) ? -n : n;
2819
2820 /* Initialize the exponent cache. */
2821 memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
2822 cache[1] = true;
2823
2824 result = 0;
2825
2826 while (val >= POWI_TABLE_SIZE)
2827 {
2828 if (val & 1)
2829 {
2830 digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2831 result += powi_lookup_cost (digit, cache)
2832 + POWI_WINDOW_SIZE + 1;
2833 val >>= POWI_WINDOW_SIZE;
2834 }
2835 else
2836 {
2837 val >>= 1;
2838 result++;
2839 }
2840 }
2841
2842 return result + powi_lookup_cost (val, cache);
2843 }
2844
/* Recursive subroutine of expand_powi.  This function takes the array,
   CACHE, of already calculated exponents and an exponent N and returns
   an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE.  */

static rtx
expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
{
  unsigned HOST_WIDE_INT digit;
  rtx target, result;
  rtx op0, op1;

  if (n < POWI_TABLE_SIZE)
    {
      /* Reuse a power already materialized in a pseudo.  */
      if (cache[n])
	return cache[n];

      /* Record the target register BEFORE recursing, so the two
	 recursive calls can share this power if they need it.  */
      target = gen_reg_rtx (mode);
      cache[n] = target;

      /* Optimal power tree split: x**n = x**(n-j) * x**j,
	 j = powi_table[n].  */
      op0 = expand_powi_1 (mode, n - powi_table[n], cache);
      op1 = expand_powi_1 (mode, powi_table[n], cache);
    }
  else if (n & 1)
    {
      /* Odd exponent above the table: peel off the low
	 POWI_WINDOW_SIZE bits (window method) and recurse.  */
      target = gen_reg_rtx (mode);
      digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
      op0 = expand_powi_1 (mode, n - digit, cache);
      op1 = expand_powi_1 (mode, digit, cache);
    }
  else
    {
      /* Even exponent: square x**(n/2).  */
      target = gen_reg_rtx (mode);
      op0 = expand_powi_1 (mode, n >> 1, cache);
      op1 = op0;
    }

  result = expand_mult (mode, op0, op1, target, 0);
  if (result != target)
    emit_move_insn (target, result);
  return target;
}
2886
2887 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
2888 floating point operand in mode MODE, and N is the exponent. This
2889 function needs to be kept in sync with powi_cost above. */
2890
2891 static rtx
2892 expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
2893 {
2894 rtx cache[POWI_TABLE_SIZE];
2895 rtx result;
2896
2897 if (n == 0)
2898 return CONST1_RTX (mode);
2899
2900 memset (cache, 0, sizeof (cache));
2901 cache[1] = x;
2902
2903 result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
2904
2905 /* If the original exponent was negative, reciprocate the result. */
2906 if (n < 0)
2907 result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2908 result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2909
2910 return result;
2911 }
2912
/* Expand a call to the pow built-in mathematical function.  Return NULL_RTX if
   a normal call should be emitted rather than expanding the function
   in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_pow (tree exp, rtx target, rtx subtarget)
{
  tree arg0, arg1;
  tree fn, narg0;
  tree type = TREE_TYPE (exp);
  REAL_VALUE_TYPE cint, c, c2;
  HOST_WIDE_INT n;
  rtx op, op2;
  enum machine_mode mode = TYPE_MODE (type);

  if (! validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);

  /* A non-constant exponent gets the generic binary-mathfn expansion.  */
  if (TREE_CODE (arg1) != REAL_CST
      || TREE_OVERFLOW (arg1))
    return expand_builtin_mathfn_2 (exp, target, subtarget);

  /* Handle constant exponents.  */

  /* For integer valued exponents we can expand to an optimal multiplication
     sequence using expand_powi.  */
  c = TREE_REAL_CST (arg1);
  n = real_to_integer (&c);
  real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
  /* -1, 0, 1 and 2 are always exact and cheap; other integer exponents
     require -funsafe-math-optimizations and an acceptable multiply count.  */
  if (real_identical (&c, &cint)
      && ((n >= -1 && n <= 2)
	  || (flag_unsafe_math_optimizations
	      && optimize_insn_for_speed_p ()
	      && powi_cost (n) <= POWI_MAX_MULTS)))
    {
      op = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
      if (n != 1)
	{
	  op = force_reg (mode, op);
	  op = expand_powi (op, mode, n);
	}
      return op;
    }

  /* ARG0 may be evaluated more than once below; stabilize it.  */
  narg0 = builtin_save_expr (arg0);

  /* If the exponent is not integer valued, check if it is half of an integer.
     In this case we can expand to sqrt (x) * x**(n/2).  */
  fn = mathfn_built_in (type, BUILT_IN_SQRT);
  if (fn != NULL_TREE)
    {
      /* Test whether 2*c is an integer N; if so pow(x,c) = sqrt(x)*x**(N/2).  */
      real_arithmetic (&c2, MULT_EXPR, &c, &dconst2);
      n = real_to_integer (&c2);
      real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
      if (real_identical (&c2, &cint)
	  && ((flag_unsafe_math_optimizations
	       && optimize_insn_for_speed_p ()
	       && powi_cost (n/2) <= POWI_MAX_MULTS)
	      || n == 1))
	{
	  tree call_expr = build_call_nofold (fn, 1, narg0);
	  /* Use expand_expr in case the newly built call expression
	     was folded to a non-call.  */
	  op = expand_expr (call_expr, subtarget, mode, EXPAND_NORMAL);
	  if (n != 1)
	    {
	      op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
	      op2 = force_reg (mode, op2);
	      op2 = expand_powi (op2, mode, abs (n / 2));
	      op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
					0, OPTAB_LIB_WIDEN);
	      /* If the original exponent was negative, reciprocate the
		 result.  */
	      if (n < 0)
		op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
				   op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
	    }
	  return op;
	}
    }

  /* Try if the exponent is a third of an integer.  In this case
     we can expand to x**(n/3) * cbrt(x)**(n%3).  As cbrt (x) is
     different from pow (x, 1./3.) due to rounding and behavior
     with negative x we need to constrain this transformation to
     unsafe math and positive x or finite math.  */
  fn = mathfn_built_in (type, BUILT_IN_CBRT);
  if (fn != NULL_TREE
      && flag_unsafe_math_optimizations
      && (tree_expr_nonnegative_p (arg0)
	  || !HONOR_NANS (mode)))
    {
      REAL_VALUE_TYPE dconst3;
      real_from_integer (&dconst3, VOIDmode, 3, 0, 0);
      /* Round 3*c to an integer N, then verify that N/3 converted back
	 to MODE reproduces C exactly -- otherwise C is not a third.  */
      real_arithmetic (&c2, MULT_EXPR, &c, &dconst3);
      real_round (&c2, mode, &c2);
      n = real_to_integer (&c2);
      real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
      real_arithmetic (&c2, RDIV_EXPR, &cint, &dconst3);
      real_convert (&c2, mode, &c2);
      if (real_identical (&c2, &c)
	  && ((optimize_insn_for_speed_p ()
	       && powi_cost (n/3) <= POWI_MAX_MULTS)
	      || n == 1))
	{
	  tree call_expr = build_call_nofold (fn, 1,narg0);
	  op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
	  /* cbrt(x)**2 is needed when |n| mod 3 == 2.  */
	  if (abs (n) % 3 == 2)
	    op = expand_simple_binop (mode, MULT, op, op, op,
				      0, OPTAB_LIB_WIDEN);
	  if (n != 1)
	    {
	      op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
	      op2 = force_reg (mode, op2);
	      op2 = expand_powi (op2, mode, abs (n / 3));
	      op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
					0, OPTAB_LIB_WIDEN);
	      /* If the original exponent was negative, reciprocate the
		 result.  */
	      if (n < 0)
		op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
				   op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
	    }
	  return op;
	}
    }

  /* Fall back to optab expansion.  */
  return expand_builtin_mathfn_2 (exp, target, subtarget);
}
3047
/* Expand a call to the powi built-in mathematical function.  Return NULL_RTX if
   a normal call should be emitted rather than expanding the function
   in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_powi (tree exp, rtx target, rtx subtarget)
{
  tree arg0, arg1;
  rtx op0, op1;
  enum machine_mode mode;
  enum machine_mode mode2;

  if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Handle constant power.  */

  if (TREE_CODE (arg1) == INTEGER_CST
      && !TREE_OVERFLOW (arg1))
    {
      HOST_WIDE_INT n = TREE_INT_CST_LOW (arg1);

      /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
	 Otherwise, check the number of multiplications required.  */
      /* The HIGH word test ensures the exponent actually fits in a
	 HOST_WIDE_INT (sign-extended), so N is the full value.  */
      if ((TREE_INT_CST_HIGH (arg1) == 0
	   || TREE_INT_CST_HIGH (arg1) == -1)
	  && ((n >= -1 && n <= 2)
	      || (optimize_insn_for_speed_p ()
		  && powi_cost (n) <= POWI_MAX_MULTS)))
	{
	  op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
	  op0 = force_reg (mode, op0);
	  return expand_powi (op0, mode, n);
	}
    }

  /* Emit a libcall to libgcc.  */

  /* Mode of the 2nd argument must match that of an int.  */
  mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);

  if (target == NULL_RTX)
    target = gen_reg_rtx (mode);

  op0 = expand_expr (arg0, subtarget, mode, EXPAND_NORMAL);
  if (GET_MODE (op0) != mode)
    op0 = convert_to_mode (mode, op0, 0);
  op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
  if (GET_MODE (op1) != mode2)
    op1 = convert_to_mode (mode2, op1, 0);

  target = emit_library_call_value (optab_libfunc (powi_optab, mode),
				    target, LCT_CONST, mode, 2,
				    op0, mode, op1, mode2);

  return target;
}
3110
/* Expand expression EXP which is a call to the strlen builtin.  Return
   NULL_RTX if we failed the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strlen (tree exp, rtx target,
		       enum machine_mode target_mode)
{
  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      rtx pat;
      tree len;
      tree src = CALL_EXPR_ARG (exp, 0);
      rtx result, src_reg, char_rtx, before_strlen;
      enum machine_mode insn_mode = target_mode, char_mode;
      enum insn_code icode = CODE_FOR_nothing;
      int align;

      /* If the length can be computed at compile-time, return it.  */
      len = c_strlen (src, 0);
      if (len)
	return expand_expr (len, target, target_mode, EXPAND_NORMAL);

      /* If the length can be computed at compile-time and is constant
	 integer, but there are side-effects in src, evaluate
	 src for side-effects, then return len.
	 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
	 can be optimized into: i++; x = 3;  */
      len = c_strlen (src, 1);
      if (len && TREE_CODE (len) == INTEGER_CST)
	{
	  expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
	  return expand_expr (len, target, target_mode, EXPAND_NORMAL);
	}

      align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;

      /* If SRC is not a pointer type, don't do this operation inline.  */
      if (align == 0)
	return NULL_RTX;

      /* Bail out if we can't compute strlen in the right mode.
	 Try successively wider modes until the target has a pattern.  */
      while (insn_mode != VOIDmode)
	{
	  icode = optab_handler (strlen_optab, insn_mode)->insn_code;
	  if (icode != CODE_FOR_nothing)
	    break;

	  insn_mode = GET_MODE_WIDER_MODE (insn_mode);
	}
      if (insn_mode == VOIDmode)
	return NULL_RTX;

      /* Make a place to write the result of the instruction.  */
      result = target;
      if (! (result != 0
	     && REG_P (result)
	     && GET_MODE (result) == insn_mode
	     && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	result = gen_reg_rtx (insn_mode);

      /* Make a place to hold the source address.  We will not expand
	 the actual source until we are sure that the expansion will
	 not fail -- there are trees that cannot be expanded twice.  */
      src_reg = gen_reg_rtx (Pmode);

      /* Mark the beginning of the strlen sequence so we can emit the
	 source operand later.  */
      before_strlen = get_last_insn ();

      char_rtx = const0_rtx;
      char_mode = insn_data[(int) icode].operand[2].mode;
      if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
							    char_mode))
	char_rtx = copy_to_mode_reg (char_mode, char_rtx);

      pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
			     char_rtx, GEN_INT (align));
      if (! pat)
	return NULL_RTX;
      emit_insn (pat);

      /* Now that we are assured of success, expand the source.
	 The source expansion is captured in a sequence and spliced in
	 BEFORE the strlen insn emitted above.  */
      start_sequence ();
      pat = expand_expr (src, src_reg, ptr_mode, EXPAND_NORMAL);
      if (pat != src_reg)
	emit_move_insn (src_reg, pat);
      pat = get_insns ();
      end_sequence ();

      if (before_strlen)
	emit_insn_after (pat, before_strlen);
      else
	emit_insn_before (pat, get_insns ());

      /* Return the value in the proper mode for this function.  */
      if (GET_MODE (result) == target_mode)
	target = result;
      else if (target != 0)
	convert_move (target, result, 0);
      else
	target = convert_to_mode (target_mode, result, 0);

      return target;
    }
}
3219
3220 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3221 bytes from constant string DATA + OFFSET and return it as target
3222 constant. */
3223
3224 static rtx
3225 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3226 enum machine_mode mode)
3227 {
3228 const char *str = (const char *) data;
3229
3230 gcc_assert (offset >= 0
3231 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3232 <= strlen (str) + 1));
3233
3234 return c_readstr (str + offset, mode);
3235 }
3236
/* Expand a call EXP to the memcpy builtin.
   Return NULL_RTX if we failed, the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_memcpy (tree exp, rtx target)
{
  if (!validate_arglist (exp,
 			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      tree len = CALL_EXPR_ARG (exp, 2);
      const char *src_str;
      unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
      unsigned int dest_align
	= get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
      rtx dest_mem, src_mem, dest_addr, len_rtx;
      HOST_WIDE_INT expected_size = -1;
      unsigned int expected_align = 0;

      /* If DEST is not a pointer type, call the normal function.  */
      if (dest_align == 0)
	return NULL_RTX;

      /* If either SRC is not a pointer type, don't do this
	 operation in-line.  */
      if (src_align == 0)
	return NULL_RTX;

      /* Profile feedback may supply better alignment/size estimates.  */
      if (currently_expanding_gimple_stmt)
        stringop_block_profile (currently_expanding_gimple_stmt,
				&expected_align, &expected_size);

      if (expected_align < dest_align)
	expected_align = dest_align;
      dest_mem = get_memory_rtx (dest, len);
      set_mem_align (dest_mem, dest_align);
      len_rtx = expand_normal (len);
      src_str = c_getstr (src);

      /* If SRC is a string constant and block move would be done
	 by pieces, we can avoid loading the string from memory
	 and only stored the computed constants.  */
      if (src_str
	  && CONST_INT_P (len_rtx)
	  && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
	  && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
				  CONST_CAST (char *, src_str),
				  dest_align, false))
	{
	  dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
				      builtin_memcpy_read_str,
				      CONST_CAST (char *, src_str),
				      dest_align, false, 0);
	  dest_mem = force_operand (XEXP (dest_mem, 0), target);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}

      src_mem = get_memory_rtx (src, len);
      set_mem_align (src_mem, src_align);

      /* Copy word part most expediently.  */
      dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
				         CALL_EXPR_TAILCALL (exp)
				         ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
					 expected_align, expected_size);

      /* memcpy returns DEST; materialize its address if the block move
	 didn't already produce it.  */
      if (dest_addr == 0)
	{
	  dest_addr = force_operand (XEXP (dest_mem, 0), target);
	  dest_addr = convert_memory_address (ptr_mode, dest_addr);
	}
      return dest_addr;
    }
}
3317
3318 /* Expand a call EXP to the mempcpy builtin.
3319 Return NULL_RTX if we failed; the caller should emit a normal call,
3320 otherwise try to get the result in TARGET, if convenient (and in
3321 mode MODE if that's convenient). If ENDP is 0 return the
3322 destination pointer, if ENDP is 1 return the end pointer ala
3323 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3324 stpcpy. */
3325
3326 static rtx
3327 expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3328 {
3329 if (!validate_arglist (exp,
3330 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3331 return NULL_RTX;
3332 else
3333 {
3334 tree dest = CALL_EXPR_ARG (exp, 0);
3335 tree src = CALL_EXPR_ARG (exp, 1);
3336 tree len = CALL_EXPR_ARG (exp, 2);
3337 return expand_builtin_mempcpy_args (dest, src, len,
3338 target, mode, /*endp=*/ 1);
3339 }
3340 }
3341
/* Helper function to do the actual work for expand_builtin_mempcpy.  The
   arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_mempcpy.  */

static rtx
expand_builtin_mempcpy_args (tree dest, tree src, tree len,
			     rtx target, enum machine_mode mode, int endp)
{
    /* If return value is ignored, transform mempcpy into memcpy.  */
  if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_MEMCPY])
    {
      tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
      tree result = build_call_nofold (fn, 3, dest, src, len);
      return expand_expr (result, target, mode, EXPAND_NORMAL);
    }
  else
    {
      const char *src_str;
      unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
      unsigned int dest_align
	= get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
      rtx dest_mem, src_mem, len_rtx;

      /* If either SRC or DEST is not a pointer type, don't do this
	 operation in-line.  */
      if (dest_align == 0 || src_align == 0)
	return NULL_RTX;

      /* If LEN is not constant, call the normal function.  */
      if (! host_integerp (len, 1))
	return NULL_RTX;

      len_rtx = expand_normal (len);
      src_str = c_getstr (src);

      /* If SRC is a string constant and block move would be done
	 by pieces, we can avoid loading the string from memory
	 and only stored the computed constants.  */
      if (src_str
	  && CONST_INT_P (len_rtx)
	  && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
	  && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
				  CONST_CAST (char *, src_str),
				  dest_align, false))
	{
	  dest_mem = get_memory_rtx (dest, len);
	  set_mem_align (dest_mem, dest_align);
	  /* ENDP selects which pointer store_by_pieces returns:
	     start (0), end (1), or end-1 (2).  */
	  dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
				      builtin_memcpy_read_str,
				      CONST_CAST (char *, src_str),
				      dest_align, false, endp);
	  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}

      /* Otherwise fall back to a piecewise move when the constant
	 length is small enough for the common alignment.  */
      if (CONST_INT_P (len_rtx)
	  && can_move_by_pieces (INTVAL (len_rtx),
				 MIN (dest_align, src_align)))
	{
	  dest_mem = get_memory_rtx (dest, len);
	  set_mem_align (dest_mem, dest_align);
	  src_mem = get_memory_rtx (src, len);
	  set_mem_align (src_mem, src_align);
	  dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
				     MIN (dest_align, src_align), endp);
	  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}

      return NULL_RTX;
    }
}
3418
3419 #ifndef HAVE_movstr
3420 # define HAVE_movstr 0
3421 # define CODE_FOR_movstr CODE_FOR_nothing
3422 #endif
3423
/* Expand into a movstr instruction, if one is available.  Return NULL_RTX if
   we failed, the caller should emit a normal call, otherwise try to
   get the result in TARGET, if convenient.  If ENDP is 0 return the
   destination pointer, if ENDP is 1 return the end pointer ala
   mempcpy, and if ENDP is 2 return the end pointer minus one ala
   stpcpy.  */

static rtx
expand_movstr (tree dest, tree src, rtx target, int endp)
{
  rtx end;
  rtx dest_mem;
  rtx src_mem;
  rtx insn;
  const struct insn_data * data;

  if (!HAVE_movstr)
    return NULL_RTX;

  dest_mem = get_memory_rtx (dest, NULL);
  src_mem = get_memory_rtx (src, NULL);
  if (!endp)
    {
      /* strcpy-style: return the destination start; the movstr "end"
	 operand goes into a scratch register.  */
      target = force_reg (Pmode, XEXP (dest_mem, 0));
      dest_mem = replace_equiv_address (dest_mem, target);
      end = gen_reg_rtx (Pmode);
    }
  else
    {
      /* stpcpy/mempcpy-style: movstr's end result IS the value we
	 return, so aim it at TARGET when one was supplied.  */
      if (target == 0 || target == const0_rtx)
	{
	  end = gen_reg_rtx (Pmode);
	  if (target == 0)
	    target = end;
	}
      else
	end = target;
    }

  data = insn_data + CODE_FOR_movstr;

  if (data->operand[0].mode != VOIDmode)
    end = gen_lowpart (data->operand[0].mode, end);

  insn = data->genfun (end, dest_mem, src_mem);

  gcc_assert (insn);

  emit_insn (insn);

  /* movstr is supposed to set end to the address of the NUL
     terminator.  If the caller requested a mempcpy-like return value,
     adjust it.  */
  if (endp == 1 && target != const0_rtx)
    {
      rtx tem = plus_constant (gen_lowpart (GET_MODE (target), end), 1);
      emit_move_insn (target, force_operand (tem, NULL_RTX));
    }

  return target;
}
3485
3486 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3487 NULL_RTX if we failed the caller should emit a normal call, otherwise
3488 try to get the result in TARGET, if convenient (and in mode MODE if that's
3489 convenient). */
3490
3491 static rtx
3492 expand_builtin_strcpy (tree exp, rtx target)
3493 {
3494 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3495 {
3496 tree dest = CALL_EXPR_ARG (exp, 0);
3497 tree src = CALL_EXPR_ARG (exp, 1);
3498 return expand_builtin_strcpy_args (dest, src, target);
3499 }
3500 return NULL_RTX;
3501 }
3502
/* Helper function to do the actual work for expand_builtin_strcpy.  The
   arguments to the builtin_strcpy call DEST and SRC are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_strcpy.  */

static rtx
expand_builtin_strcpy_args (tree dest, tree src, rtx target)
{
  /* strcpy returns DEST, so ask expand_movstr for the start pointer.  */
  return expand_movstr (dest, src, target, /*endp=*/0);
}
3514
/* Expand a call EXP to the stpcpy builtin.
   Return NULL_RTX if we failed the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
{
  tree dst, src;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  /* If return value is ignored, transform stpcpy into strcpy.  */
  if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_STRCPY])
    {
      tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
      tree result = build_call_nofold (fn, 2, dst, src);
      return expand_expr (result, target, mode, EXPAND_NORMAL);
    }
  else
    {
      tree len, lenp1;
      rtx ret;

      /* Ensure we get an actual string whose length can be evaluated at
	 compile-time, not an expression containing a string.  This is
	 because the latter will potentially produce pessimized code
	 when used to produce the return value.  */
      if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
	return expand_movstr (dst, src, target, /*endp=*/2);

      /* Known length: stpcpy(d,s) == mempcpy(d,s,strlen(s)+1) - 1.  */
      lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
      ret = expand_builtin_mempcpy_args (dst, src, lenp1,
 					 target, mode, /*endp=*/2);

      if (ret)
	return ret;

      /* mempcpy expansion failed; try strcpy and add the constant
	 length to its returned pointer by hand.  */
      if (TREE_CODE (len) == INTEGER_CST)
	{
	  rtx len_rtx = expand_normal (len);

	  if (CONST_INT_P (len_rtx))
	    {
	      ret = expand_builtin_strcpy_args (dst, src, target);

	      if (ret)
		{
		  if (! target)
		    {
		      if (mode != VOIDmode)
			target = gen_reg_rtx (mode);
		      else
			target = gen_reg_rtx (GET_MODE (ret));
		    }
		  if (GET_MODE (target) != GET_MODE (ret))
		    ret = gen_lowpart (GET_MODE (target), ret);

		  ret = plus_constant (ret, INTVAL (len_rtx));
		  ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
		  gcc_assert (ret);

		  return target;
		}
	    }
	}

      /* Last resort: the movstr insn, which returns end-1 directly.  */
      return expand_movstr (dst, src, target, /*endp=*/2);
    }
}
3590
3591 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3592 bytes from constant string DATA + OFFSET and return it as target
3593 constant. */
3594
3595 rtx
3596 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3597 enum machine_mode mode)
3598 {
3599 const char *str = (const char *) data;
3600
3601 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3602 return const0_rtx;
3603
3604 return c_readstr (str + offset, mode);
3605 }
3606
/* Expand expression EXP, which is a call to the strncpy builtin.  Return
   NULL_RTX if we failed the caller should emit a normal call.  */

static rtx
expand_builtin_strncpy (tree exp, rtx target)
{
  location_t loc = EXPR_LOCATION (exp);

  if (validate_arglist (exp,
 			POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      tree len = CALL_EXPR_ARG (exp, 2);
      tree slen = c_strlen (src, 1);

      /* We must be passed a constant len and src parameter.  */
      if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
	return NULL_RTX;

      /* SLEN becomes strlen(src)+1, the number of meaningful bytes.  */
      slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

      /* We're required to pad with trailing zeros if the requested
	 len is greater than strlen(s2)+1.  In that case try to
	 use store_by_pieces, if it fails, punt.  */
      if (tree_int_cst_lt (slen, len))
	{
	  unsigned int dest_align
	    = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
	  const char *p = c_getstr (src);
	  rtx dest_mem;

	  if (!p || dest_align == 0 || !host_integerp (len, 1)
	      || !can_store_by_pieces (tree_low_cst (len, 1),
				       builtin_strncpy_read_str,
				       CONST_CAST (char *, p),
				       dest_align, false))
	    return NULL_RTX;

	  dest_mem = get_memory_rtx (dest, len);
	  /* builtin_strncpy_read_str supplies zeros past the NUL,
	     giving the required padding for free.  */
	  store_by_pieces (dest_mem, tree_low_cst (len, 1),
			   builtin_strncpy_read_str,
			   CONST_CAST (char *, p), dest_align, false, 0);
	  dest_mem = force_operand (XEXP (dest_mem, 0), target);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}
    }
  return NULL_RTX;
}
3657
3658 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3659 bytes from constant string DATA + OFFSET and return it as target
3660 constant. */
3661
3662 rtx
3663 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3664 enum machine_mode mode)
3665 {
3666 const char *c = (const char *) data;
3667 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3668
3669 memset (p, *c, GET_MODE_SIZE (mode));
3670
3671 return c_readstr (p, mode);
3672 }
3673
3674 /* Callback routine for store_by_pieces. Return the RTL of a register
3675 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3676 char value given in the RTL register data. For example, if mode is
3677 4 bytes wide, return the RTL for 0x01010101*data. */
3678
3679 static rtx
3680 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3681 enum machine_mode mode)
3682 {
3683 rtx target, coeff;
3684 size_t size;
3685 char *p;
3686
3687 size = GET_MODE_SIZE (mode);
3688 if (size == 1)
3689 return (rtx) data;
3690
3691 p = XALLOCAVEC (char, size);
3692 memset (p, 1, size);
3693 coeff = c_readstr (p, mode);
3694
3695 target = convert_to_mode (mode, (rtx) data, 1);
3696 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3697 return force_reg (mode, target);
3698 }
3699
3700 /* Expand expression EXP, which is a call to the memset builtin. Return
3701 NULL_RTX if we failed the caller should emit a normal call, otherwise
3702 try to get the result in TARGET, if convenient (and in mode MODE if that's
3703 convenient). */
3704
3705 static rtx
3706 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3707 {
3708 if (!validate_arglist (exp,
3709 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3710 return NULL_RTX;
3711 else
3712 {
3713 tree dest = CALL_EXPR_ARG (exp, 0);
3714 tree val = CALL_EXPR_ARG (exp, 1);
3715 tree len = CALL_EXPR_ARG (exp, 2);
3716 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3717 }
3718 }
3719
/* Helper function to do the actual work for expand_builtin_memset.  The
   arguments to the builtin_memset call DEST, VAL, and LEN are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_memset.  ORIG_EXP is the original call expression and is
   used for the tail-call flag and for the library-call fallback.  */

static rtx
expand_builtin_memset_args (tree dest, tree val, tree len,
			    rtx target, enum machine_mode mode, tree orig_exp)
{
  tree fndecl, fn;
  enum built_in_function fcode;
  char c;
  unsigned int dest_align;
  rtx dest_mem, dest_addr, len_rtx;
  HOST_WIDE_INT expected_size = -1;
  unsigned int expected_align = 0;

  dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);

  /* If DEST is not a pointer type, don't do this operation in-line.  */
  if (dest_align == 0)
    return NULL_RTX;

  /* Pick up profile-feedback hints about the typical block size and
     alignment, if available.  */
  if (currently_expanding_gimple_stmt)
    stringop_block_profile (currently_expanding_gimple_stmt,
			    &expected_align, &expected_size);

  if (expected_align < dest_align)
    expected_align = dest_align;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      /* Evaluate and ignore VAL in case it has side-effects.  */
      expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
      return expand_expr (dest, target, mode, EXPAND_NORMAL);
    }

  /* Stabilize the arguments in case we fail.  */
  dest = builtin_save_expr (dest);
  val = builtin_save_expr (val);
  len = builtin_save_expr (len);

  len_rtx = expand_normal (len);
  dest_mem = get_memory_rtx (dest, len);

  /* Case 1: the fill value is not a compile-time constant.  */
  if (TREE_CODE (val) != INTEGER_CST)
    {
      rtx val_rtx;

      val_rtx = expand_normal (val);
      val_rtx = convert_to_mode (TYPE_MODE (unsigned_char_type_node),
				 val_rtx, 0);

      /* Assume that we can memset by pieces if we can store
       * the coefficients by pieces (in the required modes).
       * We can't pass builtin_memset_gen_str as that emits RTL.  */
      c = 1;
      if (host_integerp (len, 1)
	  && can_store_by_pieces (tree_low_cst (len, 1),
				  builtin_memset_read_str, &c, dest_align,
				  true))
	{
	  val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
			       val_rtx);
	  store_by_pieces (dest_mem, tree_low_cst (len, 1),
			   builtin_memset_gen_str, val_rtx, dest_align,
			   true, 0);
	}
      else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
					dest_align, expected_align,
					expected_size))
	goto do_libcall;

      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  /* Case 2: VAL is an INTEGER_CST; try to narrow it to a single byte.
     If that fails (e.g. value doesn't fit), fall back to the library.  */
  if (target_char_cast (val, &c))
    goto do_libcall;

  /* Case 2a: a nonzero constant byte — store by pieces or use the
     target's setmem pattern.  */
  if (c)
    {
      if (host_integerp (len, 1)
	  && can_store_by_pieces (tree_low_cst (len, 1),
				  builtin_memset_read_str, &c, dest_align,
				  true))
	store_by_pieces (dest_mem, tree_low_cst (len, 1),
			 builtin_memset_read_str, &c, dest_align, true, 0);
      else if (!set_storage_via_setmem (dest_mem, len_rtx, GEN_INT (c),
					dest_align, expected_align,
					expected_size))
	goto do_libcall;

      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  /* Case 2b: storing zero — let clear_storage_hints emit the block
     clear, honoring the original call's tail-call flag.  */
  set_mem_align (dest_mem, dest_align);
  dest_addr = clear_storage_hints (dest_mem, len_rtx,
				   CALL_EXPR_TAILCALL (orig_exp)
				   ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
				   expected_align, expected_size);

  if (dest_addr == 0)
    {
      dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_addr = convert_memory_address (ptr_mode, dest_addr);
    }

  return dest_addr;

  /* Inline expansion failed: emit a call to the original library
     function (memset or bzero, whichever ORIG_EXP called) using the
     stabilized arguments so they are not evaluated twice.  */
 do_libcall:
  fndecl = get_callee_fndecl (orig_exp);
  fcode = DECL_FUNCTION_CODE (fndecl);
  if (fcode == BUILT_IN_MEMSET)
    fn = build_call_nofold (fndecl, 3, dest, val, len);
  else if (fcode == BUILT_IN_BZERO)
    fn = build_call_nofold (fndecl, 2, dest, len);
  else
    gcc_unreachable ();
  gcc_assert (TREE_CODE (fn) == CALL_EXPR);
  CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
  return expand_call (fn, target, target == const0_rtx);
}
3848
3849 /* Expand expression EXP, which is a call to the bzero builtin. Return
3850 NULL_RTX if we failed the caller should emit a normal call. */
3851
3852 static rtx
3853 expand_builtin_bzero (tree exp)
3854 {
3855 tree dest, size;
3856 location_t loc = EXPR_LOCATION (exp);
3857
3858 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3859 return NULL_RTX;
3860
3861 dest = CALL_EXPR_ARG (exp, 0);
3862 size = CALL_EXPR_ARG (exp, 1);
3863
3864 /* New argument list transforming bzero(ptr x, int y) to
3865 memset(ptr x, int 0, size_t y). This is done this way
3866 so that if it isn't expanded inline, we fallback to
3867 calling bzero instead of memset. */
3868
3869 return expand_builtin_memset_args (dest, integer_zero_node,
3870 fold_convert_loc (loc, sizetype, size),
3871 const0_rtx, VOIDmode, exp);
3872 }
3873
/* Expand expression EXP, which is a call to the memcmp built-in function.
   Return NULL_RTX if we failed and the
   caller should emit a normal call, otherwise try to get the result in
   TARGET, if convenient (and in mode MODE, if that's convenient).  */

static rtx
expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
		       ATTRIBUTE_UNUSED enum machine_mode mode)
{
  location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);

  if (!validate_arglist (exp,
 			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

/* This whole expansion is only possible when the target provides a
   cmpmem or cmpstrn instruction pattern.  */
#if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
  {
    rtx arg1_rtx, arg2_rtx, arg3_rtx;
    rtx result;
    rtx insn;
    tree arg1 = CALL_EXPR_ARG (exp, 0);
    tree arg2 = CALL_EXPR_ARG (exp, 1);
    tree len = CALL_EXPR_ARG (exp, 2);

    int arg1_align
      = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
    int arg2_align
      = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
    enum machine_mode insn_mode;

    /* Pick the result mode from whichever pattern is available,
       preferring cmpmemsi.  */
#ifdef HAVE_cmpmemsi
    if (HAVE_cmpmemsi)
      insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
    else
#endif
#ifdef HAVE_cmpstrnsi
    if (HAVE_cmpstrnsi)
      insn_mode = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
    else
#endif
      return NULL_RTX;

    /* If we don't have POINTER_TYPE, call the function.  */
    if (arg1_align == 0 || arg2_align == 0)
      return NULL_RTX;

    /* Make a place to write the result of the instruction.  */
    result = target;
    if (! (result != 0
	   && REG_P (result) && GET_MODE (result) == insn_mode
	   && REGNO (result) >= FIRST_PSEUDO_REGISTER))
      result = gen_reg_rtx (insn_mode);

    arg1_rtx = get_memory_rtx (arg1, len);
    arg2_rtx = get_memory_rtx (arg2, len);
    arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));

    /* Set MEM_SIZE as appropriate.  */
    if (CONST_INT_P (arg3_rtx))
      {
	set_mem_size (arg1_rtx, arg3_rtx);
	set_mem_size (arg2_rtx, arg3_rtx);
      }

    /* Emit the compare using whichever pattern the target provides;
       the alignment operand is the smaller of the two pointer
       alignments.  A pattern may FAIL, leaving INSN null.  */
#ifdef HAVE_cmpmemsi
    if (HAVE_cmpmemsi)
      insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
			   GEN_INT (MIN (arg1_align, arg2_align)));
    else
#endif
#ifdef HAVE_cmpstrnsi
    if (HAVE_cmpstrnsi)
      insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
			    GEN_INT (MIN (arg1_align, arg2_align)));
    else
#endif
      gcc_unreachable ();

    /* If the pattern FAILed, fall back to an explicit call to the
       memcmp library function with the already-expanded operands.  */
    if (insn)
      emit_insn (insn);
    else
      emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
			       TYPE_MODE (integer_type_node), 3,
			       XEXP (arg1_rtx, 0), Pmode,
			       XEXP (arg2_rtx, 0), Pmode,
			       convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
						TYPE_UNSIGNED (sizetype)),
			       TYPE_MODE (sizetype));

    /* Return the value in the proper mode for this function.  */
    mode = TYPE_MODE (TREE_TYPE (exp));
    if (GET_MODE (result) == mode)
      return result;
    else if (target != 0)
      {
	convert_move (target, result, 0);
	return target;
      }
    else
      return convert_to_mode (mode, result, 0);
  }
#endif

  return NULL_RTX;
}
3979
/* Expand expression EXP, which is a call to the strcmp builtin.  Return NULL_RTX
   if we failed the caller should emit a normal call, otherwise try to get
   the result in TARGET, if convenient.  */

static rtx
expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
{
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

/* Inline expansion needs a target cmpstr or cmpstrn pattern.  */
#if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
  if (cmpstr_optab[SImode] != CODE_FOR_nothing
      || cmpstrn_optab[SImode] != CODE_FOR_nothing)
    {
      rtx arg1_rtx, arg2_rtx;
      rtx result, insn = NULL_RTX;
      tree fndecl, fn;
      tree arg1 = CALL_EXPR_ARG (exp, 0);
      tree arg2 = CALL_EXPR_ARG (exp, 1);

      int arg1_align
	= get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
      int arg2_align
	= get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;

      /* If we don't have POINTER_TYPE, call the function.  */
      if (arg1_align == 0 || arg2_align == 0)
	return NULL_RTX;

      /* Stabilize the arguments in case gen_cmpstr(n)si fail.  */
      arg1 = builtin_save_expr (arg1);
      arg2 = builtin_save_expr (arg2);

      arg1_rtx = get_memory_rtx (arg1, NULL);
      arg2_rtx = get_memory_rtx (arg2, NULL);

#ifdef HAVE_cmpstrsi
      /* Try to call cmpstrsi.  */
      if (HAVE_cmpstrsi)
	{
	  enum machine_mode insn_mode
	    = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;

	  /* Make a place to write the result of the instruction.  */
	  result = target;
	  if (! (result != 0
		 && REG_P (result) && GET_MODE (result) == insn_mode
		 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	    result = gen_reg_rtx (insn_mode);

	  insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
			       GEN_INT (MIN (arg1_align, arg2_align)));
	}
#endif
#ifdef HAVE_cmpstrnsi
      /* Try to determine at least one length and call cmpstrnsi.  */
      if (!insn && HAVE_cmpstrnsi)
	{
	  tree len;
	  rtx arg3_rtx;

	  enum machine_mode insn_mode
	    = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
	  tree len1 = c_strlen (arg1, 1);
	  tree len2 = c_strlen (arg2, 1);

	  /* Lengths from c_strlen exclude the terminating NUL; add one
	     byte so the compare covers the terminator too.  */
	  if (len1)
	    len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
	  if (len2)
	    len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);

	  /* If we don't have a constant length for the first, use the length
	     of the second, if we know it.  We don't require a constant for
	     this case; some cost analysis could be done if both are available
	     but neither is constant.  For now, assume they're equally cheap,
	     unless one has side effects.  If both strings have constant lengths,
	     use the smaller.  */

	  if (!len1)
	    len = len2;
	  else if (!len2)
	    len = len1;
	  else if (TREE_SIDE_EFFECTS (len1))
	    len = len2;
	  else if (TREE_SIDE_EFFECTS (len2))
	    len = len1;
	  else if (TREE_CODE (len1) != INTEGER_CST)
	    len = len2;
	  else if (TREE_CODE (len2) != INTEGER_CST)
	    len = len1;
	  else if (tree_int_cst_lt (len1, len2))
	    len = len1;
	  else
	    len = len2;

	  /* If both arguments have side effects, we cannot optimize.  */
	  if (!len || TREE_SIDE_EFFECTS (len))
	    goto do_libcall;

	  arg3_rtx = expand_normal (len);

	  /* Make a place to write the result of the instruction.  */
	  result = target;
	  if (! (result != 0
		 && REG_P (result) && GET_MODE (result) == insn_mode
		 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	    result = gen_reg_rtx (insn_mode);

	  insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
				GEN_INT (MIN (arg1_align, arg2_align)));
	}
#endif

      /* If either pattern produced an insn, emit it and convert the
	 result to the mode callers of strcmp expect.  */
      if (insn)
	{
	  enum machine_mode mode;
	  emit_insn (insn);

	  /* Return the value in the proper mode for this function.  */
	  mode = TYPE_MODE (TREE_TYPE (exp));
	  if (GET_MODE (result) == mode)
	    return result;
	  if (target == 0)
	    return convert_to_mode (mode, result, 0);
	  convert_move (target, result, 0);
	  return target;
	}

      /* Expand the library call ourselves using a stabilized argument
	 list to avoid re-evaluating the function's arguments twice.  */
#ifdef HAVE_cmpstrnsi
    do_libcall:
#endif
      fndecl = get_callee_fndecl (exp);
      fn = build_call_nofold (fndecl, 2, arg1, arg2);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_call (fn, target, target == const0_rtx);
    }
#endif
  return NULL_RTX;
}
4122
/* Expand expression EXP, which is a call to the strncmp builtin.  Return
   NULL_RTX if we failed the caller should emit a normal call, otherwise try to get
   the result in TARGET, if convenient.  */

static rtx
expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
			ATTRIBUTE_UNUSED enum machine_mode mode)
{
  location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);

  if (!validate_arglist (exp,
 			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* If c_strlen can determine an expression for one of the string
     lengths, and it doesn't have side effects, then emit cmpstrnsi
     using length MIN(strlen(string)+1, arg3).  */
#ifdef HAVE_cmpstrnsi
  if (HAVE_cmpstrnsi)
    {
      tree len, len1, len2;
      rtx arg1_rtx, arg2_rtx, arg3_rtx;
      rtx result, insn;
      tree fndecl, fn;
      tree arg1 = CALL_EXPR_ARG (exp, 0);
      tree arg2 = CALL_EXPR_ARG (exp, 1);
      tree arg3 = CALL_EXPR_ARG (exp, 2);

      int arg1_align
	= get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
      int arg2_align
	= get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
      enum machine_mode insn_mode
	= insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;

      len1 = c_strlen (arg1, 1);
      len2 = c_strlen (arg2, 1);

      /* c_strlen excludes the terminating NUL; account for it.  */
      if (len1)
	len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
      if (len2)
	len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);

      /* If we don't have a constant length for the first, use the length
	 of the second, if we know it.  We don't require a constant for
	 this case; some cost analysis could be done if both are available
	 but neither is constant.  For now, assume they're equally cheap,
	 unless one has side effects.  If both strings have constant lengths,
	 use the smaller.  */

      if (!len1)
	len = len2;
      else if (!len2)
	len = len1;
      else if (TREE_SIDE_EFFECTS (len1))
	len = len2;
      else if (TREE_SIDE_EFFECTS (len2))
	len = len1;
      else if (TREE_CODE (len1) != INTEGER_CST)
	len = len2;
      else if (TREE_CODE (len2) != INTEGER_CST)
	len = len1;
      else if (tree_int_cst_lt (len1, len2))
	len = len1;
      else
	len = len2;

      /* If both arguments have side effects, we cannot optimize.  */
      if (!len || TREE_SIDE_EFFECTS (len))
	return NULL_RTX;

      /* The actual new length parameter is MIN(len,arg3).  */
      len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
			     fold_convert_loc (loc, TREE_TYPE (len), arg3));

      /* If we don't have POINTER_TYPE, call the function.  */
      if (arg1_align == 0 || arg2_align == 0)
	return NULL_RTX;

      /* Make a place to write the result of the instruction.  */
      result = target;
      if (! (result != 0
	     && REG_P (result) && GET_MODE (result) == insn_mode
	     && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	result = gen_reg_rtx (insn_mode);

      /* Stabilize the arguments in case gen_cmpstrnsi fails.  */
      arg1 = builtin_save_expr (arg1);
      arg2 = builtin_save_expr (arg2);
      len = builtin_save_expr (len);

      arg1_rtx = get_memory_rtx (arg1, len);
      arg2_rtx = get_memory_rtx (arg2, len);
      arg3_rtx = expand_normal (len);
      insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
			    GEN_INT (MIN (arg1_align, arg2_align)));
      /* The pattern may FAIL, leaving INSN null.  */
      if (insn)
	{
	  emit_insn (insn);

	  /* Return the value in the proper mode for this function.  */
	  mode = TYPE_MODE (TREE_TYPE (exp));
	  if (GET_MODE (result) == mode)
	    return result;
	  if (target == 0)
	    return convert_to_mode (mode, result, 0);
	  convert_move (target, result, 0);
	  return target;
	}

      /* Expand the library call ourselves using a stabilized argument
	 list to avoid re-evaluating the function's arguments twice.  */
      fndecl = get_callee_fndecl (exp);
      fn = build_call_nofold (fndecl, 3, arg1, arg2, len);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_call (fn, target, target == const0_rtx);
    }
#endif
  return NULL_RTX;
}
4244
/* Expand a call to __builtin_saveregs.  The register-save code is
   emitted once, at the start of the function; the resulting value is
   cached in saveregs_value and reused by any subsequent calls.  */

rtx
expand_builtin_saveregs (void)
{
  rtx val, seq;

  /* Don't do __builtin_saveregs more than once in a function.
     Save the result of the first call and reuse it.  */
  if (saveregs_value != 0)
    return saveregs_value;

  /* When this function is called, it means that registers must be
     saved on entry to this function.  So we migrate the call to the
     first insn of this function.  */

  /* Collect the target-generated save code in a detached sequence so
     it can be re-emitted at the function entry below.  */
  start_sequence ();

  /* Do whatever the machine needs done in this case.  */
  val = targetm.calls.expand_builtin_saveregs ();

  seq = get_insns ();
  end_sequence ();

  saveregs_value = val;

  /* Put the insns after the NOTE that starts the function.  If this
     is inside a start_sequence, make the outer-level insn chain current, so
     the code is placed at the start of the function.  */
  push_topmost_sequence ();
  emit_insn_after (seq, entry_of_function ());
  pop_topmost_sequence ();

  return val;
}
4281
4282 /* __builtin_args_info (N) returns word N of the arg space info
4283 for the current function. The number and meanings of words
4284 is controlled by the definition of CUMULATIVE_ARGS. */
4285
4286 static rtx
4287 expand_builtin_args_info (tree exp)
4288 {
4289 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
4290 int *word_ptr = (int *) &crtl->args.info;
4291
4292 gcc_assert (sizeof (CUMULATIVE_ARGS) % sizeof (int) == 0);
4293
4294 if (call_expr_nargs (exp) != 0)
4295 {
4296 if (!host_integerp (CALL_EXPR_ARG (exp, 0), 0))
4297 error ("argument of %<__builtin_args_info%> must be constant");
4298 else
4299 {
4300 HOST_WIDE_INT wordnum = tree_low_cst (CALL_EXPR_ARG (exp, 0), 0);
4301
4302 if (wordnum < 0 || wordnum >= nwords)
4303 error ("argument of %<__builtin_args_info%> out of range");
4304 else
4305 return GEN_INT (word_ptr[wordnum]);
4306 }
4307 }
4308 else
4309 error ("missing argument in %<__builtin_args_info%>");
4310
4311 return const0_rtx;
4312 }
4313
4314 /* Expand a call to __builtin_next_arg. */
4315
4316 static rtx
4317 expand_builtin_next_arg (void)
4318 {
4319 /* Checking arguments is already done in fold_builtin_next_arg
4320 that must be called before this function. */
4321 return expand_binop (ptr_mode, add_optab,
4322 crtl->args.internal_arg_pointer,
4323 crtl->args.arg_offset_rtx,
4324 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4325 }
4326
/* Make it easier for the backends by protecting the valist argument
   from multiple evaluations.  LOC is the location to use for new trees;
   NEEDS_LVALUE is nonzero when the caller must be able to assign
   through the returned expression.  */

static tree
stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
{
  tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));

  gcc_assert (vatype != NULL_TREE);

  /* Array-typed va_list: the value we hand to the backend is a pointer
     to the array's element type.  */
  if (TREE_CODE (vatype) == ARRAY_TYPE)
    {
      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);

      /* For this case, the backends will be expecting a pointer to
	 vatype, but it's possible we've actually been given an array
	 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
	 So fix it.  */
      if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
	{
	  tree p1 = build_pointer_type (TREE_TYPE (vatype));
	  valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
	}
    }
  else
    {
      tree pt;

      /* Scalar va_list: take its address, stabilize that, and hand
	 back a dereference so the backend sees a single evaluation.  */
      if (! needs_lvalue)
	{
	  if (! TREE_SIDE_EFFECTS (valist))
	    return valist;

	  pt = build_pointer_type (vatype);
	  valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
	  /* Mark the ADDR_EXPR so the save_expr below captures it.  */
	  TREE_SIDE_EFFECTS (valist) = 1;
	}

      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);
      valist = build_fold_indirect_ref_loc (loc, valist);
    }

  return valist;
}
4373
/* The "standard" definition of va_list is void*.  Used as the default
   for targetm.build_builtin_va_list.  */

tree
std_build_builtin_va_list (void)
{
  return ptr_type_node;
}
4381
/* The "standard" abi va_list is va_list_type_node.  FNDECL is ignored;
   the default ABI does not vary the va_list type per function.  */

tree
std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
{
  return va_list_type_node;
}
4389
4390 /* The "standard" type of va_list is va_list_type_node. */
4391
4392 tree
4393 std_canonical_va_list_type (tree type)
4394 {
4395 tree wtype, htype;
4396
4397 if (INDIRECT_REF_P (type))
4398 type = TREE_TYPE (type);
4399 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE(type)))
4400 type = TREE_TYPE (type);
4401 wtype = va_list_type_node;
4402 htype = type;
4403 /* Treat structure va_list types. */
4404 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4405 htype = TREE_TYPE (htype);
4406 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4407 {
4408 /* If va_list is an array type, the argument may have decayed
4409 to a pointer type, e.g. by being passed to another function.
4410 In that case, unwrap both types so that we can compare the
4411 underlying records. */
4412 if (TREE_CODE (htype) == ARRAY_TYPE
4413 || POINTER_TYPE_P (htype))
4414 {
4415 wtype = TREE_TYPE (wtype);
4416 htype = TREE_TYPE (htype);
4417 }
4418 }
4419 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4420 return va_list_type_node;
4421
4422 return NULL_TREE;
4423 }
4424
4425 /* The "standard" implementation of va_start: just assign `nextarg' to
4426 the variable. */
4427
4428 void
4429 std_expand_builtin_va_start (tree valist, rtx nextarg)
4430 {
4431 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4432 convert_move (va_r, nextarg, 0);
4433 }
4434
/* Expand EXP, a call to __builtin_va_start.  Always returns const0_rtx;
   errors are reported via error_at.  */

static rtx
expand_builtin_va_start (tree exp)
{
  rtx nextarg;
  tree valist;
  location_t loc = EXPR_LOCATION (exp);

  if (call_expr_nargs (exp) < 2)
    {
      error_at (loc, "too few arguments to function %<va_start%>");
      return const0_rtx;
    }

  /* fold_builtin_next_arg diagnoses bad second arguments; a nonzero
     return means an error was reported, so emit nothing.  */
  if (fold_builtin_next_arg (exp, true))
    return const0_rtx;

  nextarg = expand_builtin_next_arg ();
  /* Request an lvalue so the target hook can store through VALIST.  */
  valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);

  /* Prefer the target's va_start expander, falling back to the
     standard pointer-assignment implementation.  */
  if (targetm.expand_builtin_va_start)
    targetm.expand_builtin_va_start (valist, nextarg);
  else
    std_expand_builtin_va_start (valist, nextarg);

  return const0_rtx;
}
4463
/* The "standard" implementation of va_arg: read the value from the
   current (padded) address and increment by the (padded) size.  VALIST
   is the va_list expression, TYPE the requested argument type; setup
   statements go to PRE_P and the updated expression is returned.  */

tree
std_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
			  gimple_seq *post_p)
{
  tree addr, t, type_size, rounded_size, valist_tmp;
  unsigned HOST_WIDE_INT align, boundary;
  bool indirect;

#ifdef ARGS_GROW_DOWNWARD
  /* All of the alignment and movement below is for args-grow-up machines.
     As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
     implement their own specialized gimplify_va_arg_expr routines.  */
  gcc_unreachable ();
#endif

  /* Arguments passed by reference are fetched as a pointer and
     dereferenced at the end.  */
  indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
  if (indirect)
    type = build_pointer_type (type);

  align = PARM_BOUNDARY / BITS_PER_UNIT;
  boundary = FUNCTION_ARG_BOUNDARY (TYPE_MODE (type), type);

  /* When we align parameter on stack for caller, if the parameter
     alignment is beyond MAX_SUPPORTED_STACK_ALIGNMENT, it will be
     aligned at MAX_SUPPORTED_STACK_ALIGNMENT.  We will match callee
     here with caller.  */
  if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
    boundary = MAX_SUPPORTED_STACK_ALIGNMENT;

  boundary /= BITS_PER_UNIT;

  /* Hoist the valist value into a temporary for the moment.  */
  valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);

  /* va_list pointer is aligned to PARM_BOUNDARY.  If argument actually
     requires greater alignment, we must perform dynamic alignment.  */
  if (boundary > align
      && !integer_zerop (TYPE_SIZE (type)))
    {
      /* valist_tmp = (valist_tmp + boundary - 1) & -boundary, done as
	 two separate statements so each gimplifies cleanly.  */
      t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
		  fold_build2 (POINTER_PLUS_EXPR,
			       TREE_TYPE (valist),
			       valist_tmp, size_int (boundary - 1)));
      gimplify_and_add (t, pre_p);

      t = fold_convert (sizetype, valist_tmp);
      t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
		  fold_convert (TREE_TYPE (valist),
				fold_build2 (BIT_AND_EXPR, sizetype, t,
					     size_int (-boundary))));
      gimplify_and_add (t, pre_p);
    }
  else
    boundary = align;

  /* If the actual alignment is less than the alignment of the type,
     adjust the type accordingly so that we don't assume strict alignment
     when dereferencing the pointer.  */
  boundary *= BITS_PER_UNIT;
  if (boundary < TYPE_ALIGN (type))
    {
      type = build_variant_type_copy (type);
      TYPE_ALIGN (type) = boundary;
    }

  /* Compute the rounded size of the type.  */
  type_size = size_in_bytes (type);
  rounded_size = round_up (type_size, align);

  /* Reduce rounded_size so it's sharable with the postqueue.  */
  gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);

  /* Get AP.  */
  addr = valist_tmp;
  if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
    {
      /* Small args are padded downward.  */
      t = fold_build2_loc (input_location, GT_EXPR, sizetype,
		       rounded_size, size_int (align));
      t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
		       size_binop (MINUS_EXPR, rounded_size, type_size));
      addr = fold_build2 (POINTER_PLUS_EXPR,
			  TREE_TYPE (addr), addr, t);
    }

  /* Compute new value for AP.  */
  t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist), valist_tmp, rounded_size);
  t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
  gimplify_and_add (t, pre_p);

  addr = fold_convert (build_pointer_type (type), addr);

  /* For pass-by-reference arguments ADDR holds a pointer to the
     pointer; dereference once more to reach the value.  */
  if (indirect)
    addr = build_va_arg_indirect_ref (addr);

  return build_va_arg_indirect_ref (addr);
}
4564
4565 /* Build an indirect-ref expression over the given TREE, which represents a
4566 piece of a va_arg() expansion. */
4567 tree
4568 build_va_arg_indirect_ref (tree addr)
4569 {
4570 addr = build_fold_indirect_ref_loc (EXPR_LOCATION (addr), addr);
4571
4572 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
4573 mf_mark (addr);
4574
4575 return addr;
4576 }
4577
4578 /* Return a dummy expression of type TYPE in order to keep going after an
4579 error. */
4580
4581 static tree
4582 dummy_object (tree type)
4583 {
4584 tree t = build_int_cst (build_pointer_type (type), 0);
4585 return build1 (INDIRECT_REF, type, t);
4586 }
4587
/* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
   builtin function, but a very special sort of operator.  *EXPR_P is
   the VA_ARG_EXPR; setup goes to PRE_P/POST_P.  Returns a
   gimplify_status code (GS_ERROR on bad input).  */

enum gimplify_status
gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree promoted_type, have_va_type;
  tree valist = TREE_OPERAND (*expr_p, 0);
  tree type = TREE_TYPE (*expr_p);
  tree t;
  location_t loc = EXPR_LOCATION (*expr_p);

  /* Verify that valist is of the proper type.  */
  have_va_type = TREE_TYPE (valist);
  if (have_va_type == error_mark_node)
    return GS_ERROR;
  have_va_type = targetm.canonical_va_list_type (have_va_type);

  if (have_va_type == NULL_TREE)
    {
      error_at (loc, "first argument to %<va_arg%> not of type %<va_list%>");
      return GS_ERROR;
    }

  /* Generate a diagnostic for requesting data of a type that cannot
     be passed through `...' due to type promotion at the call site.  */
  if ((promoted_type = lang_hooks.types.type_promotes_to (type))
      != type)
    {
      /* Report the promotion problem once per compilation.  */
      static bool gave_help;
      bool warned;

      /* Unfortunately, this is merely undefined, rather than a constraint
	 violation, so we cannot make this an error.  If this call is never
	 executed, the program is still strictly conforming.  */
      warned = warning_at (loc, 0,
	  		   "%qT is promoted to %qT when passed through %<...%>",
			   type, promoted_type);
      if (!gave_help && warned)
	{
	  gave_help = true;
	  inform (loc, "(so you should pass %qT not %qT to %<va_arg%>)",
		  promoted_type, type);
	}

      /* We can, however, treat "undefined" any way we please.
	 Call abort to encourage the user to fix the program.  */
      if (warned)
	inform (loc, "if this code is reached, the program will abort");
      /* Before the abort, allow the evaluation of the va_list
	 expression to exit or longjmp.  */
      gimplify_and_add (valist, pre_p);
      t = build_call_expr_loc (loc,
			       implicit_built_in_decls[BUILT_IN_TRAP], 0);
      gimplify_and_add (t, pre_p);

      /* This is dead code, but go ahead and finish so that the
	 mode of the result comes out right.  */
      *expr_p = dummy_object (type);
      return GS_ALL_DONE;
    }
  else
    {
      /* Make it easier for the backends by protecting the valist argument
	 from multiple evaluations.  */
      if (TREE_CODE (have_va_type) == ARRAY_TYPE)
	{
	  /* For this case, the backends will be expecting a pointer to
	     TREE_TYPE (abi), but it's possible we've
	     actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
	     So fix it.  */
	  if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
	    {
	      tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
	      valist = fold_convert_loc (loc, p1,
					 build_fold_addr_expr_loc (loc, valist));
	    }

	  gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
	}
      else
	gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);

      if (!targetm.gimplify_va_arg_expr)
	/* FIXME:Once most targets are converted we should merely
	   assert this is non-null.  */
	return GS_ALL_DONE;

      *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
      return GS_OK;
    }
}
4680
4681 /* Expand EXP, a call to __builtin_va_end. */
4682
4683 static rtx
4684 expand_builtin_va_end (tree exp)
4685 {
4686 tree valist = CALL_EXPR_ARG (exp, 0);
4687
4688 /* Evaluate for side effects, if needed. I hate macros that don't
4689 do that. */
4690 if (TREE_SIDE_EFFECTS (valist))
4691 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4692
4693 return const0_rtx;
4694 }
4695
/* Expand EXP, a call to __builtin_va_copy.  We do this as a
   builtin rather than just as an assignment in stdarg.h because of the
   nastiness of array-type va_list types.  */

static rtx
expand_builtin_va_copy (tree exp)
{
  tree dst, src, t;
  location_t loc = EXPR_LOCATION (exp);

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  /* The destination needs an lvalue; the source only a value.  */
  dst = stabilize_va_list_loc (loc, dst, 1);
  src = stabilize_va_list_loc (loc, src, 0);

  gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);

  /* Scalar va_list: a plain assignment suffices.  */
  if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
    {
      t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }
  else
    {
      /* Array va_list: copy the whole object with a block move.  */
      rtx dstb, srcb, size;

      /* Evaluate to pointers.  */
      dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
      srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
      size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
      		  NULL_RTX, VOIDmode, EXPAND_NORMAL);

      dstb = convert_memory_address (Pmode, dstb);
      srcb = convert_memory_address (Pmode, srcb);

      /* "Dereference" to BLKmode memories.  */
      dstb = gen_rtx_MEM (BLKmode, dstb);
      set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
      set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
      srcb = gen_rtx_MEM (BLKmode, srcb);
      set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
      set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));

      /* Copy.  */
      emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
    }

  return const0_rtx;
}
4747
4748 /* Expand a call to one of the builtin functions __builtin_frame_address or
4749 __builtin_return_address. */
4750
4751 static rtx
4752 expand_builtin_frame_address (tree fndecl, tree exp)
4753 {
4754 /* The argument must be a nonnegative integer constant.
4755 It counts the number of frames to scan up the stack.
4756 The value is the return address saved in that frame. */
4757 if (call_expr_nargs (exp) == 0)
4758 /* Warning about missing arg was already issued. */
4759 return const0_rtx;
4760 else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
4761 {
4762 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4763 error ("invalid argument to %<__builtin_frame_address%>");
4764 else
4765 error ("invalid argument to %<__builtin_return_address%>");
4766 return const0_rtx;
4767 }
4768 else
4769 {
4770 rtx tem
4771 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
4772 tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
4773
4774 /* Some ports cannot access arbitrary stack frames. */
4775 if (tem == NULL)
4776 {
4777 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4778 warning (0, "unsupported argument to %<__builtin_frame_address%>");
4779 else
4780 warning (0, "unsupported argument to %<__builtin_return_address%>");
4781 return const0_rtx;
4782 }
4783
4784 /* For __builtin_frame_address, return what we've got. */
4785 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4786 return tem;
4787
4788 if (!REG_P (tem)
4789 && ! CONSTANT_P (tem))
4790 tem = copy_to_mode_reg (Pmode, tem);
4791 return tem;
4792 }
4793 }
4794
4795 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if
4796 we failed and the caller should emit a normal call, otherwise try to get
4797 the result in TARGET, if convenient. */
4798
4799 static rtx
4800 expand_builtin_alloca (tree exp, rtx target)
4801 {
4802 rtx op0;
4803 rtx result;
4804
4805 /* Emit normal call if marked not-inlineable. */
4806 if (CALL_CANNOT_INLINE_P (exp))
4807 return NULL_RTX;
4808
4809 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4810 return NULL_RTX;
4811
4812 /* Compute the argument. */
4813 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4814
4815 /* Allocate the desired space. */
4816 result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
4817 result = convert_memory_address (ptr_mode, result);
4818
4819 return result;
4820 }
4821
4822 /* Expand a call to a bswap builtin with argument ARG0. MODE
4823 is the mode to expand with. */
4824
4825 static rtx
4826 expand_builtin_bswap (tree exp, rtx target, rtx subtarget)
4827 {
4828 enum machine_mode mode;
4829 tree arg;
4830 rtx op0;
4831
4832 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4833 return NULL_RTX;
4834
4835 arg = CALL_EXPR_ARG (exp, 0);
4836 mode = TYPE_MODE (TREE_TYPE (arg));
4837 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4838
4839 target = expand_unop (mode, bswap_optab, op0, target, 1);
4840
4841 gcc_assert (target);
4842
4843 return convert_to_mode (mode, target, 0);
4844 }
4845
4846 /* Expand a call to a unary builtin in EXP.
4847 Return NULL_RTX if a normal call should be emitted rather than expanding the
4848 function in-line. If convenient, the result should be placed in TARGET.
4849 SUBTARGET may be used as the target for computing one of EXP's operands. */
4850
4851 static rtx
4852 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
4853 rtx subtarget, optab op_optab)
4854 {
4855 rtx op0;
4856
4857 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4858 return NULL_RTX;
4859
4860 /* Compute the argument. */
4861 op0 = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
4862 VOIDmode, EXPAND_NORMAL);
4863 /* Compute op, into TARGET if possible.
4864 Set TARGET to wherever the result comes back. */
4865 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4866 op_optab, op0, target, 1);
4867 gcc_assert (target);
4868
4869 return convert_to_mode (target_mode, target, 0);
4870 }
4871
4872 /* Expand a call to __builtin_expect. We just return our argument
4873 as the builtin_expect semantic should've been already executed by
4874 tree branch prediction pass. */
4875
4876 static rtx
4877 expand_builtin_expect (tree exp, rtx target)
4878 {
4879 tree arg;
4880
4881 if (call_expr_nargs (exp) < 2)
4882 return const0_rtx;
4883 arg = CALL_EXPR_ARG (exp, 0);
4884
4885 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4886 /* When guessing was done, the hints should be already stripped away. */
4887 gcc_assert (!flag_guess_branch_prob
4888 || optimize == 0 || errorcount || sorrycount);
4889 return target;
4890 }
4891
/* Expand a call to __builtin_trap.  Emit the target's trap instruction
   if it has one, otherwise fall back to a call to abort through the
   libfunc; either way, follow with a barrier since control does not
   continue past this point.  */

void
expand_builtin_trap (void)
{
#ifdef HAVE_trap
  if (HAVE_trap)
    emit_insn (gen_trap ());
  else
#endif
    emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
  emit_barrier ();
}
4903
/* Expand a call to __builtin_unreachable.  We do nothing except emit
   a barrier saying that control flow will not pass here.

   It is the responsibility of the program being compiled to ensure
   that control flow does never reach __builtin_unreachable.  */
static void
expand_builtin_unreachable (void)
{
  /* No code is emitted; the barrier tells RTL passes that everything
     after this point is dead.  */
  emit_barrier ();
}
4914
4915 /* Expand EXP, a call to fabs, fabsf or fabsl.
4916 Return NULL_RTX if a normal call should be emitted rather than expanding
4917 the function inline. If convenient, the result should be placed
4918 in TARGET. SUBTARGET may be used as the target for computing
4919 the operand. */
4920
4921 static rtx
4922 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4923 {
4924 enum machine_mode mode;
4925 tree arg;
4926 rtx op0;
4927
4928 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4929 return NULL_RTX;
4930
4931 arg = CALL_EXPR_ARG (exp, 0);
4932 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4933 mode = TYPE_MODE (TREE_TYPE (arg));
4934 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4935 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4936 }
4937
4938 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4939 Return NULL is a normal call should be emitted rather than expanding the
4940 function inline. If convenient, the result should be placed in TARGET.
4941 SUBTARGET may be used as the target for computing the operand. */
4942
4943 static rtx
4944 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4945 {
4946 rtx op0, op1;
4947 tree arg;
4948
4949 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4950 return NULL_RTX;
4951
4952 arg = CALL_EXPR_ARG (exp, 0);
4953 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4954
4955 arg = CALL_EXPR_ARG (exp, 1);
4956 op1 = expand_normal (arg);
4957
4958 return expand_copysign (op0, op1, target);
4959 }
4960
4961 /* Create a new constant string literal and return a char* pointer to it.
4962 The STRING_CST value is the LEN characters at STR. */
4963 tree
4964 build_string_literal (int len, const char *str)
4965 {
4966 tree t, elem, index, type;
4967
4968 t = build_string (len, str);
4969 elem = build_type_variant (char_type_node, 1, 0);
4970 index = build_index_type (size_int (len - 1));
4971 type = build_array_type (elem, index);
4972 TREE_TYPE (t) = type;
4973 TREE_CONSTANT (t) = 1;
4974 TREE_READONLY (t) = 1;
4975 TREE_STATIC (t) = 1;
4976
4977 type = build_pointer_type (elem);
4978 t = build1 (ADDR_EXPR, type,
4979 build4 (ARRAY_REF, elem,
4980 t, integer_zero_node, NULL_TREE, NULL_TREE));
4981 return t;
4982 }
4983
4984 /* Expand a call to either the entry or exit function profiler. */
4985
4986 static rtx
4987 expand_builtin_profile_func (bool exitp)
4988 {
4989 rtx this_rtx, which;
4990
4991 this_rtx = DECL_RTL (current_function_decl);
4992 gcc_assert (MEM_P (this_rtx));
4993 this_rtx = XEXP (this_rtx, 0);
4994
4995 if (exitp)
4996 which = profile_function_exit_libfunc;
4997 else
4998 which = profile_function_entry_libfunc;
4999
5000 emit_library_call (which, LCT_NORMAL, VOIDmode, 2, this_rtx, Pmode,
5001 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
5002 0),
5003 Pmode);
5004
5005 return const0_rtx;
5006 }
5007
/* Expand a call to __builtin___clear_cache.  Returns NULL_RTX when the
   caller should emit a call to the libgcc __clear_cache function, and
   const0_rtx when the builtin has been fully expanded (or needs no code
   at all).  */

static rtx
expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
{
#ifndef HAVE_clear_cache
#ifdef CLEAR_INSN_CACHE
  /* There is no "clear_cache" insn, and __clear_cache() in libgcc
     does something.  Just do the default expansion to a call to
     __clear_cache().  */
  return NULL_RTX;
#else
  /* There is no "clear_cache" insn, and __clear_cache() in libgcc
     does nothing.  There is no need to call it.  Do nothing.  */
  return const0_rtx;
#endif /* CLEAR_INSN_CACHE */
#else
  /* We have a "clear_cache" insn, and it will handle everything.  */
  tree begin, end;
  rtx begin_rtx, end_rtx;
  enum insn_code icode;

  /* We must not expand to a library call.  If we did, any
     fallback library function in libgcc that might contain a call to
     __builtin___clear_cache() would recurse infinitely.  */
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    {
      error ("both arguments to %<__builtin___clear_cache%> must be pointers");
      return const0_rtx;
    }

  if (HAVE_clear_cache)
    {
      icode = CODE_FOR_clear_cache;

      /* Force each pointer argument into a form accepted by the
	 clear_cache insn's operand predicate.  */
      begin = CALL_EXPR_ARG (exp, 0);
      begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
      begin_rtx = convert_memory_address (Pmode, begin_rtx);
      if (!insn_data[icode].operand[0].predicate (begin_rtx, Pmode))
	begin_rtx = copy_to_mode_reg (Pmode, begin_rtx);

      end = CALL_EXPR_ARG (exp, 1);
      end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
      end_rtx = convert_memory_address (Pmode, end_rtx);
      if (!insn_data[icode].operand[1].predicate (end_rtx, Pmode))
	end_rtx = copy_to_mode_reg (Pmode, end_rtx);

      emit_insn (gen_clear_cache (begin_rtx, end_rtx));
    }
  return const0_rtx;
#endif /* HAVE_clear_cache */
}
5060
5061 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5062
5063 static rtx
5064 round_trampoline_addr (rtx tramp)
5065 {
5066 rtx temp, addend, mask;
5067
5068 /* If we don't need too much alignment, we'll have been guaranteed
5069 proper alignment by get_trampoline_type. */
5070 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5071 return tramp;
5072
5073 /* Round address up to desired boundary. */
5074 temp = gen_reg_rtx (Pmode);
5075 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5076 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5077
5078 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5079 temp, 0, OPTAB_LIB_WIDEN);
5080 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5081 temp, 0, OPTAB_LIB_WIDEN);
5082
5083 return tramp;
5084 }
5085
/* Expand a call to __builtin_init_trampoline: fill in the trampoline
   buffer at argument 0 so that calling it invokes the nested function
   named by argument 1 with the static chain value of argument 2.  */

static rtx
expand_builtin_init_trampoline (tree exp)
{
  tree t_tramp, t_func, t_chain;
  rtx m_tramp, r_tramp, r_chain, tmp;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
			 POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_tramp = CALL_EXPR_ARG (exp, 0);
  t_func = CALL_EXPR_ARG (exp, 1);
  t_chain = CALL_EXPR_ARG (exp, 2);

  r_tramp = expand_normal (t_tramp);
  m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
  MEM_NOTRAP_P (m_tramp) = 1;

  /* The TRAMP argument should be the address of a field within the
     local function's FRAME decl.  Let's see if we can fill in the
     MEM_ATTRs for this memory.  */
  if (TREE_CODE (t_tramp) == ADDR_EXPR)
    set_mem_attributes_minus_bitpos (m_tramp, TREE_OPERAND (t_tramp, 0),
				     true, 0);

  /* If rounding changed the address, redirect the MEM and record the
     stricter alignment and size now known to hold.  */
  tmp = round_trampoline_addr (r_tramp);
  if (tmp != r_tramp)
    {
      m_tramp = change_address (m_tramp, BLKmode, tmp);
      set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
      set_mem_size (m_tramp, GEN_INT (TRAMPOLINE_SIZE));
    }

  /* The FUNC argument should be the address of the nested function.
     Extract the actual function decl to pass to the hook.  */
  gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
  t_func = TREE_OPERAND (t_func, 0);
  gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);

  r_chain = expand_normal (t_chain);

  /* Generate insns to initialize the trampoline.  */
  targetm.calls.trampoline_init (m_tramp, t_func, r_chain);

  trampolines_created = 1;
  return const0_rtx;
}
5133
5134 static rtx
5135 expand_builtin_adjust_trampoline (tree exp)
5136 {
5137 rtx tramp;
5138
5139 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5140 return NULL_RTX;
5141
5142 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5143 tramp = round_trampoline_addr (tramp);
5144 if (targetm.calls.trampoline_adjust_address)
5145 tramp = targetm.calls.trampoline_adjust_address (tramp);
5146
5147 return tramp;
5148 }
5149
/* Expand the call EXP to the built-in signbit, signbitf or signbitl
   function.  The function first checks whether the back end provides
   an insn to implement signbit for the respective mode.  If not, it
   checks whether the floating point format of the value is such that
   the sign bit can be extracted.  If that is not the case, the
   function returns NULL_RTX to indicate that a normal call should be
   emitted rather than expanding the function in-line.  EXP is the
   expression that is a call to the builtin function; if convenient,
   the result should be placed in TARGET.  */
static rtx
expand_builtin_signbit (tree exp, rtx target)
{
  const struct real_format *fmt;
  enum machine_mode fmode, imode, rmode;
  HOST_WIDE_INT hi, lo;
  tree arg;
  int word, bitpos;
  enum insn_code icode;
  rtx temp;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  fmode = TYPE_MODE (TREE_TYPE (arg));    /* Mode of the FP argument.  */
  rmode = TYPE_MODE (TREE_TYPE (exp));    /* Mode of the integer result.  */
  fmt = REAL_MODE_FORMAT (fmode);

  arg = builtin_save_expr (arg);

  /* Expand the argument yielding a RTX expression. */
  temp = expand_normal (arg);

  /* Check if the back end provides an insn that handles signbit for the
     argument's mode. */
  icode = signbit_optab->handlers [(int) fmode].insn_code;
  if (icode != CODE_FOR_nothing)
    {
      target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
      emit_unop_insn (icode, target, temp, UNKNOWN);
      return target;
    }

  /* For floating point formats without a sign bit, implement signbit
     as "ARG < 0.0".  */
  bitpos = fmt->signbit_ro;
  if (bitpos < 0)
    {
      /* But we can't do this if the format supports signed zero.  */
      if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
	return NULL_RTX;

      arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
			     build_real (TREE_TYPE (arg), dconst0));
      return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
    }

  /* Get an integer view of the value containing the sign bit.  */
  if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
    {
      imode = int_mode_for_mode (fmode);
      if (imode == BLKmode)
	return NULL_RTX;
      temp = gen_lowpart (imode, temp);
    }
  else
    {
      /* The value is wider than a word; extract just the word that
	 holds the sign bit.  */
      imode = word_mode;
      /* Handle targets with different FP word orders.  */
      if (FLOAT_WORDS_BIG_ENDIAN)
	word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
      else
	word = bitpos / BITS_PER_WORD;
      temp = operand_subword_force (temp, word, fmode);
      bitpos = bitpos % BITS_PER_WORD;
    }

  /* Force the intermediate word_mode (or narrower) result into a
     register.  This avoids attempting to create paradoxical SUBREGs
     of floating point modes below.  */
  temp = force_reg (imode, temp);

  /* If the bitpos is within the "result mode" lowpart, the operation
     can be implemented with a single bitwise AND.  Otherwise, we need
     a right shift and an AND.  */

  if (bitpos < GET_MODE_BITSIZE (rmode))
    {
      /* Build the double-int mask 1 << bitpos.  */
      if (bitpos < HOST_BITS_PER_WIDE_INT)
	{
	  hi = 0;
	  lo = (HOST_WIDE_INT) 1 << bitpos;
	}
      else
	{
	  hi = (HOST_WIDE_INT) 1 << (bitpos - HOST_BITS_PER_WIDE_INT);
	  lo = 0;
	}

      if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
	temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp,
			   immed_double_const (lo, hi, rmode),
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }
  else
    {
      /* Perform a logical right shift to place the signbit in the least
	 significant bit, then truncate the result to the desired mode
	 and mask just this bit.  */
      temp = expand_shift (RSHIFT_EXPR, imode, temp,
			   build_int_cst (NULL_TREE, bitpos), NULL_RTX, 1);
      temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp, const1_rtx,
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }

  return temp;
}
5269
5270 /* Expand fork or exec calls. TARGET is the desired target of the
5271 call. EXP is the call. FN is the
5272 identificator of the actual function. IGNORE is nonzero if the
5273 value is to be ignored. */
5274
5275 static rtx
5276 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5277 {
5278 tree id, decl;
5279 tree call;
5280
5281 /* If we are not profiling, just call the function. */
5282 if (!profile_arc_flag)
5283 return NULL_RTX;
5284
5285 /* Otherwise call the wrapper. This should be equivalent for the rest of
5286 compiler, so the code does not diverge, and the wrapper may run the
5287 code necessary for keeping the profiling sane. */
5288
5289 switch (DECL_FUNCTION_CODE (fn))
5290 {
5291 case BUILT_IN_FORK:
5292 id = get_identifier ("__gcov_fork");
5293 break;
5294
5295 case BUILT_IN_EXECL:
5296 id = get_identifier ("__gcov_execl");
5297 break;
5298
5299 case BUILT_IN_EXECV:
5300 id = get_identifier ("__gcov_execv");
5301 break;
5302
5303 case BUILT_IN_EXECLP:
5304 id = get_identifier ("__gcov_execlp");
5305 break;
5306
5307 case BUILT_IN_EXECLE:
5308 id = get_identifier ("__gcov_execle");
5309 break;
5310
5311 case BUILT_IN_EXECVP:
5312 id = get_identifier ("__gcov_execvp");
5313 break;
5314
5315 case BUILT_IN_EXECVE:
5316 id = get_identifier ("__gcov_execve");
5317 break;
5318
5319 default:
5320 gcc_unreachable ();
5321 }
5322
5323 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5324 FUNCTION_DECL, id, TREE_TYPE (fn));
5325 DECL_EXTERNAL (decl) = 1;
5326 TREE_PUBLIC (decl) = 1;
5327 DECL_ARTIFICIAL (decl) = 1;
5328 TREE_NOTHROW (decl) = 1;
5329 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5330 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5331 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5332 return expand_call (call, target, ignore);
5333 }
5334
5335
5336 \f
/* Reconstitute a mode for a __sync intrinsic operation.  Since the type of
   the pointer in these functions is void*, the tree optimizers may remove
   casts.  The mode computed in expand_builtin isn't reliable either, due
   to __sync_bool_compare_and_swap.

   FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
   group of builtins.  This gives us log2 of the mode size.  */

static inline enum machine_mode
get_builtin_sync_mode (int fcode_diff)
{
  /* The size is not negotiable, so ask not to get BLKmode in return
     if the target indicates that a smaller size would be better.
     (BITS_PER_UNIT << fcode_diff is the access width in bits.)  */
  return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
}
5352
/* Expand the memory expression LOC and return the appropriate memory operand
   for the builtin_sync operations.  MODE is the access mode the intrinsic
   requires.  */

static rtx
get_builtin_sync_mem (tree loc, enum machine_mode mode)
{
  rtx addr, mem;

  addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
  addr = convert_memory_address (Pmode, addr);

  /* Note that we explicitly do not want any alias information for this
     memory, so that we kill all other live memories.  Otherwise we don't
     satisfy the full barrier semantics of the intrinsic.  */
  mem = validize_mem (gen_rtx_MEM (mode, addr));

  set_mem_align (mem, get_pointer_alignment (loc, BIGGEST_ALIGNMENT));
  set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
  /* Volatile keeps the access from being moved or deleted.  */
  MEM_VOLATILE_P (mem) = 1;

  return mem;
}
5375
/* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
   EXP is the CALL_EXPR.  CODE is the rtx code
   that corresponds to the arithmetic or logical operation from the name;
   an exception here is that NOT actually means NAND.  TARGET is an optional
   place for us to store the results; AFTER is true if this is the
   fetch_and_xxx form.  IGNORE is true if we don't actually care about
   the result of the operation at all.  */

static rtx
expand_builtin_sync_operation (enum machine_mode mode, tree exp,
			       enum rtx_code code, bool after,
			       rtx target, bool ignore)
{
  rtx val, mem;
  enum machine_mode old_mode;
  location_t loc = EXPR_LOCATION (exp);

  if (code == NOT && warn_sync_nand)
    {
      tree fndecl = get_callee_fndecl (exp);
      enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

      /* Each of the two NAND families is warned about at most once
	 per compilation.  */
      static bool warned_f_a_n, warned_n_a_f;

      switch (fcode)
	{
	case BUILT_IN_FETCH_AND_NAND_1:
	case BUILT_IN_FETCH_AND_NAND_2:
	case BUILT_IN_FETCH_AND_NAND_4:
	case BUILT_IN_FETCH_AND_NAND_8:
	case BUILT_IN_FETCH_AND_NAND_16:

	  if (warned_f_a_n)
	    break;

	  fndecl = implicit_built_in_decls[BUILT_IN_FETCH_AND_NAND_N];
	  inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
	  warned_f_a_n = true;
	  break;

	case BUILT_IN_NAND_AND_FETCH_1:
	case BUILT_IN_NAND_AND_FETCH_2:
	case BUILT_IN_NAND_AND_FETCH_4:
	case BUILT_IN_NAND_AND_FETCH_8:
	case BUILT_IN_NAND_AND_FETCH_16:

	  if (warned_n_a_f)
	    break;

	  fndecl = implicit_built_in_decls[BUILT_IN_NAND_AND_FETCH_N];
	  inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
	  warned_n_a_f = true;
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
  /* If VAL is promoted to a wider mode, convert it back to MODE.  Take care
     of CONST_INTs, where we know the old_mode only from the call argument.  */
  old_mode = GET_MODE (val);
  if (old_mode == VOIDmode)
    old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
  val = convert_modes (mode, old_mode, val, 1);

  if (ignore)
    return expand_sync_operation (mem, val, code);
  else
    return expand_sync_fetch_operation (mem, val, code, after, target);
}
5451
5452 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5453 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5454 true if this is the boolean form. TARGET is a place for us to store the
5455 results; this is NOT optional if IS_BOOL is true. */
5456
5457 static rtx
5458 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5459 bool is_bool, rtx target)
5460 {
5461 rtx old_val, new_val, mem;
5462 enum machine_mode old_mode;
5463
5464 /* Expand the operands. */
5465 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5466
5467
5468 old_val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX,
5469 mode, EXPAND_NORMAL);
5470 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5471 of CONST_INTs, where we know the old_mode only from the call argument. */
5472 old_mode = GET_MODE (old_val);
5473 if (old_mode == VOIDmode)
5474 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5475 old_val = convert_modes (mode, old_mode, old_val, 1);
5476
5477 new_val = expand_expr (CALL_EXPR_ARG (exp, 2), NULL_RTX,
5478 mode, EXPAND_NORMAL);
5479 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5480 of CONST_INTs, where we know the old_mode only from the call argument. */
5481 old_mode = GET_MODE (new_val);
5482 if (old_mode == VOIDmode)
5483 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2)));
5484 new_val = convert_modes (mode, old_mode, new_val, 1);
5485
5486 if (is_bool)
5487 return expand_bool_compare_and_swap (mem, old_val, new_val, target);
5488 else
5489 return expand_val_compare_and_swap (mem, old_val, new_val, target);
5490 }
5491
5492 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5493 general form is actually an atomic exchange, and some targets only
5494 support a reduced form with the second argument being a constant 1.
5495 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5496 the results. */
5497
5498 static rtx
5499 expand_builtin_lock_test_and_set (enum machine_mode mode, tree exp,
5500 rtx target)
5501 {
5502 rtx val, mem;
5503 enum machine_mode old_mode;
5504
5505 /* Expand the operands. */
5506 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5507 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5508 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5509 of CONST_INTs, where we know the old_mode only from the call argument. */
5510 old_mode = GET_MODE (val);
5511 if (old_mode == VOIDmode)
5512 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5513 val = convert_modes (mode, old_mode, val, 1);
5514
5515 return expand_sync_lock_test_and_set (mem, val, target);
5516 }
5517
/* Expand the __sync_synchronize intrinsic: emit a full memory barrier,
   preferring the target's barrier insn, then the libfunc, and finally
   a volatile asm with a "memory" clobber.  */

static void
expand_builtin_synchronize (void)
{
  gimple x;
  VEC (tree, gc) *v_clobbers;

#ifdef HAVE_memory_barrier
  if (HAVE_memory_barrier)
    {
      emit_insn (gen_memory_barrier ());
      return;
    }
#endif

  if (synchronize_libfunc != NULL_RTX)
    {
      emit_library_call (synchronize_libfunc, LCT_NORMAL, VOIDmode, 0);
      return;
    }

  /* If no explicit memory barrier instruction is available, create an
     empty asm stmt with a memory clobber.  */
  v_clobbers = VEC_alloc (tree, gc, 1);
  VEC_quick_push (tree, v_clobbers,
		  tree_cons (NULL, build_string (6, "memory"), NULL));
  x = gimple_build_asm_vec ("", NULL, NULL, v_clobbers, NULL);
  gimple_asm_set_volatile (x, true);
  expand_asm_stmt (x);
}
5549
5550 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5551
5552 static void
5553 expand_builtin_lock_release (enum machine_mode mode, tree exp)
5554 {
5555 enum insn_code icode;
5556 rtx mem, insn;
5557 rtx val = const0_rtx;
5558
5559 /* Expand the operands. */
5560 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5561
5562 /* If there is an explicit operation in the md file, use it. */
5563 icode = sync_lock_release[mode];
5564 if (icode != CODE_FOR_nothing)
5565 {
5566 if (!insn_data[icode].operand[1].predicate (val, mode))
5567 val = force_reg (mode, val);
5568
5569 insn = GEN_FCN (icode) (mem, val);
5570 if (insn)
5571 {
5572 emit_insn (insn);
5573 return;
5574 }
5575 }
5576
5577 /* Otherwise we can implement this operation by emitting a barrier
5578 followed by a store of zero. */
5579 expand_builtin_synchronize ();
5580 emit_move_insn (mem, val);
5581 }
5582 \f
5583 /* Expand an expression EXP that calls a built-in function,
5584 with result going to TARGET if that's convenient
5585 (and in mode MODE if that's convenient).
5586 SUBTARGET may be used as the target for computing one of EXP's operands.
5587 IGNORE is nonzero if the value is to be ignored. */
5588
5589 rtx
5590 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
5591 int ignore)
5592 {
5593 tree fndecl = get_callee_fndecl (exp);
5594 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5595 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5596
5597 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5598 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5599
5600 /* When not optimizing, generate calls to library functions for a certain
5601 set of builtins. */
5602 if (!optimize
5603 && !called_as_built_in (fndecl)
5604 && DECL_ASSEMBLER_NAME_SET_P (fndecl)
5605 && fcode != BUILT_IN_ALLOCA
5606 && fcode != BUILT_IN_FREE)
5607 return expand_call (exp, target, ignore);
5608
5609 /* The built-in function expanders test for target == const0_rtx
5610 to determine whether the function's result will be ignored. */
5611 if (ignore)
5612 target = const0_rtx;
5613
5614 /* If the result of a pure or const built-in function is ignored, and
5615 none of its arguments are volatile, we can avoid expanding the
5616 built-in call and just evaluate the arguments for side-effects. */
5617 if (target == const0_rtx
5618 && (DECL_PURE_P (fndecl) || TREE_READONLY (fndecl)))
5619 {
5620 bool volatilep = false;
5621 tree arg;
5622 call_expr_arg_iterator iter;
5623
5624 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5625 if (TREE_THIS_VOLATILE (arg))
5626 {
5627 volatilep = true;
5628 break;
5629 }
5630
5631 if (! volatilep)
5632 {
5633 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5634 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5635 return const0_rtx;
5636 }
5637 }
5638
5639 switch (fcode)
5640 {
5641 CASE_FLT_FN (BUILT_IN_FABS):
5642 target = expand_builtin_fabs (exp, target, subtarget);
5643 if (target)
5644 return target;
5645 break;
5646
5647 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5648 target = expand_builtin_copysign (exp, target, subtarget);
5649 if (target)
5650 return target;
5651 break;
5652
5653 /* Just do a normal library call if we were unable to fold
5654 the values. */
5655 CASE_FLT_FN (BUILT_IN_CABS):
5656 break;
5657
5658 CASE_FLT_FN (BUILT_IN_EXP):
5659 CASE_FLT_FN (BUILT_IN_EXP10):
5660 CASE_FLT_FN (BUILT_IN_POW10):
5661 CASE_FLT_FN (BUILT_IN_EXP2):
5662 CASE_FLT_FN (BUILT_IN_EXPM1):
5663 CASE_FLT_FN (BUILT_IN_LOGB):
5664 CASE_FLT_FN (BUILT_IN_LOG):
5665 CASE_FLT_FN (BUILT_IN_LOG10):
5666 CASE_FLT_FN (BUILT_IN_LOG2):
5667 CASE_FLT_FN (BUILT_IN_LOG1P):
5668 CASE_FLT_FN (BUILT_IN_TAN):
5669 CASE_FLT_FN (BUILT_IN_ASIN):
5670 CASE_FLT_FN (BUILT_IN_ACOS):
5671 CASE_FLT_FN (BUILT_IN_ATAN):
5672 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
5673 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5674 because of possible accuracy problems. */
5675 if (! flag_unsafe_math_optimizations)
5676 break;
5677 CASE_FLT_FN (BUILT_IN_SQRT):
5678 CASE_FLT_FN (BUILT_IN_FLOOR):
5679 CASE_FLT_FN (BUILT_IN_CEIL):
5680 CASE_FLT_FN (BUILT_IN_TRUNC):
5681 CASE_FLT_FN (BUILT_IN_ROUND):
5682 CASE_FLT_FN (BUILT_IN_NEARBYINT):
5683 CASE_FLT_FN (BUILT_IN_RINT):
5684 target = expand_builtin_mathfn (exp, target, subtarget);
5685 if (target)
5686 return target;
5687 break;
5688
5689 CASE_FLT_FN (BUILT_IN_ILOGB):
5690 if (! flag_unsafe_math_optimizations)
5691 break;
5692 CASE_FLT_FN (BUILT_IN_ISINF):
5693 CASE_FLT_FN (BUILT_IN_FINITE):
5694 case BUILT_IN_ISFINITE:
5695 case BUILT_IN_ISNORMAL:
5696 target = expand_builtin_interclass_mathfn (exp, target, subtarget);
5697 if (target)
5698 return target;
5699 break;
5700
5701 CASE_FLT_FN (BUILT_IN_LCEIL):
5702 CASE_FLT_FN (BUILT_IN_LLCEIL):
5703 CASE_FLT_FN (BUILT_IN_LFLOOR):
5704 CASE_FLT_FN (BUILT_IN_LLFLOOR):
5705 target = expand_builtin_int_roundingfn (exp, target);
5706 if (target)
5707 return target;
5708 break;
5709
5710 CASE_FLT_FN (BUILT_IN_LRINT):
5711 CASE_FLT_FN (BUILT_IN_LLRINT):
5712 CASE_FLT_FN (BUILT_IN_LROUND):
5713 CASE_FLT_FN (BUILT_IN_LLROUND):
5714 target = expand_builtin_int_roundingfn_2 (exp, target);
5715 if (target)
5716 return target;
5717 break;
5718
5719 CASE_FLT_FN (BUILT_IN_POW):
5720 target = expand_builtin_pow (exp, target, subtarget);
5721 if (target)
5722 return target;
5723 break;
5724
5725 CASE_FLT_FN (BUILT_IN_POWI):
5726 target = expand_builtin_powi (exp, target, subtarget);
5727 if (target)
5728 return target;
5729 break;
5730
5731 CASE_FLT_FN (BUILT_IN_ATAN2):
5732 CASE_FLT_FN (BUILT_IN_LDEXP):
5733 CASE_FLT_FN (BUILT_IN_SCALB):
5734 CASE_FLT_FN (BUILT_IN_SCALBN):
5735 CASE_FLT_FN (BUILT_IN_SCALBLN):
5736 if (! flag_unsafe_math_optimizations)
5737 break;
5738
5739 CASE_FLT_FN (BUILT_IN_FMOD):
5740 CASE_FLT_FN (BUILT_IN_REMAINDER):
5741 CASE_FLT_FN (BUILT_IN_DREM):
5742 target = expand_builtin_mathfn_2 (exp, target, subtarget);
5743 if (target)
5744 return target;
5745 break;
5746
5747 CASE_FLT_FN (BUILT_IN_CEXPI):
5748 target = expand_builtin_cexpi (exp, target, subtarget);
5749 gcc_assert (target);
5750 return target;
5751
5752 CASE_FLT_FN (BUILT_IN_SIN):
5753 CASE_FLT_FN (BUILT_IN_COS):
5754 if (! flag_unsafe_math_optimizations)
5755 break;
5756 target = expand_builtin_mathfn_3 (exp, target, subtarget);
5757 if (target)
5758 return target;
5759 break;
5760
5761 CASE_FLT_FN (BUILT_IN_SINCOS):
5762 if (! flag_unsafe_math_optimizations)
5763 break;
5764 target = expand_builtin_sincos (exp);
5765 if (target)
5766 return target;
5767 break;
5768
5769 case BUILT_IN_APPLY_ARGS:
5770 return expand_builtin_apply_args ();
5771
5772 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
5773 FUNCTION with a copy of the parameters described by
5774 ARGUMENTS, and ARGSIZE. It returns a block of memory
5775 allocated on the stack into which is stored all the registers
5776 that might possibly be used for returning the result of a
5777 function. ARGUMENTS is the value returned by
5778 __builtin_apply_args. ARGSIZE is the number of bytes of
5779 arguments that must be copied. ??? How should this value be
5780 computed? We'll also need a safe worst case value for varargs
5781 functions. */
5782 case BUILT_IN_APPLY:
5783 if (!validate_arglist (exp, POINTER_TYPE,
5784 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
5785 && !validate_arglist (exp, REFERENCE_TYPE,
5786 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5787 return const0_rtx;
5788 else
5789 {
5790 rtx ops[3];
5791
5792 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
5793 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
5794 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
5795
5796 return expand_builtin_apply (ops[0], ops[1], ops[2]);
5797 }
5798
5799 /* __builtin_return (RESULT) causes the function to return the
5800 value described by RESULT. RESULT is address of the block of
5801 memory returned by __builtin_apply. */
5802 case BUILT_IN_RETURN:
5803 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5804 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
5805 return const0_rtx;
5806
5807 case BUILT_IN_SAVEREGS:
5808 return expand_builtin_saveregs ();
5809
5810 case BUILT_IN_ARGS_INFO:
5811 return expand_builtin_args_info (exp);
5812
5813 case BUILT_IN_VA_ARG_PACK:
5814 /* All valid uses of __builtin_va_arg_pack () are removed during
5815 inlining. */
5816 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
5817 return const0_rtx;
5818
5819 case BUILT_IN_VA_ARG_PACK_LEN:
5820 /* All valid uses of __builtin_va_arg_pack_len () are removed during
5821 inlining. */
5822 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
5823 return const0_rtx;
5824
5825 /* Return the address of the first anonymous stack arg. */
5826 case BUILT_IN_NEXT_ARG:
5827 if (fold_builtin_next_arg (exp, false))
5828 return const0_rtx;
5829 return expand_builtin_next_arg ();
5830
5831 case BUILT_IN_CLEAR_CACHE:
5832 target = expand_builtin___clear_cache (exp);
5833 if (target)
5834 return target;
5835 break;
5836
5837 case BUILT_IN_CLASSIFY_TYPE:
5838 return expand_builtin_classify_type (exp);
5839
5840 case BUILT_IN_CONSTANT_P:
5841 return const0_rtx;
5842
5843 case BUILT_IN_FRAME_ADDRESS:
5844 case BUILT_IN_RETURN_ADDRESS:
5845 return expand_builtin_frame_address (fndecl, exp);
5846
5847 /* Returns the address of the area where the structure is returned.
5848 0 otherwise. */
5849 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
5850 if (call_expr_nargs (exp) != 0
5851 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
5852 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
5853 return const0_rtx;
5854 else
5855 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
5856
5857 case BUILT_IN_ALLOCA:
5858 target = expand_builtin_alloca (exp, target);
5859 if (target)
5860 return target;
5861 break;
5862
5863 case BUILT_IN_STACK_SAVE:
5864 return expand_stack_save ();
5865
5866 case BUILT_IN_STACK_RESTORE:
5867 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
5868 return const0_rtx;
5869
5870 case BUILT_IN_BSWAP32:
5871 case BUILT_IN_BSWAP64:
5872 target = expand_builtin_bswap (exp, target, subtarget);
5873
5874 if (target)
5875 return target;
5876 break;
5877
5878 CASE_INT_FN (BUILT_IN_FFS):
5879 case BUILT_IN_FFSIMAX:
5880 target = expand_builtin_unop (target_mode, exp, target,
5881 subtarget, ffs_optab);
5882 if (target)
5883 return target;
5884 break;
5885
5886 CASE_INT_FN (BUILT_IN_CLZ):
5887 case BUILT_IN_CLZIMAX:
5888 target = expand_builtin_unop (target_mode, exp, target,
5889 subtarget, clz_optab);
5890 if (target)
5891 return target;
5892 break;
5893
5894 CASE_INT_FN (BUILT_IN_CTZ):
5895 case BUILT_IN_CTZIMAX:
5896 target = expand_builtin_unop (target_mode, exp, target,
5897 subtarget, ctz_optab);
5898 if (target)
5899 return target;
5900 break;
5901
5902 CASE_INT_FN (BUILT_IN_POPCOUNT):
5903 case BUILT_IN_POPCOUNTIMAX:
5904 target = expand_builtin_unop (target_mode, exp, target,
5905 subtarget, popcount_optab);
5906 if (target)
5907 return target;
5908 break;
5909
5910 CASE_INT_FN (BUILT_IN_PARITY):
5911 case BUILT_IN_PARITYIMAX:
5912 target = expand_builtin_unop (target_mode, exp, target,
5913 subtarget, parity_optab);
5914 if (target)
5915 return target;
5916 break;
5917
5918 case BUILT_IN_STRLEN:
5919 target = expand_builtin_strlen (exp, target, target_mode);
5920 if (target)
5921 return target;
5922 break;
5923
5924 case BUILT_IN_STRCPY:
5925 target = expand_builtin_strcpy (exp, target);
5926 if (target)
5927 return target;
5928 break;
5929
5930 case BUILT_IN_STRNCPY:
5931 target = expand_builtin_strncpy (exp, target);
5932 if (target)
5933 return target;
5934 break;
5935
5936 case BUILT_IN_STPCPY:
5937 target = expand_builtin_stpcpy (exp, target, mode);
5938 if (target)
5939 return target;
5940 break;
5941
5942 case BUILT_IN_MEMCPY:
5943 target = expand_builtin_memcpy (exp, target);
5944 if (target)
5945 return target;
5946 break;
5947
5948 case BUILT_IN_MEMPCPY:
5949 target = expand_builtin_mempcpy (exp, target, mode);
5950 if (target)
5951 return target;
5952 break;
5953
5954 case BUILT_IN_MEMSET:
5955 target = expand_builtin_memset (exp, target, mode);
5956 if (target)
5957 return target;
5958 break;
5959
5960 case BUILT_IN_BZERO:
5961 target = expand_builtin_bzero (exp);
5962 if (target)
5963 return target;
5964 break;
5965
5966 case BUILT_IN_STRCMP:
5967 target = expand_builtin_strcmp (exp, target);
5968 if (target)
5969 return target;
5970 break;
5971
5972 case BUILT_IN_STRNCMP:
5973 target = expand_builtin_strncmp (exp, target, mode);
5974 if (target)
5975 return target;
5976 break;
5977
5978 case BUILT_IN_BCMP:
5979 case BUILT_IN_MEMCMP:
5980 target = expand_builtin_memcmp (exp, target, mode);
5981 if (target)
5982 return target;
5983 break;
5984
5985 case BUILT_IN_SETJMP:
5986 /* This should have been lowered to the builtins below. */
5987 gcc_unreachable ();
5988
5989 case BUILT_IN_SETJMP_SETUP:
5990 /* __builtin_setjmp_setup is passed a pointer to an array of five words
5991 and the receiver label. */
5992 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5993 {
5994 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
5995 VOIDmode, EXPAND_NORMAL);
5996 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
5997 rtx label_r = label_rtx (label);
5998
5999 /* This is copied from the handling of non-local gotos. */
6000 expand_builtin_setjmp_setup (buf_addr, label_r);
6001 nonlocal_goto_handler_labels
6002 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6003 nonlocal_goto_handler_labels);
6004 /* ??? Do not let expand_label treat us as such since we would
6005 not want to be both on the list of non-local labels and on
6006 the list of forced labels. */
6007 FORCED_LABEL (label) = 0;
6008 return const0_rtx;
6009 }
6010 break;
6011
6012 case BUILT_IN_SETJMP_DISPATCHER:
6013 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6014 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6015 {
6016 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6017 rtx label_r = label_rtx (label);
6018
6019 /* Remove the dispatcher label from the list of non-local labels
6020 since the receiver labels have been added to it above. */
6021 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6022 return const0_rtx;
6023 }
6024 break;
6025
6026 case BUILT_IN_SETJMP_RECEIVER:
6027 /* __builtin_setjmp_receiver is passed the receiver label. */
6028 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6029 {
6030 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6031 rtx label_r = label_rtx (label);
6032
6033 expand_builtin_setjmp_receiver (label_r);
6034 return const0_rtx;
6035 }
6036 break;
6037
6038 /* __builtin_longjmp is passed a pointer to an array of five words.
6039 It's similar to the C library longjmp function but works with
6040 __builtin_setjmp above. */
6041 case BUILT_IN_LONGJMP:
6042 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6043 {
6044 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6045 VOIDmode, EXPAND_NORMAL);
6046 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6047
6048 if (value != const1_rtx)
6049 {
6050 error ("%<__builtin_longjmp%> second argument must be 1");
6051 return const0_rtx;
6052 }
6053
6054 expand_builtin_longjmp (buf_addr, value);
6055 return const0_rtx;
6056 }
6057 break;
6058
6059 case BUILT_IN_NONLOCAL_GOTO:
6060 target = expand_builtin_nonlocal_goto (exp);
6061 if (target)
6062 return target;
6063 break;
6064
6065 /* This updates the setjmp buffer that is its argument with the value
6066 of the current stack pointer. */
6067 case BUILT_IN_UPDATE_SETJMP_BUF:
6068 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6069 {
6070 rtx buf_addr
6071 = expand_normal (CALL_EXPR_ARG (exp, 0));
6072
6073 expand_builtin_update_setjmp_buf (buf_addr);
6074 return const0_rtx;
6075 }
6076 break;
6077
6078 case BUILT_IN_TRAP:
6079 expand_builtin_trap ();
6080 return const0_rtx;
6081
6082 case BUILT_IN_UNREACHABLE:
6083 expand_builtin_unreachable ();
6084 return const0_rtx;
6085
6086 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6087 case BUILT_IN_SIGNBITD32:
6088 case BUILT_IN_SIGNBITD64:
6089 case BUILT_IN_SIGNBITD128:
6090 target = expand_builtin_signbit (exp, target);
6091 if (target)
6092 return target;
6093 break;
6094
6095 /* Various hooks for the DWARF 2 __throw routine. */
6096 case BUILT_IN_UNWIND_INIT:
6097 expand_builtin_unwind_init ();
6098 return const0_rtx;
6099 case BUILT_IN_DWARF_CFA:
6100 return virtual_cfa_rtx;
6101 #ifdef DWARF2_UNWIND_INFO
6102 case BUILT_IN_DWARF_SP_COLUMN:
6103 return expand_builtin_dwarf_sp_column ();
6104 case BUILT_IN_INIT_DWARF_REG_SIZES:
6105 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6106 return const0_rtx;
6107 #endif
6108 case BUILT_IN_FROB_RETURN_ADDR:
6109 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6110 case BUILT_IN_EXTRACT_RETURN_ADDR:
6111 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6112 case BUILT_IN_EH_RETURN:
6113 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6114 CALL_EXPR_ARG (exp, 1));
6115 return const0_rtx;
6116 #ifdef EH_RETURN_DATA_REGNO
6117 case BUILT_IN_EH_RETURN_DATA_REGNO:
6118 return expand_builtin_eh_return_data_regno (exp);
6119 #endif
6120 case BUILT_IN_EXTEND_POINTER:
6121 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6122 case BUILT_IN_EH_POINTER:
6123 return expand_builtin_eh_pointer (exp);
6124 case BUILT_IN_EH_FILTER:
6125 return expand_builtin_eh_filter (exp);
6126 case BUILT_IN_EH_COPY_VALUES:
6127 return expand_builtin_eh_copy_values (exp);
6128
6129 case BUILT_IN_VA_START:
6130 return expand_builtin_va_start (exp);
6131 case BUILT_IN_VA_END:
6132 return expand_builtin_va_end (exp);
6133 case BUILT_IN_VA_COPY:
6134 return expand_builtin_va_copy (exp);
6135 case BUILT_IN_EXPECT:
6136 return expand_builtin_expect (exp, target);
6137 case BUILT_IN_PREFETCH:
6138 expand_builtin_prefetch (exp);
6139 return const0_rtx;
6140
6141 case BUILT_IN_PROFILE_FUNC_ENTER:
6142 return expand_builtin_profile_func (false);
6143 case BUILT_IN_PROFILE_FUNC_EXIT:
6144 return expand_builtin_profile_func (true);
6145
6146 case BUILT_IN_INIT_TRAMPOLINE:
6147 return expand_builtin_init_trampoline (exp);
6148 case BUILT_IN_ADJUST_TRAMPOLINE:
6149 return expand_builtin_adjust_trampoline (exp);
6150
6151 case BUILT_IN_FORK:
6152 case BUILT_IN_EXECL:
6153 case BUILT_IN_EXECV:
6154 case BUILT_IN_EXECLP:
6155 case BUILT_IN_EXECLE:
6156 case BUILT_IN_EXECVP:
6157 case BUILT_IN_EXECVE:
6158 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6159 if (target)
6160 return target;
6161 break;
6162
6163 case BUILT_IN_FETCH_AND_ADD_1:
6164 case BUILT_IN_FETCH_AND_ADD_2:
6165 case BUILT_IN_FETCH_AND_ADD_4:
6166 case BUILT_IN_FETCH_AND_ADD_8:
6167 case BUILT_IN_FETCH_AND_ADD_16:
6168 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_ADD_1);
6169 target = expand_builtin_sync_operation (mode, exp, PLUS,
6170 false, target, ignore);
6171 if (target)
6172 return target;
6173 break;
6174
6175 case BUILT_IN_FETCH_AND_SUB_1:
6176 case BUILT_IN_FETCH_AND_SUB_2:
6177 case BUILT_IN_FETCH_AND_SUB_4:
6178 case BUILT_IN_FETCH_AND_SUB_8:
6179 case BUILT_IN_FETCH_AND_SUB_16:
6180 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_SUB_1);
6181 target = expand_builtin_sync_operation (mode, exp, MINUS,
6182 false, target, ignore);
6183 if (target)
6184 return target;
6185 break;
6186
6187 case BUILT_IN_FETCH_AND_OR_1:
6188 case BUILT_IN_FETCH_AND_OR_2:
6189 case BUILT_IN_FETCH_AND_OR_4:
6190 case BUILT_IN_FETCH_AND_OR_8:
6191 case BUILT_IN_FETCH_AND_OR_16:
6192 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_OR_1);
6193 target = expand_builtin_sync_operation (mode, exp, IOR,
6194 false, target, ignore);
6195 if (target)
6196 return target;
6197 break;
6198
6199 case BUILT_IN_FETCH_AND_AND_1:
6200 case BUILT_IN_FETCH_AND_AND_2:
6201 case BUILT_IN_FETCH_AND_AND_4:
6202 case BUILT_IN_FETCH_AND_AND_8:
6203 case BUILT_IN_FETCH_AND_AND_16:
6204 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_AND_1);
6205 target = expand_builtin_sync_operation (mode, exp, AND,
6206 false, target, ignore);
6207 if (target)
6208 return target;
6209 break;
6210
6211 case BUILT_IN_FETCH_AND_XOR_1:
6212 case BUILT_IN_FETCH_AND_XOR_2:
6213 case BUILT_IN_FETCH_AND_XOR_4:
6214 case BUILT_IN_FETCH_AND_XOR_8:
6215 case BUILT_IN_FETCH_AND_XOR_16:
6216 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_XOR_1);
6217 target = expand_builtin_sync_operation (mode, exp, XOR,
6218 false, target, ignore);
6219 if (target)
6220 return target;
6221 break;
6222
6223 case BUILT_IN_FETCH_AND_NAND_1:
6224 case BUILT_IN_FETCH_AND_NAND_2:
6225 case BUILT_IN_FETCH_AND_NAND_4:
6226 case BUILT_IN_FETCH_AND_NAND_8:
6227 case BUILT_IN_FETCH_AND_NAND_16:
6228 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_NAND_1);
6229 target = expand_builtin_sync_operation (mode, exp, NOT,
6230 false, target, ignore);
6231 if (target)
6232 return target;
6233 break;
6234
6235 case BUILT_IN_ADD_AND_FETCH_1:
6236 case BUILT_IN_ADD_AND_FETCH_2:
6237 case BUILT_IN_ADD_AND_FETCH_4:
6238 case BUILT_IN_ADD_AND_FETCH_8:
6239 case BUILT_IN_ADD_AND_FETCH_16:
6240 mode = get_builtin_sync_mode (fcode - BUILT_IN_ADD_AND_FETCH_1);
6241 target = expand_builtin_sync_operation (mode, exp, PLUS,
6242 true, target, ignore);
6243 if (target)
6244 return target;
6245 break;
6246
6247 case BUILT_IN_SUB_AND_FETCH_1:
6248 case BUILT_IN_SUB_AND_FETCH_2:
6249 case BUILT_IN_SUB_AND_FETCH_4:
6250 case BUILT_IN_SUB_AND_FETCH_8:
6251 case BUILT_IN_SUB_AND_FETCH_16:
6252 mode = get_builtin_sync_mode (fcode - BUILT_IN_SUB_AND_FETCH_1);
6253 target = expand_builtin_sync_operation (mode, exp, MINUS,
6254 true, target, ignore);
6255 if (target)
6256 return target;
6257 break;
6258
6259 case BUILT_IN_OR_AND_FETCH_1:
6260 case BUILT_IN_OR_AND_FETCH_2:
6261 case BUILT_IN_OR_AND_FETCH_4:
6262 case BUILT_IN_OR_AND_FETCH_8:
6263 case BUILT_IN_OR_AND_FETCH_16:
6264 mode = get_builtin_sync_mode (fcode - BUILT_IN_OR_AND_FETCH_1);
6265 target = expand_builtin_sync_operation (mode, exp, IOR,
6266 true, target, ignore);
6267 if (target)
6268 return target;
6269 break;
6270
6271 case BUILT_IN_AND_AND_FETCH_1:
6272 case BUILT_IN_AND_AND_FETCH_2:
6273 case BUILT_IN_AND_AND_FETCH_4:
6274 case BUILT_IN_AND_AND_FETCH_8:
6275 case BUILT_IN_AND_AND_FETCH_16:
6276 mode = get_builtin_sync_mode (fcode - BUILT_IN_AND_AND_FETCH_1);
6277 target = expand_builtin_sync_operation (mode, exp, AND,
6278 true, target, ignore);
6279 if (target)
6280 return target;
6281 break;
6282
6283 case BUILT_IN_XOR_AND_FETCH_1:
6284 case BUILT_IN_XOR_AND_FETCH_2:
6285 case BUILT_IN_XOR_AND_FETCH_4:
6286 case BUILT_IN_XOR_AND_FETCH_8:
6287 case BUILT_IN_XOR_AND_FETCH_16:
6288 mode = get_builtin_sync_mode (fcode - BUILT_IN_XOR_AND_FETCH_1);
6289 target = expand_builtin_sync_operation (mode, exp, XOR,
6290 true, target, ignore);
6291 if (target)
6292 return target;
6293 break;
6294
6295 case BUILT_IN_NAND_AND_FETCH_1:
6296 case BUILT_IN_NAND_AND_FETCH_2:
6297 case BUILT_IN_NAND_AND_FETCH_4:
6298 case BUILT_IN_NAND_AND_FETCH_8:
6299 case BUILT_IN_NAND_AND_FETCH_16:
6300 mode = get_builtin_sync_mode (fcode - BUILT_IN_NAND_AND_FETCH_1);
6301 target = expand_builtin_sync_operation (mode, exp, NOT,
6302 true, target, ignore);
6303 if (target)
6304 return target;
6305 break;
6306
6307 case BUILT_IN_BOOL_COMPARE_AND_SWAP_1:
6308 case BUILT_IN_BOOL_COMPARE_AND_SWAP_2:
6309 case BUILT_IN_BOOL_COMPARE_AND_SWAP_4:
6310 case BUILT_IN_BOOL_COMPARE_AND_SWAP_8:
6311 case BUILT_IN_BOOL_COMPARE_AND_SWAP_16:
6312 if (mode == VOIDmode)
6313 mode = TYPE_MODE (boolean_type_node);
6314 if (!target || !register_operand (target, mode))
6315 target = gen_reg_rtx (mode);
6316
6317 mode = get_builtin_sync_mode (fcode - BUILT_IN_BOOL_COMPARE_AND_SWAP_1);
6318 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6319 if (target)
6320 return target;
6321 break;
6322
6323 case BUILT_IN_VAL_COMPARE_AND_SWAP_1:
6324 case BUILT_IN_VAL_COMPARE_AND_SWAP_2:
6325 case BUILT_IN_VAL_COMPARE_AND_SWAP_4:
6326 case BUILT_IN_VAL_COMPARE_AND_SWAP_8:
6327 case BUILT_IN_VAL_COMPARE_AND_SWAP_16:
6328 mode = get_builtin_sync_mode (fcode - BUILT_IN_VAL_COMPARE_AND_SWAP_1);
6329 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6330 if (target)
6331 return target;
6332 break;
6333
6334 case BUILT_IN_LOCK_TEST_AND_SET_1:
6335 case BUILT_IN_LOCK_TEST_AND_SET_2:
6336 case BUILT_IN_LOCK_TEST_AND_SET_4:
6337 case BUILT_IN_LOCK_TEST_AND_SET_8:
6338 case BUILT_IN_LOCK_TEST_AND_SET_16:
6339 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_TEST_AND_SET_1);
6340 target = expand_builtin_lock_test_and_set (mode, exp, target);
6341 if (target)
6342 return target;
6343 break;
6344
6345 case BUILT_IN_LOCK_RELEASE_1:
6346 case BUILT_IN_LOCK_RELEASE_2:
6347 case BUILT_IN_LOCK_RELEASE_4:
6348 case BUILT_IN_LOCK_RELEASE_8:
6349 case BUILT_IN_LOCK_RELEASE_16:
6350 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_RELEASE_1);
6351 expand_builtin_lock_release (mode, exp);
6352 return const0_rtx;
6353
6354 case BUILT_IN_SYNCHRONIZE:
6355 expand_builtin_synchronize ();
6356 return const0_rtx;
6357
6358 case BUILT_IN_OBJECT_SIZE:
6359 return expand_builtin_object_size (exp);
6360
6361 case BUILT_IN_MEMCPY_CHK:
6362 case BUILT_IN_MEMPCPY_CHK:
6363 case BUILT_IN_MEMMOVE_CHK:
6364 case BUILT_IN_MEMSET_CHK:
6365 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6366 if (target)
6367 return target;
6368 break;
6369
6370 case BUILT_IN_STRCPY_CHK:
6371 case BUILT_IN_STPCPY_CHK:
6372 case BUILT_IN_STRNCPY_CHK:
6373 case BUILT_IN_STRCAT_CHK:
6374 case BUILT_IN_STRNCAT_CHK:
6375 case BUILT_IN_SNPRINTF_CHK:
6376 case BUILT_IN_VSNPRINTF_CHK:
6377 maybe_emit_chk_warning (exp, fcode);
6378 break;
6379
6380 case BUILT_IN_SPRINTF_CHK:
6381 case BUILT_IN_VSPRINTF_CHK:
6382 maybe_emit_sprintf_chk_warning (exp, fcode);
6383 break;
6384
6385 case BUILT_IN_FREE:
6386 maybe_emit_free_warning (exp);
6387 break;
6388
6389 default: /* just do library call, if unknown builtin */
6390 break;
6391 }
6392
6393 /* The switch statement above can drop through to cause the function
6394 to be called normally. */
6395 return expand_call (exp, target, ignore);
6396 }
6397
6398 /* Determine whether a tree node represents a call to a built-in
6399 function. If the tree T is a call to a built-in function with
6400 the right number of arguments of the appropriate types, return
6401 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6402 Otherwise the return value is END_BUILTINS. */
6403
6404 enum built_in_function
6405 builtin_mathfn_code (const_tree t)
6406 {
6407 const_tree fndecl, arg, parmlist;
6408 const_tree argtype, parmtype;
6409 const_call_expr_arg_iterator iter;
6410
6411 if (TREE_CODE (t) != CALL_EXPR
6412 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6413 return END_BUILTINS;
6414
6415 fndecl = get_callee_fndecl (t);
6416 if (fndecl == NULL_TREE
6417 || TREE_CODE (fndecl) != FUNCTION_DECL
6418 || ! DECL_BUILT_IN (fndecl)
6419 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6420 return END_BUILTINS;
6421
6422 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6423 init_const_call_expr_arg_iterator (t, &iter);
6424 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
6425 {
6426 /* If a function doesn't take a variable number of arguments,
6427 the last element in the list will have type `void'. */
6428 parmtype = TREE_VALUE (parmlist);
6429 if (VOID_TYPE_P (parmtype))
6430 {
6431 if (more_const_call_expr_args_p (&iter))
6432 return END_BUILTINS;
6433 return DECL_FUNCTION_CODE (fndecl);
6434 }
6435
6436 if (! more_const_call_expr_args_p (&iter))
6437 return END_BUILTINS;
6438
6439 arg = next_const_call_expr_arg (&iter);
6440 argtype = TREE_TYPE (arg);
6441
6442 if (SCALAR_FLOAT_TYPE_P (parmtype))
6443 {
6444 if (! SCALAR_FLOAT_TYPE_P (argtype))
6445 return END_BUILTINS;
6446 }
6447 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
6448 {
6449 if (! COMPLEX_FLOAT_TYPE_P (argtype))
6450 return END_BUILTINS;
6451 }
6452 else if (POINTER_TYPE_P (parmtype))
6453 {
6454 if (! POINTER_TYPE_P (argtype))
6455 return END_BUILTINS;
6456 }
6457 else if (INTEGRAL_TYPE_P (parmtype))
6458 {
6459 if (! INTEGRAL_TYPE_P (argtype))
6460 return END_BUILTINS;
6461 }
6462 else
6463 return END_BUILTINS;
6464 }
6465
6466 /* Variable-length argument list. */
6467 return DECL_FUNCTION_CODE (fndecl);
6468 }
6469
6470 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
6471 evaluate to a constant. */
6472
6473 static tree
6474 fold_builtin_constant_p (tree arg)
6475 {
6476 /* We return 1 for a numeric type that's known to be a constant
6477 value at compile-time or for an aggregate type that's a
6478 literal constant. */
6479 STRIP_NOPS (arg);
6480
6481 /* If we know this is a constant, emit the constant of one. */
6482 if (CONSTANT_CLASS_P (arg)
6483 || (TREE_CODE (arg) == CONSTRUCTOR
6484 && TREE_CONSTANT (arg)))
6485 return integer_one_node;
6486 if (TREE_CODE (arg) == ADDR_EXPR)
6487 {
6488 tree op = TREE_OPERAND (arg, 0);
6489 if (TREE_CODE (op) == STRING_CST
6490 || (TREE_CODE (op) == ARRAY_REF
6491 && integer_zerop (TREE_OPERAND (op, 1))
6492 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
6493 return integer_one_node;
6494 }
6495
6496 /* If this expression has side effects, show we don't know it to be a
6497 constant. Likewise if it's a pointer or aggregate type since in
6498 those case we only want literals, since those are only optimized
6499 when generating RTL, not later.
6500 And finally, if we are compiling an initializer, not code, we
6501 need to return a definite result now; there's not going to be any
6502 more optimization done. */
6503 if (TREE_SIDE_EFFECTS (arg)
6504 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
6505 || POINTER_TYPE_P (TREE_TYPE (arg))
6506 || cfun == 0
6507 || folding_initializer)
6508 return integer_zero_node;
6509
6510 return NULL_TREE;
6511 }
6512
6513 /* Create builtin_expect with PRED and EXPECTED as its arguments and
6514 return it as a truthvalue. */
6515
6516 static tree
6517 build_builtin_expect_predicate (location_t loc, tree pred, tree expected)
6518 {
6519 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
6520
6521 fn = built_in_decls[BUILT_IN_EXPECT];
6522 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
6523 ret_type = TREE_TYPE (TREE_TYPE (fn));
6524 pred_type = TREE_VALUE (arg_types);
6525 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
6526
6527 pred = fold_convert_loc (loc, pred_type, pred);
6528 expected = fold_convert_loc (loc, expected_type, expected);
6529 call_expr = build_call_expr_loc (loc, fn, 2, pred, expected);
6530
6531 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
6532 build_int_cst (ret_type, 0));
6533 }
6534
6535 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
6536 NULL_TREE if no simplification is possible. */
6537
6538 static tree
6539 fold_builtin_expect (location_t loc, tree arg0, tree arg1)
6540 {
6541 tree inner, fndecl;
6542 enum tree_code code;
6543
6544 /* If this is a builtin_expect within a builtin_expect keep the
6545 inner one. See through a comparison against a constant. It
6546 might have been added to create a thruthvalue. */
6547 inner = arg0;
6548 if (COMPARISON_CLASS_P (inner)
6549 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
6550 inner = TREE_OPERAND (inner, 0);
6551
6552 if (TREE_CODE (inner) == CALL_EXPR
6553 && (fndecl = get_callee_fndecl (inner))
6554 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
6555 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
6556 return arg0;
6557
6558 /* Distribute the expected value over short-circuiting operators.
6559 See through the cast from truthvalue_type_node to long. */
6560 inner = arg0;
6561 while (TREE_CODE (inner) == NOP_EXPR
6562 && INTEGRAL_TYPE_P (TREE_TYPE (inner))
6563 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner, 0))))
6564 inner = TREE_OPERAND (inner, 0);
6565
6566 code = TREE_CODE (inner);
6567 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6568 {
6569 tree op0 = TREE_OPERAND (inner, 0);
6570 tree op1 = TREE_OPERAND (inner, 1);
6571
6572 op0 = build_builtin_expect_predicate (loc, op0, arg1);
6573 op1 = build_builtin_expect_predicate (loc, op1, arg1);
6574 inner = build2 (code, TREE_TYPE (inner), op0, op1);
6575
6576 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
6577 }
6578
6579 /* If the argument isn't invariant then there's nothing else we can do. */
6580 if (!TREE_CONSTANT (arg0))
6581 return NULL_TREE;
6582
6583 /* If we expect that a comparison against the argument will fold to
6584 a constant return the constant. In practice, this means a true
6585 constant or the address of a non-weak symbol. */
6586 inner = arg0;
6587 STRIP_NOPS (inner);
6588 if (TREE_CODE (inner) == ADDR_EXPR)
6589 {
6590 do
6591 {
6592 inner = TREE_OPERAND (inner, 0);
6593 }
6594 while (TREE_CODE (inner) == COMPONENT_REF
6595 || TREE_CODE (inner) == ARRAY_REF);
6596 if ((TREE_CODE (inner) == VAR_DECL
6597 || TREE_CODE (inner) == FUNCTION_DECL)
6598 && DECL_WEAK (inner))
6599 return NULL_TREE;
6600 }
6601
6602 /* Otherwise, ARG0 already has the proper type for the return value. */
6603 return arg0;
6604 }
6605
6606 /* Fold a call to __builtin_classify_type with argument ARG. */
6607
6608 static tree
6609 fold_builtin_classify_type (tree arg)
6610 {
6611 if (arg == 0)
6612 return build_int_cst (NULL_TREE, no_type_class);
6613
6614 return build_int_cst (NULL_TREE, type_to_class (TREE_TYPE (arg)));
6615 }
6616
6617 /* Fold a call to __builtin_strlen with argument ARG. */
6618
6619 static tree
6620 fold_builtin_strlen (location_t loc, tree arg)
6621 {
6622 if (!validate_arg (arg, POINTER_TYPE))
6623 return NULL_TREE;
6624 else
6625 {
6626 tree len = c_strlen (arg, 0);
6627
6628 if (len)
6629 {
6630 /* Convert from the internal "sizetype" type to "size_t". */
6631 if (size_type_node)
6632 len = fold_convert_loc (loc, size_type_node, len);
6633 return len;
6634 }
6635
6636 return NULL_TREE;
6637 }
6638 }
6639
6640 /* Fold a call to __builtin_inf or __builtin_huge_val. */
6641
6642 static tree
6643 fold_builtin_inf (location_t loc, tree type, int warn)
6644 {
6645 REAL_VALUE_TYPE real;
6646
6647 /* __builtin_inff is intended to be usable to define INFINITY on all
6648 targets. If an infinity is not available, INFINITY expands "to a
6649 positive constant of type float that overflows at translation
6650 time", footnote "In this case, using INFINITY will violate the
6651 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
6652 Thus we pedwarn to ensure this constraint violation is
6653 diagnosed. */
6654 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
6655 pedwarn (loc, 0, "target format does not support infinity");
6656
6657 real_inf (&real);
6658 return build_real (type, real);
6659 }
6660
6661 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
6662
6663 static tree
6664 fold_builtin_nan (tree arg, tree type, int quiet)
6665 {
6666 REAL_VALUE_TYPE real;
6667 const char *str;
6668
6669 if (!validate_arg (arg, POINTER_TYPE))
6670 return NULL_TREE;
6671 str = c_getstr (arg);
6672 if (!str)
6673 return NULL_TREE;
6674
6675 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
6676 return NULL_TREE;
6677
6678 return build_real (type, real);
6679 }
6680
/* Return true if the floating point expression T has an integer value.
   We also allow +Inf, -Inf and NaN to be considered integer values.
   Conservative: returning false only means we could not prove
   integrality, not that T is fractional.  */

static bool
integer_valued_real_p (tree t)
{
  switch (TREE_CODE (t))
    {
    case FLOAT_EXPR:
      /* An integer converted to float is integral by construction.  */
      return true;

    case ABS_EXPR:
    case SAVE_EXPR:
      /* These preserve integrality of their operand.  */
      return integer_valued_real_p (TREE_OPERAND (t, 0));

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      /* The value of these nodes is operand 1; operand 0 is evaluated
         only for side effects.  */
      return integer_valued_real_p (TREE_OPERAND (t, 1));

    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
      /* Closed over the integers: integral iff both operands are.  */
      return integer_valued_real_p (TREE_OPERAND (t, 0))
             && integer_valued_real_p (TREE_OPERAND (t, 1));

    case COND_EXPR:
      /* Either arm may be selected at run time, so both must be
         integer valued; the condition itself does not matter.  */
      return integer_valued_real_p (TREE_OPERAND (t, 1))
             && integer_valued_real_p (TREE_OPERAND (t, 2));

    case REAL_CST:
      /* A constant can be checked directly.  */
      return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));

    case NOP_EXPR:
      {
        /* Look through conversions from integer types, and recurse
           through float-to-float conversions.  */
        tree type = TREE_TYPE (TREE_OPERAND (t, 0));
        if (TREE_CODE (type) == INTEGER_TYPE)
          return true;
        if (TREE_CODE (type) == REAL_TYPE)
          return integer_valued_real_p (TREE_OPERAND (t, 0));
        break;
      }

    case CALL_EXPR:
      switch (builtin_mathfn_code (t))
        {
        CASE_FLT_FN (BUILT_IN_CEIL):
        CASE_FLT_FN (BUILT_IN_FLOOR):
        CASE_FLT_FN (BUILT_IN_NEARBYINT):
        CASE_FLT_FN (BUILT_IN_RINT):
        CASE_FLT_FN (BUILT_IN_ROUND):
        CASE_FLT_FN (BUILT_IN_TRUNC):
          /* Round-to-integer builtins always yield integral values.  */
          return true;

        CASE_FLT_FN (BUILT_IN_FMIN):
        CASE_FLT_FN (BUILT_IN_FMAX):
          /* min/max of two integral values is integral.  */
          return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
                 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));

        default:
          break;
        }
      break;

    default:
      break;
    }
  return false;
}
6752
/* FNDECL is assumed to be a builtin where truncation can be propagated
   across (for instance floor((double)f) == (double)floorf (f).
   Do the transformation for a call with argument ARG.
   Return the simplified tree, or NULL_TREE if nothing applies.  */

static tree
fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Integer rounding functions are idempotent: e.g. floor(floor(x))
     is just floor(x), so fold fn(fn(x)) to the inner call.  */
  if (fcode == builtin_mathfn_code (arg))
    return arg;

  /* If argument is already integer valued, and we don't need to worry
     about setting errno, there's no need to perform rounding.  */
  if (! flag_errno_math && integer_valued_real_p (arg))
    return arg;

  if (optimize)
    {
      /* Narrow e.g. trunc ((double) f) to (double) truncf (f) when a
         builtin exists for the narrower type.  */
      tree arg0 = strip_float_extensions (arg);
      tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
      tree newtype = TREE_TYPE (arg0);
      tree decl;

      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
          && (decl = mathfn_built_in (newtype, fcode)))
        return fold_convert_loc (loc, ftype,
                                 build_call_expr_loc (loc, decl, 1,
                                                      fold_convert_loc (loc,
                                                                        newtype,
                                                                        arg0)));
    }
  return NULL_TREE;
}
6791
/* FNDECL is assumed to be builtin which can narrow the FP type of
   the argument, for instance lround((double)f) -> lroundf (f).
   Do the transformation for a call with argument ARG.
   Return the simplified tree, or NULL_TREE if nothing applies.  */

static tree
fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* If argument is already integer valued, and we don't need to worry
     about setting errno, there's no need to perform rounding.  */
  if (! flag_errno_math && integer_valued_real_p (arg))
    return fold_build1_loc (loc, FIX_TRUNC_EXPR,
                        TREE_TYPE (TREE_TYPE (fndecl)), arg);

  if (optimize)
    {
      /* Narrow e.g. lround ((double) f) to lroundf (f) when a builtin
         exists for the narrower argument type.  */
      tree ftype = TREE_TYPE (arg);
      tree arg0 = strip_float_extensions (arg);
      tree newtype = TREE_TYPE (arg0);
      tree decl;

      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
          && (decl = mathfn_built_in (newtype, fcode)))
        return build_call_expr_loc (loc, decl, 1,
                                fold_convert_loc (loc, newtype, arg0));
    }

  /* Canonicalize llround (x) to lround (x) on LP64 targets where
     sizeof (long long) == sizeof (long).  */
  if (TYPE_PRECISION (long_long_integer_type_node)
      == TYPE_PRECISION (long_integer_type_node))
    {
      tree newfn = NULL_TREE;
      switch (fcode)
        {
        CASE_FLT_FN (BUILT_IN_LLCEIL):
          newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
          break;

        CASE_FLT_FN (BUILT_IN_LLFLOOR):
          newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
          break;

        CASE_FLT_FN (BUILT_IN_LLROUND):
          newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
          break;

        CASE_FLT_FN (BUILT_IN_LLRINT):
          newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
          break;

        default:
          break;
        }

      if (newfn)
        {
          /* The caller expects the wider integer type, so convert the
             long-returning call back to the original return type.  */
          tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
          return fold_convert_loc (loc,
                                   TREE_TYPE (TREE_TYPE (fndecl)), newcall);
        }
    }

  return NULL_TREE;
}
6861
/* Fold call to builtin cabs, cabsf or cabsl with argument ARG.  TYPE is the
   return type.  FNDECL is the cabs function decl, used to rebuild the
   call when only the argument is simplified.  Return NULL_TREE if no
   simplification can be made.  */

static tree
fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
{
  tree res;

  /* The argument must be a complex value with real components.  */
  if (!validate_arg (arg, COMPLEX_TYPE)
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if (TREE_CODE (arg) == COMPLEX_CST
      && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
                              type, mpfr_hypot)))
    return res;

  if (TREE_CODE (arg) == COMPLEX_EXPR)
    {
      tree real = TREE_OPERAND (arg, 0);
      tree imag = TREE_OPERAND (arg, 1);

      /* If either part is zero, cabs is fabs of the other.  */
      if (real_zerop (real))
        return fold_build1_loc (loc, ABS_EXPR, type, imag);
      if (real_zerop (imag))
        return fold_build1_loc (loc, ABS_EXPR, type, real);

      /* cabs(x+xi) -> fabs(x)*sqrt(2).  */
      if (flag_unsafe_math_optimizations
          && operand_equal_p (real, imag, OEP_PURE_SAME))
        {
          const REAL_VALUE_TYPE sqrt2_trunc
            = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
          STRIP_NOPS (real);
          return fold_build2_loc (loc, MULT_EXPR, type,
                              fold_build1_loc (loc, ABS_EXPR, type, real),
                              build_real (type, sqrt2_trunc));
        }
    }

  /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z).  */
  if (TREE_CODE (arg) == NEGATE_EXPR
      || TREE_CODE (arg) == CONJ_EXPR)
    return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));

  /* Don't do this when optimizing for size.  */
  if (flag_unsafe_math_optimizations
      && optimize && optimize_function_for_speed_p (cfun))
    {
      tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);

      if (sqrtfn != NULL_TREE)
        {
          tree rpart, ipart, result;

          /* Wrap in save_exprs so ARG and its parts are evaluated
             only once in the expansion below.  */
          arg = builtin_save_expr (arg);

          rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
          ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);

          rpart = builtin_save_expr (rpart);
          ipart = builtin_save_expr (ipart);

          /* Expand cabs inline as sqrt (r*r + i*i).  */
          result = fold_build2_loc (loc, PLUS_EXPR, type,
                                fold_build2_loc (loc, MULT_EXPR, type,
                                             rpart, rpart),
                                fold_build2_loc (loc, MULT_EXPR, type,
                                             ipart, ipart));

          return build_call_expr_loc (loc, sqrtfn, 1, result);
        }
    }

  return NULL_TREE;
}
6939
/* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
   TYPE is the return type.  Return NULL_TREE if no simplification can
   be made.  */

static tree
fold_builtin_sqrt (location_t loc, tree arg, tree type)
{

  enum built_in_function fcode;
  tree res;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
    return res;

  /* Optimize sqrt(expN(x)) = expN(x*0.5).  */
  fcode = builtin_mathfn_code (arg);
  if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
    {
      tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
      arg = fold_build2_loc (loc, MULT_EXPR, type,
                         CALL_EXPR_ARG (arg, 0),
                         build_real (type, dconsthalf));
      return build_call_expr_loc (loc, expfn, 1, arg);
    }

  /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)).  */
  if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
    {
      tree powfn = mathfn_built_in (type, BUILT_IN_POW);

      if (powfn)
        {
          tree arg0 = CALL_EXPR_ARG (arg, 0);
          tree tree_root;
          /* The inner root was either sqrt or cbrt.  */
          /* This was a conditional expression but it triggered a bug
             in Sun C 5.5.  */
          REAL_VALUE_TYPE dconstroot;
          if (BUILTIN_SQRT_P (fcode))
            dconstroot = dconsthalf;
          else
            dconstroot = dconst_third ();

          /* Adjust for the outer root: halve the exponent by
             decrementing the binary exponent field.  */
          SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
          dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
          tree_root = build_real (type, dconstroot);
          return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
        }
    }

  /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5).  */
  if (flag_unsafe_math_optimizations
      && (fcode == BUILT_IN_POW
          || fcode == BUILT_IN_POWF
          || fcode == BUILT_IN_POWL))
    {
      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
      tree arg0 = CALL_EXPR_ARG (arg, 0);
      tree arg1 = CALL_EXPR_ARG (arg, 1);
      tree narg1;
      /* Use |x| as the base unless x is known nonnegative.  */
      if (!tree_expr_nonnegative_p (arg0))
        arg0 = build1 (ABS_EXPR, type, arg0);
      narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
                           build_real (type, dconsthalf));
      return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
    }

  return NULL_TREE;
}
7013
/* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
   TYPE is the return type.  Return NULL_TREE if no simplification can
   be made.  */

static tree
fold_builtin_cbrt (location_t loc, tree arg, tree type)
{
  const enum built_in_function fcode = builtin_mathfn_code (arg);
  tree res;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
    return res;

  if (flag_unsafe_math_optimizations)
    {
      /* Optimize cbrt(expN(x)) -> expN(x/3).  */
      if (BUILTIN_EXPONENT_P (fcode))
        {
          tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
          const REAL_VALUE_TYPE third_trunc =
            real_value_truncate (TYPE_MODE (type), dconst_third ());
          arg = fold_build2_loc (loc, MULT_EXPR, type,
                             CALL_EXPR_ARG (arg, 0),
                             build_real (type, third_trunc));
          return build_call_expr_loc (loc, expfn, 1, arg);
        }

      /* Optimize cbrt(sqrt(x)) -> pow(x,1/6).  */
      if (BUILTIN_SQRT_P (fcode))
        {
          tree powfn = mathfn_built_in (type, BUILT_IN_POW);

          if (powfn)
            {
              tree arg0 = CALL_EXPR_ARG (arg, 0);
              tree tree_root;
              /* Start from 1/3 and halve it (decrement the binary
                 exponent) to obtain 1/6.  */
              REAL_VALUE_TYPE dconstroot = dconst_third ();

              SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
              dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
              tree_root = build_real (type, dconstroot);
              return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
            }
        }

      /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative.  */
      if (BUILTIN_CBRT_P (fcode))
        {
          tree arg0 = CALL_EXPR_ARG (arg, 0);
          if (tree_expr_nonnegative_p (arg0))
            {
              tree powfn = mathfn_built_in (type, BUILT_IN_POW);

              if (powfn)
                {
                  tree tree_root;
                  REAL_VALUE_TYPE dconstroot;

                  /* 1/9 == (1/3) * (1/3).  */
                  real_arithmetic (&dconstroot, MULT_EXPR,
                                   dconst_third_ptr (), dconst_third_ptr ());
                  dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
                  tree_root = build_real (type, dconstroot);
                  return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
                }
            }
        }

      /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative.  */
      if (fcode == BUILT_IN_POW
          || fcode == BUILT_IN_POWF
          || fcode == BUILT_IN_POWL)
        {
          tree arg00 = CALL_EXPR_ARG (arg, 0);
          tree arg01 = CALL_EXPR_ARG (arg, 1);
          if (tree_expr_nonnegative_p (arg00))
            {
              tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
              const REAL_VALUE_TYPE dconstroot
                = real_value_truncate (TYPE_MODE (type), dconst_third ());
              tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
                                         build_real (type, dconstroot));
              return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
            }
        }
    }
  return NULL_TREE;
}
7104
7105 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7106 TYPE is the type of the return value. Return NULL_TREE if no
7107 simplification can be made. */
7108
7109 static tree
7110 fold_builtin_cos (location_t loc,
7111 tree arg, tree type, tree fndecl)
7112 {
7113 tree res, narg;
7114
7115 if (!validate_arg (arg, REAL_TYPE))
7116 return NULL_TREE;
7117
7118 /* Calculate the result when the argument is a constant. */
7119 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7120 return res;
7121
7122 /* Optimize cos(-x) into cos (x). */
7123 if ((narg = fold_strip_sign_ops (arg)))
7124 return build_call_expr_loc (loc, fndecl, 1, narg);
7125
7126 return NULL_TREE;
7127 }
7128
7129 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7130 Return NULL_TREE if no simplification can be made. */
7131
7132 static tree
7133 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
7134 {
7135 if (validate_arg (arg, REAL_TYPE))
7136 {
7137 tree res, narg;
7138
7139 /* Calculate the result when the argument is a constant. */
7140 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7141 return res;
7142
7143 /* Optimize cosh(-x) into cosh (x). */
7144 if ((narg = fold_strip_sign_ops (arg)))
7145 return build_call_expr_loc (loc, fndecl, 1, narg);
7146 }
7147
7148 return NULL_TREE;
7149 }
7150
7151 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7152 argument ARG. TYPE is the type of the return value. Return
7153 NULL_TREE if no simplification can be made. */
7154
7155 static tree
7156 fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
7157 bool hyper)
7158 {
7159 if (validate_arg (arg, COMPLEX_TYPE)
7160 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7161 {
7162 tree tmp;
7163
7164 /* Calculate the result when the argument is a constant. */
7165 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
7166 return tmp;
7167
7168 /* Optimize fn(-x) into fn(x). */
7169 if ((tmp = fold_strip_sign_ops (arg)))
7170 return build_call_expr_loc (loc, fndecl, 1, tmp);
7171 }
7172
7173 return NULL_TREE;
7174 }
7175
7176 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7177 Return NULL_TREE if no simplification can be made. */
7178
7179 static tree
7180 fold_builtin_tan (tree arg, tree type)
7181 {
7182 enum built_in_function fcode;
7183 tree res;
7184
7185 if (!validate_arg (arg, REAL_TYPE))
7186 return NULL_TREE;
7187
7188 /* Calculate the result when the argument is a constant. */
7189 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7190 return res;
7191
7192 /* Optimize tan(atan(x)) = x. */
7193 fcode = builtin_mathfn_code (arg);
7194 if (flag_unsafe_math_optimizations
7195 && (fcode == BUILT_IN_ATAN
7196 || fcode == BUILT_IN_ATANF
7197 || fcode == BUILT_IN_ATANL))
7198 return CALL_EXPR_ARG (arg, 0);
7199
7200 return NULL_TREE;
7201 }
7202
/* Fold function call to builtin sincos, sincosf, or sincosl.  ARG0 is
   the angle; ARG1 and ARG2 are the pointers the sine and cosine are
   stored through.  Return NULL_TREE if no simplification can be
   made.  */

static tree
fold_builtin_sincos (location_t loc,
                     tree arg0, tree arg1, tree arg2)
{
  tree type;
  tree res, fn, call;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE))
    return NULL_TREE;

  type = TREE_TYPE (arg0);

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
    return res;

  /* Canonicalize sincos to cexpi.  */
  if (!TARGET_C99_FUNCTIONS)
    return NULL_TREE;
  fn = mathfn_built_in (type, BUILT_IN_CEXPI);
  if (!fn)
    return NULL_TREE;

  /* Evaluate cexpi (arg0) once, then store its imaginary part (the
     sine) through ARG1 and its real part (the cosine) through ARG2.  */
  call = build_call_expr_loc (loc, fn, 1, arg0);
  call = builtin_save_expr (call);

  return build2 (COMPOUND_EXPR, void_type_node,
                 build2 (MODIFY_EXPR, void_type_node,
                         build_fold_indirect_ref_loc (loc, arg1),
                         build1 (IMAGPART_EXPR, type, call)),
                 build2 (MODIFY_EXPR, void_type_node,
                         build_fold_indirect_ref_loc (loc, arg2),
                         build1 (REALPART_EXPR, type, call)));
}
7242
/* Fold function call to builtin cexp, cexpf, or cexpl.  ARG0 is the
   complex argument and TYPE the (complex) return type.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cexp (location_t loc, tree arg0, tree type)
{
  tree rtype;
  tree realp, imagp, ifn;
  tree res;

  if (!validate_arg (arg0, COMPLEX_TYPE)
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
    return res;

  /* RTYPE is the scalar component type of the complex argument.  */
  rtype = TREE_TYPE (TREE_TYPE (arg0));

  /* In case we can figure out the real part of arg0 and it is constant zero
     fold to cexpi.  */
  if (!TARGET_C99_FUNCTIONS)
    return NULL_TREE;
  ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
  if (!ifn)
    return NULL_TREE;

  /* cexp (0 + yi) == cexpi (y).  */
  if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
      && real_zerop (realp))
    {
      tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
      return build_call_expr_loc (loc, ifn, 1, narg);
    }

  /* In case we can easily decompose real and imaginary parts split cexp
     to exp (r) * cexpi (i).  */
  if (flag_unsafe_math_optimizations
      && realp)
    {
      tree rfn, rcall, icall;

      rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
      if (!rfn)
        return NULL_TREE;

      imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
      if (!imagp)
        return NULL_TREE;

      /* Save both calls so each is evaluated exactly once before
         combining them into the complex result below.  */
      icall = build_call_expr_loc (loc, ifn, 1, imagp);
      icall = builtin_save_expr (icall);
      rcall = build_call_expr_loc (loc, rfn, 1, realp);
      rcall = builtin_save_expr (rcall);
      return fold_build2_loc (loc, COMPLEX_EXPR, type,
                          fold_build2_loc (loc, MULT_EXPR, rtype,
                                       rcall,
                                       fold_build1_loc (loc, REALPART_EXPR,
                                                    rtype, icall)),
                          fold_build2_loc (loc, MULT_EXPR, rtype,
                                       rcall,
                                       fold_build1_loc (loc, IMAGPART_EXPR,
                                                    rtype, icall)));
    }

  return NULL_TREE;
}
7310
7311 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7312 Return NULL_TREE if no simplification can be made. */
7313
7314 static tree
7315 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
7316 {
7317 if (!validate_arg (arg, REAL_TYPE))
7318 return NULL_TREE;
7319
7320 /* Optimize trunc of constant value. */
7321 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7322 {
7323 REAL_VALUE_TYPE r, x;
7324 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7325
7326 x = TREE_REAL_CST (arg);
7327 real_trunc (&r, TYPE_MODE (type), &x);
7328 return build_real (type, r);
7329 }
7330
7331 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7332 }
7333
7334 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7335 Return NULL_TREE if no simplification can be made. */
7336
7337 static tree
7338 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
7339 {
7340 if (!validate_arg (arg, REAL_TYPE))
7341 return NULL_TREE;
7342
7343 /* Optimize floor of constant value. */
7344 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7345 {
7346 REAL_VALUE_TYPE x;
7347
7348 x = TREE_REAL_CST (arg);
7349 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7350 {
7351 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7352 REAL_VALUE_TYPE r;
7353
7354 real_floor (&r, TYPE_MODE (type), &x);
7355 return build_real (type, r);
7356 }
7357 }
7358
7359 /* Fold floor (x) where x is nonnegative to trunc (x). */
7360 if (tree_expr_nonnegative_p (arg))
7361 {
7362 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7363 if (truncfn)
7364 return build_call_expr_loc (loc, truncfn, 1, arg);
7365 }
7366
7367 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7368 }
7369
7370 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7371 Return NULL_TREE if no simplification can be made. */
7372
7373 static tree
7374 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
7375 {
7376 if (!validate_arg (arg, REAL_TYPE))
7377 return NULL_TREE;
7378
7379 /* Optimize ceil of constant value. */
7380 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7381 {
7382 REAL_VALUE_TYPE x;
7383
7384 x = TREE_REAL_CST (arg);
7385 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7386 {
7387 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7388 REAL_VALUE_TYPE r;
7389
7390 real_ceil (&r, TYPE_MODE (type), &x);
7391 return build_real (type, r);
7392 }
7393 }
7394
7395 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7396 }
7397
7398 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7399 Return NULL_TREE if no simplification can be made. */
7400
7401 static tree
7402 fold_builtin_round (location_t loc, tree fndecl, tree arg)
7403 {
7404 if (!validate_arg (arg, REAL_TYPE))
7405 return NULL_TREE;
7406
7407 /* Optimize round of constant value. */
7408 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7409 {
7410 REAL_VALUE_TYPE x;
7411
7412 x = TREE_REAL_CST (arg);
7413 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7414 {
7415 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7416 REAL_VALUE_TYPE r;
7417
7418 real_round (&r, TYPE_MODE (type), &x);
7419 return build_real (type, r);
7420 }
7421 }
7422
7423 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7424 }
7425
/* Fold function call to builtin lround, lroundf or lroundl (or the
   corresponding long long versions) and other rounding functions.  ARG
   is the argument to the call.  Return NULL_TREE if no simplification
   can be made.  */

static tree
fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Optimize lround of constant value.  */
  if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);

      /* Only finite values fold; Inf/NaN are left for the runtime.  */
      if (real_isfinite (&x))
        {
          tree itype = TREE_TYPE (TREE_TYPE (fndecl));
          tree ftype = TREE_TYPE (arg);
          unsigned HOST_WIDE_INT lo2;
          HOST_WIDE_INT hi, lo;
          REAL_VALUE_TYPE r;

          /* Round according to which builtin family this is.  */
          switch (DECL_FUNCTION_CODE (fndecl))
            {
            CASE_FLT_FN (BUILT_IN_LFLOOR):
            CASE_FLT_FN (BUILT_IN_LLFLOOR):
              real_floor (&r, TYPE_MODE (ftype), &x);
              break;

            CASE_FLT_FN (BUILT_IN_LCEIL):
            CASE_FLT_FN (BUILT_IN_LLCEIL):
              real_ceil (&r, TYPE_MODE (ftype), &x);
              break;

            CASE_FLT_FN (BUILT_IN_LROUND):
            CASE_FLT_FN (BUILT_IN_LLROUND):
              real_round (&r, TYPE_MODE (ftype), &x);
              break;

            default:
              gcc_unreachable ();
            }

          /* Build the integer constant only when it fits the result
             type; otherwise fall through to the generic folds.  */
          REAL_VALUE_TO_INT (&lo, &hi, r);
          if (!fit_double_type (lo, hi, &lo2, &hi, itype))
            return build_int_cst_wide (itype, lo2, hi);
        }
    }

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x).  */
      if (tree_expr_nonnegative_p (arg))
        return fold_build1_loc (loc, FIX_TRUNC_EXPR,
                            TREE_TYPE (TREE_TYPE (fndecl)), arg);
      break;
    default:;
    }

  return fold_fixed_mathfn (loc, fndecl, arg);
}
7491
/* Fold function call to builtin ffs, clz, ctz, popcount and parity
   and their long and long long variants (i.e. ffsl and ffsll).  ARG is
   the argument to the call.  Return NULL_TREE if no simplification can
   be made.  */

static tree
fold_builtin_bitop (tree fndecl, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Optimize for constant argument.  */
  if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
    {
      /* The constant is held as two host words: LO (low bits) and HI
         (high bits when the type is wider than one host word).  */
      HOST_WIDE_INT hi, width, result;
      unsigned HOST_WIDE_INT lo;
      tree type;

      type = TREE_TYPE (arg);
      width = TYPE_PRECISION (type);
      lo = TREE_INT_CST_LOW (arg);

      /* Clear all the bits that are beyond the type's precision.  */
      if (width > HOST_BITS_PER_WIDE_INT)
        {
          hi = TREE_INT_CST_HIGH (arg);
          if (width < 2 * HOST_BITS_PER_WIDE_INT)
            hi &= ~((HOST_WIDE_INT) (-1) >> (width - HOST_BITS_PER_WIDE_INT));
        }
      else
        {
          hi = 0;
          if (width < HOST_BITS_PER_WIDE_INT)
            lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
        }

      switch (DECL_FUNCTION_CODE (fndecl))
        {
        CASE_INT_FN (BUILT_IN_FFS):
          /* ffs: 1-based index of the lowest set bit, 0 if none.
             (lo & -lo) isolates the lowest set bit.  */
          if (lo != 0)
            result = exact_log2 (lo & -lo) + 1;
          else if (hi != 0)
            result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi) + 1;
          else
            result = 0;
          break;

        CASE_INT_FN (BUILT_IN_CLZ):
          /* clz: leading zero count; at zero, fold only if the target
             defines a value for it.  */
          if (hi != 0)
            result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
          else if (lo != 0)
            result = width - floor_log2 (lo) - 1;
          else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
            result = width;
          break;

        CASE_INT_FN (BUILT_IN_CTZ):
          /* ctz: trailing zero count; same zero-input caveat as clz.  */
          if (lo != 0)
            result = exact_log2 (lo & -lo);
          else if (hi != 0)
            result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi);
          else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
            result = width;
          break;

        CASE_INT_FN (BUILT_IN_POPCOUNT):
          /* popcount: Kernighan's trick clears one set bit per step.  */
          result = 0;
          while (lo)
            result++, lo &= lo - 1;
          while (hi)
            result++, hi &= hi - 1;
          break;

        CASE_INT_FN (BUILT_IN_PARITY):
          /* parity: popcount modulo 2.  */
          result = 0;
          while (lo)
            result++, lo &= lo - 1;
          while (hi)
            result++, hi &= hi - 1;
          result &= 1;
          break;

        default:
          gcc_unreachable ();
        }

      return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
    }

  return NULL_TREE;
}
7583
/* Fold function call to builtin_bswap and the long and long long
   variants.  ARG is the argument to the call.  Return NULL_TREE if no
   simplification can be made.  */
static tree
fold_builtin_bswap (tree fndecl, tree arg)
{
  if (! validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Optimize constant value.  */
  if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
    {
      /* The constant and the result are each held as two host words:
         LO/R_LO (low bits) and HI/R_HI (high bits).  */
      HOST_WIDE_INT hi, width, r_hi = 0;
      unsigned HOST_WIDE_INT lo, r_lo = 0;
      tree type;

      type = TREE_TYPE (arg);
      width = TYPE_PRECISION (type);
      lo = TREE_INT_CST_LOW (arg);
      hi = TREE_INT_CST_HIGH (arg);

      switch (DECL_FUNCTION_CODE (fndecl))
        {
        case BUILT_IN_BSWAP32:
        case BUILT_IN_BSWAP64:
          {
            int s;

            /* Move the byte at bit offset S to the mirrored offset D,
               picking it out of whichever host word holds it.  */
            for (s = 0; s < width; s += 8)
              {
                int d = width - s - 8;
                unsigned HOST_WIDE_INT byte;

                if (s < HOST_BITS_PER_WIDE_INT)
                  byte = (lo >> s) & 0xff;
                else
                  byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;

                if (d < HOST_BITS_PER_WIDE_INT)
                  r_lo |= byte << d;
                else
                  r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
              }
          }

          break;

        default:
          gcc_unreachable ();
        }

      if (width < HOST_BITS_PER_WIDE_INT)
        return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), r_lo);
      else
        return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl)), r_lo, r_hi);
    }

  return NULL_TREE;
}
7642
/* A subroutine of fold_builtin to fold the various logarithmic
   functions.  Return NULL_TREE if no simplification can me made.
   FUNC is the corresponding MPFR logarithm function (mpfr_log,
   mpfr_log2 or mpfr_log10); it both drives constant folding and
   identifies which log builtin FNDECL is.  */

static tree
fold_builtin_logarithm (location_t loc, tree fndecl, tree arg,
                        int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
{
  if (validate_arg (arg, REAL_TYPE))
    {
      tree type = TREE_TYPE (TREE_TYPE (fndecl));
      tree res;
      const enum built_in_function fcode = builtin_mathfn_code (arg);

      /* Calculate the result when the argument is a constant.  */
      if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
        return res;

      /* Special case, optimize logN(expN(x)) = x, matching the log
         base to the exponential base via FUNC.  */
      if (flag_unsafe_math_optimizations
          && ((func == mpfr_log
               && (fcode == BUILT_IN_EXP
                   || fcode == BUILT_IN_EXPF
                   || fcode == BUILT_IN_EXPL))
              || (func == mpfr_log2
                  && (fcode == BUILT_IN_EXP2
                      || fcode == BUILT_IN_EXP2F
                      || fcode == BUILT_IN_EXP2L))
              || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
        return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));

      /* Optimize logN(func()) for various exponential functions.  We
         want to determine the value "x" and the power "exponent" in
         order to transform logN(x**exponent) into exponent*logN(x).  */
      if (flag_unsafe_math_optimizations)
        {
          tree exponent = 0, x = 0;

          switch (fcode)
            {
            CASE_FLT_FN (BUILT_IN_EXP):
              /* Prepare to do logN(exp(exponent) -> exponent*logN(e).  */
              x = build_real (type, real_value_truncate (TYPE_MODE (type),
                                                         dconst_e ()));
              exponent = CALL_EXPR_ARG (arg, 0);
              break;
            CASE_FLT_FN (BUILT_IN_EXP2):
              /* Prepare to do logN(exp2(exponent) -> exponent*logN(2).  */
              x = build_real (type, dconst2);
              exponent = CALL_EXPR_ARG (arg, 0);
              break;
            CASE_FLT_FN (BUILT_IN_EXP10):
            CASE_FLT_FN (BUILT_IN_POW10):
              /* Prepare to do logN(exp10(exponent) -> exponent*logN(10).  */
              {
                REAL_VALUE_TYPE dconst10;
                real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
                x = build_real (type, dconst10);
              }
              exponent = CALL_EXPR_ARG (arg, 0);
              break;
            CASE_FLT_FN (BUILT_IN_SQRT):
              /* Prepare to do logN(sqrt(x) -> 0.5*logN(x).  */
              x = CALL_EXPR_ARG (arg, 0);
              exponent = build_real (type, dconsthalf);
              break;
            CASE_FLT_FN (BUILT_IN_CBRT):
              /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x).  */
              x = CALL_EXPR_ARG (arg, 0);
              exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
                                                                dconst_third ()));
              break;
            CASE_FLT_FN (BUILT_IN_POW):
              /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x).  */
              x = CALL_EXPR_ARG (arg, 0);
              exponent = CALL_EXPR_ARG (arg, 1);
              break;
            default:
              break;
            }

          /* Now perform the optimization.  */
          if (x && exponent)
            {
              tree logfn = build_call_expr_loc (loc, fndecl, 1, x);
              return fold_build2_loc (loc, MULT_EXPR, type, exponent, logfn);
            }
        }
    }

  return NULL_TREE;
}
7735
/* Fold a builtin function call to hypot, hypotf, or hypotl.  ARG0 and
   ARG1 are the two operands and TYPE the return type.  FNDECL is used
   to rebuild the call when only the arguments simplify.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_hypot (location_t loc, tree fndecl,
                    tree arg0, tree arg1, tree type)
{
  tree res, narg0, narg1;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
    return res;

  /* If either argument to hypot has a negate or abs, strip that off.
     E.g. hypot(-x,fabs(y)) -> hypot(x,y).  */
  narg0 = fold_strip_sign_ops (arg0);
  narg1 = fold_strip_sign_ops (arg1);
  if (narg0 || narg1)
    {
      return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
                              narg1 ? narg1 : arg1);
    }

  /* If either argument is zero, hypot is fabs of the other.  */
  if (real_zerop (arg0))
    return fold_build1_loc (loc, ABS_EXPR, type, arg1);
  else if (real_zerop (arg1))
    return fold_build1_loc (loc, ABS_EXPR, type, arg0);

  /* hypot(x,x) -> fabs(x)*sqrt(2).  */
  if (flag_unsafe_math_optimizations
      && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
    {
      const REAL_VALUE_TYPE sqrt2_trunc
        = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
      return fold_build2_loc (loc, MULT_EXPR, type,
                          fold_build1_loc (loc, ABS_EXPR, type, arg0),
                          build_real (type, sqrt2_trunc));
    }

  return NULL_TREE;
}
7782
7783
/* Fold a builtin function call to pow, powf, or powl.  Return
   NULL_TREE if no simplification can be made.  */
static tree
fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
{
  tree res;

  /* Both arguments must be real-valued.  */
  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
    return res;

  /* Optimize pow(1.0,y) = 1.0.  */
  if (real_onep (arg0))
    return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);

  /* Transforms driven by a constant (non-overflowed) exponent.  */
  if (TREE_CODE (arg1) == REAL_CST
      && !TREE_OVERFLOW (arg1))
    {
      REAL_VALUE_TYPE cint;
      REAL_VALUE_TYPE c;
      HOST_WIDE_INT n;

      c = TREE_REAL_CST (arg1);

      /* Optimize pow(x,0.0) = 1.0.  */
      if (REAL_VALUES_EQUAL (c, dconst0))
	return omit_one_operand_loc (loc, type, build_real (type, dconst1),
				 arg0);

      /* Optimize pow(x,1.0) = x.  */
      if (REAL_VALUES_EQUAL (c, dconst1))
	return arg0;

      /* Optimize pow(x,-1.0) = 1.0/x.  */
      if (REAL_VALUES_EQUAL (c, dconstm1))
	return fold_build2_loc (loc, RDIV_EXPR, type,
			    build_real (type, dconst1), arg0);

      /* Optimize pow(x,0.5) = sqrt(x).  Unsafe: differs for x = -0.0
	 and x = -Inf.  */
      if (flag_unsafe_math_optimizations
	  && REAL_VALUES_EQUAL (c, dconsthalf))
	{
	  tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);

	  if (sqrtfn != NULL_TREE)
	    return build_call_expr_loc (loc, sqrtfn, 1, arg0);
	}

      /* Optimize pow(x,1.0/3.0) = cbrt(x).  */
      if (flag_unsafe_math_optimizations)
	{
	  /* 1/3 truncated to the target type's precision, to match the
	     constant a user could actually have written.  */
	  const REAL_VALUE_TYPE dconstroot
	    = real_value_truncate (TYPE_MODE (type), dconst_third ());

	  if (REAL_VALUES_EQUAL (c, dconstroot))
	    {
	      tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
	      if (cbrtfn != NULL_TREE)
		return build_call_expr_loc (loc, cbrtfn, 1, arg0);
	    }
	}

      /* Check for an integer exponent.  */
      n = real_to_integer (&c);
      real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
      if (real_identical (&c, &cint))
	{
	  /* Attempt to evaluate pow at compile-time, unless this should
	     raise an exception.  */
	  if (TREE_CODE (arg0) == REAL_CST
	      && !TREE_OVERFLOW (arg0)
	      && (n > 0
		  || (!flag_trapping_math && !flag_errno_math)
		  || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
	    {
	      REAL_VALUE_TYPE x;
	      bool inexact;

	      x = TREE_REAL_CST (arg0);
	      inexact = real_powi (&x, TYPE_MODE (type), &x, n);
	      /* Only use an inexact compile-time result when unsafe math
		 optimizations permit rounding differences.  */
	      if (flag_unsafe_math_optimizations || !inexact)
		return build_real (type, x);
	    }

	  /* Strip sign ops from even integer powers.  */
	  if ((n & 1) == 0 && flag_unsafe_math_optimizations)
	    {
	      tree narg0 = fold_strip_sign_ops (arg0);
	      if (narg0)
		return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
	    }
	}
    }

  /* Transforms driven by the form of the base expression.  */
  if (flag_unsafe_math_optimizations)
    {
      const enum built_in_function fcode = builtin_mathfn_code (arg0);

      /* Optimize pow(expN(x),y) = expN(x*y).  */
      if (BUILTIN_EXPONENT_P (fcode))
	{
	  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
	  tree arg = CALL_EXPR_ARG (arg0, 0);
	  arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
	  return build_call_expr_loc (loc, expfn, 1, arg);
	}

      /* Optimize pow(sqrt(x),y) = pow(x,y*0.5).  */
      if (BUILTIN_SQRT_P (fcode))
	{
	  tree narg0 = CALL_EXPR_ARG (arg0, 0);
	  tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
				    build_real (type, dconsthalf));
	  return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
	}

      /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative.  */
      if (BUILTIN_CBRT_P (fcode))
	{
	  tree arg = CALL_EXPR_ARG (arg0, 0);
	  if (tree_expr_nonnegative_p (arg))
	    {
	      const REAL_VALUE_TYPE dconstroot
		= real_value_truncate (TYPE_MODE (type), dconst_third ());
	      tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
					build_real (type, dconstroot));
	      return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
	    }
	}

      /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative.  */
      if (fcode == BUILT_IN_POW
	  || fcode == BUILT_IN_POWF
	  || fcode == BUILT_IN_POWL)
	{
	  tree arg00 = CALL_EXPR_ARG (arg0, 0);
	  if (tree_expr_nonnegative_p (arg00))
	    {
	      tree arg01 = CALL_EXPR_ARG (arg0, 1);
	      tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
	      return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
	    }
	}
    }

  return NULL_TREE;
}
7935
7936 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
7937 Return NULL_TREE if no simplification can be made. */
7938 static tree
7939 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
7940 tree arg0, tree arg1, tree type)
7941 {
7942 if (!validate_arg (arg0, REAL_TYPE)
7943 || !validate_arg (arg1, INTEGER_TYPE))
7944 return NULL_TREE;
7945
7946 /* Optimize pow(1.0,y) = 1.0. */
7947 if (real_onep (arg0))
7948 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
7949
7950 if (host_integerp (arg1, 0))
7951 {
7952 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
7953
7954 /* Evaluate powi at compile-time. */
7955 if (TREE_CODE (arg0) == REAL_CST
7956 && !TREE_OVERFLOW (arg0))
7957 {
7958 REAL_VALUE_TYPE x;
7959 x = TREE_REAL_CST (arg0);
7960 real_powi (&x, TYPE_MODE (type), &x, c);
7961 return build_real (type, x);
7962 }
7963
7964 /* Optimize pow(x,0) = 1.0. */
7965 if (c == 0)
7966 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
7967 arg0);
7968
7969 /* Optimize pow(x,1) = x. */
7970 if (c == 1)
7971 return arg0;
7972
7973 /* Optimize pow(x,-1) = 1.0/x. */
7974 if (c == -1)
7975 return fold_build2_loc (loc, RDIV_EXPR, type,
7976 build_real (type, dconst1), arg0);
7977 }
7978
7979 return NULL_TREE;
7980 }
7981
/* A subroutine of fold_builtin to fold the various exponent
   functions.  Return NULL_TREE if no simplification can be made.
   FUNC is the corresponding MPFR exponent function.  */

static tree
fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
		       int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
{
  if (validate_arg (arg, REAL_TYPE))
    {
      tree type = TREE_TYPE (TREE_TYPE (fndecl));
      tree res;

      /* Calculate the result when the argument is a constant.  */
      if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
	return res;

      /* Optimize expN(logN(x)) = x.  */
      if (flag_unsafe_math_optimizations)
	{
	  const enum built_in_function fcode = builtin_mathfn_code (arg);

	  /* FUNC identifies which exponent family is being folded; only
	     cancel it against the logarithm of the same base.  */
	  if ((func == mpfr_exp
	       && (fcode == BUILT_IN_LOG
		   || fcode == BUILT_IN_LOGF
		   || fcode == BUILT_IN_LOGL))
	      || (func == mpfr_exp2
		  && (fcode == BUILT_IN_LOG2
		      || fcode == BUILT_IN_LOG2F
		      || fcode == BUILT_IN_LOG2L))
	      || (func == mpfr_exp10
		  && (fcode == BUILT_IN_LOG10
		      || fcode == BUILT_IN_LOG10F
		      || fcode == BUILT_IN_LOG10L)))
	    return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
	}
    }

  return NULL_TREE;
}
8022
8023 /* Return true if VAR is a VAR_DECL or a component thereof. */
8024
8025 static bool
8026 var_decl_component_p (tree var)
8027 {
8028 tree inner = var;
8029 while (handled_component_p (inner))
8030 inner = TREE_OPERAND (inner, 0);
8031 return SSA_VAR_P (inner);
8032 }
8033
/* Fold function call to builtin memset.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_memset (location_t loc, tree dest, tree c, tree len,
		     tree type, bool ignore)
{
  tree var, ret, etype;
  unsigned HOST_WIDE_INT length, cval;

  if (! validate_arg (dest, POINTER_TYPE)
      || ! validate_arg (c, INTEGER_TYPE)
      || ! validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* The length must be a known non-negative constant.  */
  if (! host_integerp (len, 1))
    return NULL_TREE;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    return omit_one_operand_loc (loc, type, dest, c);

  if (! host_integerp (c, 1) || TREE_SIDE_EFFECTS (dest))
    return NULL_TREE;

  /* DEST must be the address of some object; otherwise we cannot fold
     the memset into a single store.  */
  var = dest;
  STRIP_NOPS (var);
  if (TREE_CODE (var) != ADDR_EXPR)
    return NULL_TREE;

  var = TREE_OPERAND (var, 0);
  if (TREE_THIS_VOLATILE (var))
    return NULL_TREE;

  etype = TREE_TYPE (var);
  if (TREE_CODE (etype) == ARRAY_TYPE)
    etype = TREE_TYPE (etype);

  if (!INTEGRAL_TYPE_P (etype)
      && !POINTER_TYPE_P (etype))
    return NULL_TREE;

  if (! var_decl_component_p (var))
    return NULL_TREE;

  /* The memset must cover exactly one object of type ETYPE and the
     destination must be sufficiently aligned for a direct store.  */
  length = tree_low_cst (len, 1);
  if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
      || get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT
	 < (int) length)
    return NULL_TREE;

  if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
    return NULL_TREE;

  if (integer_zerop (c))
    cval = 0;
  else
    {
      if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
	return NULL_TREE;

      /* Replicate the byte value across the host wide int.  The last
	 step is split into two shifts so we never shift by the full
	 width of a 32-bit HOST_WIDE_INT (which would be undefined).  */
      cval = tree_low_cst (c, 1);
      cval &= 0xff;
      cval |= cval << 8;
      cval |= cval << 16;
      cval |= (cval << 31) << 1;
    }

  /* Emit *(etype *)dest = cval in place of the memset call.  */
  ret = build_int_cst_type (etype, cval);
  var = build_fold_indirect_ref_loc (loc,
				     fold_convert_loc (loc,
						       build_pointer_type (etype),
						       dest));
  ret = build2 (MODIFY_EXPR, etype, var, ret);
  if (ignore)
    return ret;

  return omit_one_operand_loc (loc, type, dest, ret);
}
8113
8114 /* Fold function call to builtin memset. Return
8115 NULL_TREE if no simplification can be made. */
8116
8117 static tree
8118 fold_builtin_bzero (location_t loc, tree dest, tree size, bool ignore)
8119 {
8120 if (! validate_arg (dest, POINTER_TYPE)
8121 || ! validate_arg (size, INTEGER_TYPE))
8122 return NULL_TREE;
8123
8124 if (!ignore)
8125 return NULL_TREE;
8126
8127 /* New argument list transforming bzero(ptr x, int y) to
8128 memset(ptr x, int 0, size_t y). This is done this way
8129 so that if it isn't expanded inline, we fallback to
8130 calling bzero instead of memset. */
8131
8132 return fold_builtin_memset (loc, dest, integer_zero_node,
8133 fold_convert_loc (loc, sizetype, size),
8134 void_type_node, ignore);
8135 }
8136
/* Fold function call to builtin mem{{,p}cpy,move}.  Return
   NULL_TREE if no simplification can be made.
   If ENDP is 0, return DEST (like memcpy).
   If ENDP is 1, return DEST+LEN (like mempcpy).
   If ENDP is 2, return DEST+LEN-1 (like stpcpy).
   If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
   (memmove).  */

static tree
fold_builtin_memory_op (location_t loc, tree dest, tree src,
			tree len, tree type, bool ignore, int endp)
{
  tree destvar, srcvar, expr;

  if (! validate_arg (dest, POINTER_TYPE)
      || ! validate_arg (src, POINTER_TYPE)
      || ! validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    return omit_one_operand_loc (loc, type, dest, src);

  /* If SRC and DEST are the same (and not volatile), return
     DEST{,+LEN,+LEN-1}.  */
  if (operand_equal_p (src, dest, 0))
    expr = len;
  else
    {
      tree srctype, desttype;
      int src_align, dest_align;

      /* memmove: try to prove the regions cannot overlap so it can be
	 turned into memcpy; otherwise give up.  */
      if (endp == 3)
	{
	  src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
	  dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);

	  /* Both DEST and SRC must be pointer types.
	     ??? This is what old code did.  Is the testing for pointer types
	     really mandatory?

	     If either SRC is readonly or length is 1, we can use memcpy.  */
	  if (!dest_align || !src_align)
	    return NULL_TREE;
	  if (readonly_data_expr (src)
	      || (host_integerp (len, 1)
		  && (MIN (src_align, dest_align) / BITS_PER_UNIT
		      >= tree_low_cst (len, 1))))
	    {
	      tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
	      if (!fn)
		return NULL_TREE;
              return build_call_expr_loc (loc, fn, 3, dest, src, len);
	    }

	  /* If *src and *dest can't overlap, optimize into memcpy as well.  */
	  srcvar = build_fold_indirect_ref_loc (loc, src);
	  destvar = build_fold_indirect_ref_loc (loc, dest);
	  if (srcvar
	      && !TREE_THIS_VOLATILE (srcvar)
	      && destvar
	      && !TREE_THIS_VOLATILE (destvar))
	    {
	      tree src_base, dest_base, fn;
	      HOST_WIDE_INT src_offset = 0, dest_offset = 0;
	      HOST_WIDE_INT size = -1;
	      HOST_WIDE_INT maxsize = -1;

	      /* Find the base object and bit offset of each access.  */
	      src_base = srcvar;
	      if (handled_component_p (src_base))
		src_base = get_ref_base_and_extent (src_base, &src_offset,
						    &size, &maxsize);
	      dest_base = destvar;
	      if (handled_component_p (dest_base))
		dest_base = get_ref_base_and_extent (dest_base, &dest_offset,
						     &size, &maxsize);
	      /* Use LEN (converted to bits) as the extent of both
		 accesses, when it is a usable constant.  */
	      if (host_integerp (len, 1))
		{
		  maxsize = tree_low_cst (len, 1);
		  if (maxsize
		      > INTTYPE_MAXIMUM (HOST_WIDE_INT) / BITS_PER_UNIT)
		    maxsize = -1;
		  else
		    maxsize *= BITS_PER_UNIT;
		}
	      else
		maxsize = -1;
	      /* Distinct decls never overlap; the same decl overlaps
		 only if the offset ranges intersect.  */
	      if (SSA_VAR_P (src_base)
		  && SSA_VAR_P (dest_base))
		{
		  if (operand_equal_p (src_base, dest_base, 0)
		      && ranges_overlap_p (src_offset, maxsize,
					   dest_offset, maxsize))
		    return NULL_TREE;
		}
	      else if (TREE_CODE (src_base) == INDIRECT_REF
		       && TREE_CODE (dest_base) == INDIRECT_REF)
		{
		  if (! operand_equal_p (TREE_OPERAND (src_base, 0),
					 TREE_OPERAND (dest_base, 0), 0)
		      || ranges_overlap_p (src_offset, maxsize,
					   dest_offset, maxsize))
		    return NULL_TREE;
		}
	      else
		return NULL_TREE;

	      fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
	      if (!fn)
		return NULL_TREE;
	      return build_call_expr_loc (loc, fn, 3, dest, src, len);
	    }
	  return NULL_TREE;
	}

      /* mem{,p}cpy / stpcpy: try to fold the copy into a single
	 load/store pair when LEN matches the pointed-to type size.  */
      if (!host_integerp (len, 0))
	return NULL_TREE;
      /* FIXME:
         This logic lose for arguments like (type *)malloc (sizeof (type)),
         since we strip the casts of up to VOID return value from malloc.
	 Perhaps we ought to inherit type from non-VOID argument here?  */
      STRIP_NOPS (src);
      STRIP_NOPS (dest);
      /* As we fold (void *)(p + CST) to (void *)p + CST undo this here.  */
      if (TREE_CODE (src) == POINTER_PLUS_EXPR)
	{
	  tree tem = TREE_OPERAND (src, 0);
	  STRIP_NOPS (tem);
	  if (tem != TREE_OPERAND (src, 0))
	    src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
	}
      if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
	{
	  tree tem = TREE_OPERAND (dest, 0);
	  STRIP_NOPS (tem);
	  if (tem != TREE_OPERAND (dest, 0))
	    dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
	}
      /* For arrays whose total size differs from LEN, retry with the
	 element type instead.  */
      srctype = TREE_TYPE (TREE_TYPE (src));
      if (srctype
	  && TREE_CODE (srctype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
	{
	  srctype = TREE_TYPE (srctype);
	  STRIP_NOPS (src);
	  src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
	}
      desttype = TREE_TYPE (TREE_TYPE (dest));
      if (desttype
	  && TREE_CODE (desttype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
	{
	  desttype = TREE_TYPE (desttype);
	  STRIP_NOPS (dest);
	  dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
	}
      /* Both types must have a constant size and be non-volatile.  */
      if (!srctype || !desttype
	  || !TYPE_SIZE_UNIT (srctype)
	  || !TYPE_SIZE_UNIT (desttype)
	  || TREE_CODE (TYPE_SIZE_UNIT (srctype)) != INTEGER_CST
	  || TREE_CODE (TYPE_SIZE_UNIT (desttype)) != INTEGER_CST
	  || TYPE_VOLATILE (srctype)
	  || TYPE_VOLATILE (desttype))
	return NULL_TREE;

      src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
      dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
      if (dest_align < (int) TYPE_ALIGN (desttype)
	  || src_align < (int) TYPE_ALIGN (srctype))
	return NULL_TREE;

      /* DEST is used again below to form the return value; protect it
	 from double evaluation.  */
      if (!ignore)
        dest = builtin_save_expr (dest);

      srcvar = NULL_TREE;
      if (tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
	{
	  srcvar = build_fold_indirect_ref_loc (loc, src);
	  if (TREE_THIS_VOLATILE (srcvar))
	    return NULL_TREE;
	  else if (!tree_int_cst_equal (tree_expr_size (srcvar), len))
	    srcvar = NULL_TREE;
	  /* With memcpy, it is possible to bypass aliasing rules, so without
	     this check i.e. execute/20060930-2.c would be misoptimized,
	     because it use conflicting alias set to hold argument for the
	     memcpy call.  This check is probably unnecessary with
	     -fno-strict-aliasing.  Similarly for destvar.  See also
	     PR29286.  */
	  else if (!var_decl_component_p (srcvar))
	    srcvar = NULL_TREE;
	}

      destvar = NULL_TREE;
      if (tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
	{
	  destvar = build_fold_indirect_ref_loc (loc, dest);
	  if (TREE_THIS_VOLATILE (destvar))
	    return NULL_TREE;
	  else if (!tree_int_cst_equal (tree_expr_size (destvar), len))
	    destvar = NULL_TREE;
	  else if (!var_decl_component_p (destvar))
	    destvar = NULL_TREE;
	}

      if (srcvar == NULL_TREE && destvar == NULL_TREE)
	return NULL_TREE;

      /* When only one side is usable, synthesize the other side's
	 reference by casting its pointer to the usable side's type
	 (with alignment adjusted when necessary).  */
      if (srcvar == NULL_TREE)
	{
	  tree srcptype;
	  if (TREE_ADDRESSABLE (TREE_TYPE (destvar)))
	    return NULL_TREE;

	  srctype = build_qualified_type (desttype, 0);
	  if (src_align < (int) TYPE_ALIGN (srctype))
	    {
	      if (AGGREGATE_TYPE_P (srctype)
		  || SLOW_UNALIGNED_ACCESS (TYPE_MODE (srctype), src_align))
		return NULL_TREE;

	      srctype = build_variant_type_copy (srctype);
	      TYPE_ALIGN (srctype) = src_align;
	      TYPE_USER_ALIGN (srctype) = 1;
	      TYPE_PACKED (srctype) = 1;
	    }
	  srcptype = build_pointer_type_for_mode (srctype, ptr_mode, true);
	  src = fold_convert_loc (loc, srcptype, src);
	  srcvar = build_fold_indirect_ref_loc (loc, src);
	}
      else if (destvar == NULL_TREE)
	{
	  tree destptype;
	  if (TREE_ADDRESSABLE (TREE_TYPE (srcvar)))
	    return NULL_TREE;

	  desttype = build_qualified_type (srctype, 0);
	  if (dest_align < (int) TYPE_ALIGN (desttype))
	    {
	      if (AGGREGATE_TYPE_P (desttype)
		  || SLOW_UNALIGNED_ACCESS (TYPE_MODE (desttype), dest_align))
		return NULL_TREE;

	      desttype = build_variant_type_copy (desttype);
	      TYPE_ALIGN (desttype) = dest_align;
	      TYPE_USER_ALIGN (desttype) = 1;
	      TYPE_PACKED (desttype) = 1;
	    }
	  destptype = build_pointer_type_for_mode (desttype, ptr_mode, true);
	  dest = fold_convert_loc (loc, destptype, dest);
	  destvar = build_fold_indirect_ref_loc (loc, dest);
	}

      /* Build the assignment, converting the loaded value to the
	 destination type if needed.  */
      if (srctype == desttype
	  || (gimple_in_ssa_p (cfun)
	      && useless_type_conversion_p (desttype, srctype)))
	expr = srcvar;
      else if ((INTEGRAL_TYPE_P (TREE_TYPE (srcvar))
	   || POINTER_TYPE_P (TREE_TYPE (srcvar)))
	  && (INTEGRAL_TYPE_P (TREE_TYPE (destvar))
	      || POINTER_TYPE_P (TREE_TYPE (destvar))))
	expr = fold_convert_loc (loc, TREE_TYPE (destvar), srcvar);
      else
	expr = fold_build1_loc (loc, VIEW_CONVERT_EXPR,
			    TREE_TYPE (destvar), srcvar);
      expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, expr);
    }

  if (ignore)
    return expr;

  /* memcpy and memmove return DEST.  */
  if (endp == 0 || endp == 3)
    return omit_one_operand_loc (loc, type, dest, expr);

  if (expr == len)
    expr = NULL_TREE;

  /* stpcpy returns DEST + LEN - 1; mempcpy returns DEST + LEN.  */
  if (endp == 2)
    len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
		       ssize_int (1));

  len = fold_convert_loc (loc, sizetype, len);
  dest = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
  dest = fold_convert_loc (loc, type, dest);
  if (expr)
    dest = omit_one_operand_loc (loc, type, dest, expr);
  return dest;
}
8424
/* Fold function call to builtin strcpy with arguments DEST and SRC.
   If LEN is not NULL, it represents the length of the string to be
   copied.  Return NULL_TREE if no simplification can be made.  */

tree
fold_builtin_strcpy (location_t loc, tree fndecl, tree dest, tree src, tree len)
{
  tree fn;

  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (src, POINTER_TYPE))
    return NULL_TREE;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (operand_equal_p (src, dest, 0))
    return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);

  /* The memcpy transformation below may enlarge code; skip it when
     optimizing this function for size.  */
  if (optimize_function_for_size_p (cfun))
    return NULL_TREE;

  fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
  if (!fn)
    return NULL_TREE;

  /* If the caller did not supply the length, it must be computable
     from SRC and free of side effects.  */
  if (!len)
    {
      len = c_strlen (src, 1);
      if (! len || TREE_SIDE_EFFECTS (len))
	return NULL_TREE;
    }

  /* Copy LEN + 1 bytes so the terminating NUL is included.  */
  len = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
  return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
			   build_call_expr_loc (loc, fn, 3, dest, src, len));
}
8460
/* Fold function call to builtin stpcpy with arguments DEST and SRC.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_stpcpy (location_t loc, tree fndecl, tree dest, tree src)
{
  tree fn, len, lenp1, call, type;

  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (src, POINTER_TYPE))
    return NULL_TREE;

  /* The length of SRC must be a compile-time constant, because the
     result DEST + LEN is computed below.  */
  len = c_strlen (src, 1);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return NULL_TREE;

  if (optimize_function_for_size_p (cfun)
      /* If length is zero it's small enough.  */
      && !integer_zerop (len))
    return NULL_TREE;

  fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
  if (!fn)
    return NULL_TREE;

  /* Copy LEN + 1 bytes to include the terminating NUL.  */
  lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
  /* We use dest twice in building our expression.  Save it from
     multiple expansions.  */
  dest = builtin_save_expr (dest);
  call = build_call_expr_loc (loc, fn, 3, dest, src, lenp1);

  /* stpcpy returns a pointer to the terminating NUL, i.e. DEST + LEN.  */
  type = TREE_TYPE (TREE_TYPE (fndecl));
  len = fold_convert_loc (loc, sizetype, len);
  dest = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
  dest = fold_convert_loc (loc, type, dest);
  dest = omit_one_operand_loc (loc, type, dest, call);
  return dest;
}
8500
/* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
   If SLEN is not NULL, it represents the length of the source string.
   Return NULL_TREE if no simplification can be made.  */

tree
fold_builtin_strncpy (location_t loc, tree fndecl, tree dest,
		      tree src, tree len, tree slen)
{
  tree fn;

  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (src, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);

  /* We can't compare slen with len as constants below if len is not a
     constant.  */
  if (len == 0 || TREE_CODE (len) != INTEGER_CST)
    return NULL_TREE;

  if (!slen)
    slen = c_strlen (src, 1);

  /* Now, we must be passed a constant src ptr parameter.  */
  if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
    return NULL_TREE;

  /* Account for the terminating NUL that the copy includes.  */
  slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

  /* We do not support simplification of this case, though we do
     support it when expanding trees into RTL.  */
  /* FIXME: generate a call to __builtin_memset.  */
  if (tree_int_cst_lt (slen, len))
    return NULL_TREE;

  /* OK transform into builtin memcpy.  */
  fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
  if (!fn)
    return NULL_TREE;
  return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
			   build_call_expr_loc (loc, fn, 3, dest, src, len));
}
8547
8548 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8549 arguments to the call, and TYPE is its return type.
8550 Return NULL_TREE if no simplification can be made. */
8551
8552 static tree
8553 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
8554 {
8555 if (!validate_arg (arg1, POINTER_TYPE)
8556 || !validate_arg (arg2, INTEGER_TYPE)
8557 || !validate_arg (len, INTEGER_TYPE))
8558 return NULL_TREE;
8559 else
8560 {
8561 const char *p1;
8562
8563 if (TREE_CODE (arg2) != INTEGER_CST
8564 || !host_integerp (len, 1))
8565 return NULL_TREE;
8566
8567 p1 = c_getstr (arg1);
8568 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
8569 {
8570 char c;
8571 const char *r;
8572 tree tem;
8573
8574 if (target_char_cast (arg2, &c))
8575 return NULL_TREE;
8576
8577 r = (char *) memchr (p1, c, tree_low_cst (len, 1));
8578
8579 if (r == NULL)
8580 return build_int_cst (TREE_TYPE (arg1), 0);
8581
8582 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (arg1), arg1,
8583 size_int (r - p1));
8584 return fold_convert_loc (loc, type, tem);
8585 }
8586 return NULL_TREE;
8587 }
8588 }
8589
/* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
{
  const char *p1, *p2;

  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return zero.  */
  if (integer_zerop (len))
    return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
				  arg1, arg2);

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);

  p1 = c_getstr (arg1);
  p2 = c_getstr (arg2);

  /* If all arguments are constant, and the value of len is not greater
     than the lengths of arg1 and arg2, evaluate at compile-time.  */
  if (host_integerp (len, 1) && p1 && p2
      && compare_tree_int (len, strlen (p1) + 1) <= 0
      && compare_tree_int (len, strlen (p2) + 1) <= 0)
    {
      const int r = memcmp (p1, p2, tree_low_cst (len, 1));

      /* Normalize the host memcmp result to -1/0/1 so the folded value
	 does not depend on the host C library.  */
      if (r > 0)
	return integer_one_node;
      else if (r < 0)
	return integer_minus_one_node;
      else
	return integer_zero_node;
    }

  /* If len parameter is one, return an expression corresponding to
     (*(const unsigned char*)arg1 - (const unsigned char*)arg2).  */
  if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree ind1
	= fold_convert_loc (loc, integer_type_node,
			    build1 (INDIRECT_REF, cst_uchar_node,
				    fold_convert_loc (loc,
						      cst_uchar_ptr_node,
						      arg1)));
      tree ind2
	= fold_convert_loc (loc, integer_type_node,
			    build1 (INDIRECT_REF, cst_uchar_node,
				    fold_convert_loc (loc,
						      cst_uchar_ptr_node,
						      arg2)));
      return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
    }

  return NULL_TREE;
}
8656
/* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
{
  const char *p1, *p2;

  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE))
    return NULL_TREE;

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return integer_zero_node;

  p1 = c_getstr (arg1);
  p2 = c_getstr (arg2);

  /* Both strings constant: evaluate at compile time, normalizing the
     host strcmp result to -1/0/1.  */
  if (p1 && p2)
    {
      const int i = strcmp (p1, p2);
      if (i < 0)
	return integer_minus_one_node;
      else if (i > 0)
	return integer_one_node;
      else
	return integer_zero_node;
    }

  /* If the second arg is "", return *(const unsigned char*)arg1.  */
  if (p2 && *p2 == '\0')
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      return fold_convert_loc (loc, integer_type_node,
			       build1 (INDIRECT_REF, cst_uchar_node,
				       fold_convert_loc (loc,
							 cst_uchar_ptr_node,
							 arg1)));
    }

  /* If the first arg is "", return -*(const unsigned char*)arg2.  */
  if (p1 && *p1 == '\0')
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree temp
	= fold_convert_loc (loc, integer_type_node,
			    build1 (INDIRECT_REF, cst_uchar_node,
				    fold_convert_loc (loc,
						      cst_uchar_ptr_node,
						      arg2)));
      return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
    }

  return NULL_TREE;
}
8719
/* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
{
  const char *p1, *p2;

  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return zero.  */
  if (integer_zerop (len))
    return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
				  arg1, arg2);

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);

  p1 = c_getstr (arg1);
  p2 = c_getstr (arg2);

  /* All arguments constant: evaluate at compile time, normalizing the
     host strncmp result to -1/0/1.  */
  if (host_integerp (len, 1) && p1 && p2)
    {
      const int i = strncmp (p1, p2, tree_low_cst (len, 1));
      if (i > 0)
	return integer_one_node;
      else if (i < 0)
	return integer_minus_one_node;
      else
	return integer_zero_node;
    }

  /* If the second arg is "", and the length is greater than zero,
     return *(const unsigned char*)arg1.  */
  if (p2 && *p2 == '\0'
      && TREE_CODE (len) == INTEGER_CST
      && tree_int_cst_sgn (len) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      return fold_convert_loc (loc, integer_type_node,
			       build1 (INDIRECT_REF, cst_uchar_node,
				       fold_convert_loc (loc,
							 cst_uchar_ptr_node,
							 arg1)));
    }

  /* If the first arg is "", and the length is greater than zero,
     return -*(const unsigned char*)arg2.  */
  if (p1 && *p1 == '\0'
      && TREE_CODE (len) == INTEGER_CST
      && tree_int_cst_sgn (len) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree temp = fold_convert_loc (loc, integer_type_node,
				    build1 (INDIRECT_REF, cst_uchar_node,
					    fold_convert_loc (loc,
							      cst_uchar_ptr_node,
							      arg2)));
      return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
    }

  /* If len parameter is one, return an expression corresponding to
     (*(const unsigned char*)arg1 - (const unsigned char*)arg2).  */
  if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree ind1 = fold_convert_loc (loc, integer_type_node,
				    build1 (INDIRECT_REF, cst_uchar_node,
					    fold_convert_loc (loc,
							      cst_uchar_ptr_node,
							      arg1)));
      tree ind2 = fold_convert_loc (loc, integer_type_node,
				    build1 (INDIRECT_REF, cst_uchar_node,
					    fold_convert_loc (loc,
							      cst_uchar_ptr_node,
							      arg2)));
      return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
    }

  return NULL_TREE;
}
8814
/* Fold function call to builtin signbit, signbitf or signbitl with argument
   ARG.  TYPE is the integer return type of the call.  Return NULL_TREE if
   no simplification can be made.  */

static tree
fold_builtin_signbit (location_t loc, tree arg, tree type)
{
  tree temp;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* If ARG is a compile-time constant, determine the result.  */
  if (TREE_CODE (arg) == REAL_CST
      && !TREE_OVERFLOW (arg))
    {
      REAL_VALUE_TYPE c;

      c = TREE_REAL_CST (arg);
      /* Any nonzero result works; we use 1 for a set sign bit.  */
      temp = REAL_VALUE_NEGATIVE (c) ? integer_one_node : integer_zero_node;
      return fold_convert_loc (loc, type, temp);
    }

  /* If ARG is non-negative, the result is always zero.  */
  if (tree_expr_nonnegative_p (arg))
    return omit_one_operand_loc (loc, type, integer_zero_node, arg);

  /* If ARG's format doesn't have signed zeros, return "arg < 0.0".
     With signed zeros this would be wrong: -0.0 < 0.0 is false even
     though signbit(-0.0) is set.  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
    return fold_build2_loc (loc, LT_EXPR, type, arg,
			build_real (TREE_TYPE (arg), dconst0));

  return NULL_TREE;
}
8848
/* Fold function call to builtin copysign, copysignf or copysignl with
   arguments ARG1 and ARG2.  TYPE is the real return type of the call
   and FNDECL its declaration (used to rebuild the call when only the
   first argument simplifies).  Return NULL_TREE if no simplification
   can be made.  */

static tree
fold_builtin_copysign (location_t loc, tree fndecl,
		       tree arg1, tree arg2, tree type)
{
  tree tem;

  if (!validate_arg (arg1, REAL_TYPE)
      || !validate_arg (arg2, REAL_TYPE))
    return NULL_TREE;

  /* copysign(X,X) is X.  */
  if (operand_equal_p (arg1, arg2, 0))
    return fold_convert_loc (loc, type, arg1);

  /* If ARG1 and ARG2 are compile-time constants, determine the result.  */
  if (TREE_CODE (arg1) == REAL_CST
      && TREE_CODE (arg2) == REAL_CST
      && !TREE_OVERFLOW (arg1)
      && !TREE_OVERFLOW (arg2))
    {
      REAL_VALUE_TYPE c1, c2;

      c1 = TREE_REAL_CST (arg1);
      c2 = TREE_REAL_CST (arg2);
      /* c1.sign := c2.sign.  */
      real_copysign (&c1, &c2);
      return build_real (type, c1);
    }

  /* copysign(X, Y) is fabs(X) when Y is always non-negative.
     Remember to evaluate Y for side-effects.  */
  if (tree_expr_nonnegative_p (arg2))
    return omit_one_operand_loc (loc, type,
			     fold_build1_loc (loc, ABS_EXPR, type, arg1),
			     arg2);

  /* Strip sign changing operations for the first argument, since
     copysign overwrites its sign anyway.  */
  tem = fold_strip_sign_ops (arg1);
  if (tem)
    return build_call_expr_loc (loc, fndecl, 2, tem, arg2);

  return NULL_TREE;
}
8896
8897 /* Fold a call to builtin isascii with argument ARG. */
8898
8899 static tree
8900 fold_builtin_isascii (location_t loc, tree arg)
8901 {
8902 if (!validate_arg (arg, INTEGER_TYPE))
8903 return NULL_TREE;
8904 else
8905 {
8906 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
8907 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
8908 build_int_cst (NULL_TREE,
8909 ~ (unsigned HOST_WIDE_INT) 0x7f));
8910 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
8911 arg, integer_zero_node);
8912 }
8913 }
8914
8915 /* Fold a call to builtin toascii with argument ARG. */
8916
8917 static tree
8918 fold_builtin_toascii (location_t loc, tree arg)
8919 {
8920 if (!validate_arg (arg, INTEGER_TYPE))
8921 return NULL_TREE;
8922
8923 /* Transform toascii(c) -> (c & 0x7f). */
8924 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
8925 build_int_cst (NULL_TREE, 0x7f));
8926 }
8927
8928 /* Fold a call to builtin isdigit with argument ARG. */
8929
8930 static tree
8931 fold_builtin_isdigit (location_t loc, tree arg)
8932 {
8933 if (!validate_arg (arg, INTEGER_TYPE))
8934 return NULL_TREE;
8935 else
8936 {
8937 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
8938 /* According to the C standard, isdigit is unaffected by locale.
8939 However, it definitely is affected by the target character set. */
8940 unsigned HOST_WIDE_INT target_digit0
8941 = lang_hooks.to_target_charset ('0');
8942
8943 if (target_digit0 == 0)
8944 return NULL_TREE;
8945
8946 arg = fold_convert_loc (loc, unsigned_type_node, arg);
8947 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8948 build_int_cst (unsigned_type_node, target_digit0));
8949 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
8950 build_int_cst (unsigned_type_node, 9));
8951 }
8952 }
8953
8954 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
8955
8956 static tree
8957 fold_builtin_fabs (location_t loc, tree arg, tree type)
8958 {
8959 if (!validate_arg (arg, REAL_TYPE))
8960 return NULL_TREE;
8961
8962 arg = fold_convert_loc (loc, type, arg);
8963 if (TREE_CODE (arg) == REAL_CST)
8964 return fold_abs_const (arg, type);
8965 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8966 }
8967
8968 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
8969
8970 static tree
8971 fold_builtin_abs (location_t loc, tree arg, tree type)
8972 {
8973 if (!validate_arg (arg, INTEGER_TYPE))
8974 return NULL_TREE;
8975
8976 arg = fold_convert_loc (loc, type, arg);
8977 if (TREE_CODE (arg) == INTEGER_CST)
8978 return fold_abs_const (arg, type);
8979 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8980 }
8981
/* Fold a call to builtin fmin or fmax.  ARG0 and ARG1 are the two
   arguments, TYPE the real return type, and MAX is true for fmax,
   false for fmin.  Return NULL_TREE if no simplification can be
   made.  */

static tree
fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
			tree type, bool max)
{
  if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
    {
      /* Calculate the result when the argument is a constant.  */
      tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));

      if (res)
	return res;

      /* If either argument is NaN, return the other one.  Avoid the
	 transformation if we get (and honor) a signalling NaN.  Using
	 omit_one_operand() ensures we create a non-lvalue.  */
      if (TREE_CODE (arg0) == REAL_CST
	  && real_isnan (&TREE_REAL_CST (arg0))
	  && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	      || ! TREE_REAL_CST (arg0).signalling))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      if (TREE_CODE (arg1) == REAL_CST
	  && real_isnan (&TREE_REAL_CST (arg1))
	  && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
	      || ! TREE_REAL_CST (arg1).signalling))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* Transform fmin/fmax(x,x) -> x.  OEP_PURE_SAME allows the two
	 operands to be calls to the same pure function.  */
      if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR.  C99 requires these
	 functions to return the numeric arg if the other one is NaN.
	 These tree codes don't honor that, so only transform if
	 -ffinite-math-only is set.  C99 doesn't require -0.0 to be
	 handled, so we don't have to worry about it either.  */
      if (flag_finite_math_only)
	return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
			    fold_convert_loc (loc, type, arg0),
			    fold_convert_loc (loc, type, arg1));
    }
  return NULL_TREE;
}
9026
/* Fold a call to builtin carg(a+bi) -> atan2(b,a).  TYPE is the real
   return type of the call.  Return NULL_TREE if ARG is not a complex
   value with real components or no atan2 built-in exists for TYPE.  */

static tree
fold_builtin_carg (location_t loc, tree arg, tree type)
{
  if (validate_arg (arg, COMPLEX_TYPE)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
    {
      tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);

      if (atan2_fn)
	{
	  /* Save ARG so it is evaluated only once despite being used
	     for both the real and imaginary parts.  */
	  tree new_arg = builtin_save_expr (arg);
	  tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
	  tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
	  /* atan2 takes the imaginary part first: atan2(b, a).  */
	  return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
	}
    }

  return NULL_TREE;
}
9048
/* Fold a call to builtin logb/ilogb.  ARG is the argument and RETTYPE
   the return type of the call: real for logb, integer for ilogb.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_logb (location_t loc, tree arg, tree rettype)
{
  if (! validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);

      switch (value->cl)
	{
	case rvc_nan:
	case rvc_inf:
	  /* If arg is Inf or NaN and we're logb, return it.  */
	  if (TREE_CODE (rettype) == REAL_TYPE)
	    return fold_convert_loc (loc, rettype, arg);
	  /* Fall through... */
	case rvc_zero:
	  /* Zero may set errno and/or raise an exception for logb, also
	     for ilogb we don't know FP_ILOGB0.  */
	  return NULL_TREE;
	case rvc_normal:
	  /* For normal numbers, proceed iff radix == 2.  In GCC,
	     normalized significands are in the range [0.5, 1.0).  We
	     want the exponent as if they were [1.0, 2.0) so get the
	     exponent and subtract 1.  */
	  if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
	    return fold_convert_loc (loc, rettype,
				 build_int_cst (NULL_TREE,
						REAL_EXP (value)-1));
	  break;
	}
    }

  return NULL_TREE;
}
9090
/* Fold a call to builtin significand, if radix == 2.  ARG is the
   argument and RETTYPE the real return type of the call.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_significand (location_t loc, tree arg, tree rettype)
{
  if (! validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);

      switch (value->cl)
	{
	case rvc_zero:
	case rvc_nan:
	case rvc_inf:
	  /* If arg is +-0, +-Inf or +-NaN, then return it.  */
	  return fold_convert_loc (loc, rettype, arg);
	case rvc_normal:
	  /* For normal numbers, proceed iff radix == 2.  */
	  if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
	    {
	      REAL_VALUE_TYPE result = *value;
	      /* In GCC, normalized significands are in the range [0.5,
		 1.0).  We want them to be [1.0, 2.0) so set the
		 exponent to 1.  */
	      SET_REAL_EXP (&result, 1);
	      return build_real (rettype, result);
	    }
	  break;
	}
    }

  return NULL_TREE;
}
9129
/* Fold a call to builtin frexp, we can assume the base is 2.  ARG0 is
   the value to decompose, ARG1 the int* through which the exponent is
   stored, and RETTYPE the real return type.  Folds only when ARG0 is
   a valid real constant; returns NULL_TREE otherwise.  */

static tree
fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
{
  if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg0);

  if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
    return NULL_TREE;

  arg1 = build_fold_indirect_ref_loc (loc, arg1);

  /* Proceed if a valid pointer type was passed in.  */
  if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
      tree frac, exp;

      switch (value->cl)
	{
	case rvc_zero:
	  /* For +-0, return (*exp = 0, +-0).  */
	  exp = integer_zero_node;
	  frac = arg0;
	  break;
	case rvc_nan:
	case rvc_inf:
	  /* For +-NaN or +-Inf, *exp is unspecified, return arg0.  */
	  return omit_one_operand_loc (loc, rettype, arg0, arg1);
	case rvc_normal:
	  {
	    /* Since the frexp function always expects base 2, and in
	       GCC normalized significands are already in the range
	       [0.5, 1.0), we have exactly what frexp wants.  */
	    REAL_VALUE_TYPE frac_rvt = *value;
	    SET_REAL_EXP (&frac_rvt, 0);
	    frac = build_real (rettype, frac_rvt);
	    exp = build_int_cst (NULL_TREE, REAL_EXP (value));
	  }
	  break;
	default:
	  gcc_unreachable ();
	}

      /* Create the COMPOUND_EXPR (*arg1 = trunc, frac).  */
      arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
      TREE_SIDE_EFFECTS (arg1) = 1;
      return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
    }

  return NULL_TREE;
}
9185
/* Fold a call to builtin ldexp or scalbn/scalbln.  If LDEXP is true
   then we can assume the base is two.  If it's false, then we have to
   check the mode of the TYPE parameter in certain cases.  */

static tree
fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
			    tree type, bool ldexp)
{
  if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
    {
      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0.  */
      if (real_zerop (arg0) || integer_zerop (arg1)
	  || (TREE_CODE (arg0) == REAL_CST
	      && !real_isfinite (&TREE_REAL_CST (arg0))))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* If both arguments are constant, then try to evaluate it.
	 For scalbn/scalbln the adjustment is in units of the radix,
	 so folding with real_ldexp is only valid when radix == 2.  */
      if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
	  && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
	  && host_integerp (arg1, 0))
	{
	  /* Bound the maximum adjustment to twice the range of the
	     mode's valid exponents.  Use abs to ensure the range is
	     positive as a sanity check.  */
	  const long max_exp_adj = 2 *
	    labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
		 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);

	  /* Get the user-requested adjustment.  */
	  const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);

	  /* The requested adjustment must be inside this range.  This
	     is a preliminary cap to avoid things like overflow, we
	     may still fail to compute the result for other reasons.  */
	  if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
	    {
	      REAL_VALUE_TYPE initial_result;

	      real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);

	      /* Ensure we didn't overflow.  */
	      if (! real_isinf (&initial_result))
		{
		  const REAL_VALUE_TYPE trunc_result
		    = real_value_truncate (TYPE_MODE (type), initial_result);

		  /* Only proceed if the target mode can hold the
		     resulting value.  */
		  if (REAL_VALUES_EQUAL (initial_result, trunc_result))
		    return build_real (type, trunc_result);
		}
	    }
	}
    }

  return NULL_TREE;
}
9246
9247 /* Fold a call to builtin modf. */
9248
9249 static tree
9250 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9251 {
9252 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9253 return NULL_TREE;
9254
9255 STRIP_NOPS (arg0);
9256
9257 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9258 return NULL_TREE;
9259
9260 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9261
9262 /* Proceed if a valid pointer type was passed in. */
9263 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9264 {
9265 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9266 REAL_VALUE_TYPE trunc, frac;
9267
9268 switch (value->cl)
9269 {
9270 case rvc_nan:
9271 case rvc_zero:
9272 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9273 trunc = frac = *value;
9274 break;
9275 case rvc_inf:
9276 /* For +-Inf, return (*arg1 = arg0, +-0). */
9277 frac = dconst0;
9278 frac.sign = value->sign;
9279 trunc = *value;
9280 break;
9281 case rvc_normal:
9282 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9283 real_trunc (&trunc, VOIDmode, value);
9284 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9285 /* If the original number was negative and already
9286 integral, then the fractional part is -0.0. */
9287 if (value->sign && frac.cl == rvc_zero)
9288 frac.sign = value->sign;
9289 break;
9290 }
9291
9292 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9293 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9294 build_real (rettype, trunc));
9295 TREE_SIDE_EFFECTS (arg1) = 1;
9296 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9297 build_real (rettype, frac));
9298 }
9299
9300 return NULL_TREE;
9301 }
9302
/* Given a location LOC, an interclass builtin function decl FNDECL
   and its single argument ARG, return an folded expression computing
   the same, or NULL_TREE if we either couldn't or didn't want to fold
   (the latter happen if there's an RTL instruction available).  */

static tree
fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
{
  enum machine_mode mode;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Prefer the RTL expansion when the target has a direct insn.  */
  if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
    return NULL_TREE;

  mode = TYPE_MODE (TREE_TYPE (arg));

  /* If there is no optab, try generic code.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
      tree result;

    CASE_FLT_FN (BUILT_IN_ISINF):
      {
	/* isinf(x) -> isgreater(fabs(x),DBL_MAX).  */
	tree const isgr_fn = built_in_decls[BUILT_IN_ISGREATER];
	tree const type = TREE_TYPE (arg);
	REAL_VALUE_TYPE r;
	char buf[128];

	/* BUF receives the textual maximum finite value of MODE.  */
	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
	real_from_string (&r, buf);
	result = build_call_expr (isgr_fn, 2,
				  fold_build1_loc (loc, ABS_EXPR, type, arg),
				  build_real (type, r));
	return result;
      }
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_ISFINITE:
      {
	/* isfinite(x) -> islessequal(fabs(x),DBL_MAX).  */
	tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
	tree const type = TREE_TYPE (arg);
	REAL_VALUE_TYPE r;
	char buf[128];

	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
	real_from_string (&r, buf);
	result = build_call_expr (isle_fn, 2,
				  fold_build1_loc (loc, ABS_EXPR, type, arg),
				  build_real (type, r));
	/*result = fold_build2_loc (loc, UNGT_EXPR,
				  TREE_TYPE (TREE_TYPE (fndecl)),
				  fold_build1_loc (loc, ABS_EXPR, type, arg),
				  build_real (type, r));
	result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
				  TREE_TYPE (TREE_TYPE (fndecl)),
				  result);*/
	return result;
      }
    case BUILT_IN_ISNORMAL:
      {
	/* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
	   islessequal(fabs(x),DBL_MAX).  */
	tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
	tree const isge_fn = built_in_decls[BUILT_IN_ISGREATEREQUAL];
	tree const type = TREE_TYPE (arg);
	REAL_VALUE_TYPE rmax, rmin;
	char buf[128];

	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
	real_from_string (&rmax, buf);
	/* The smallest normal value is 2**(emin - 1).  */
	sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
	real_from_string (&rmin, buf);
	/* Save fabs(x) so the two comparisons evaluate it only once.  */
	arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
	result = build_call_expr (isle_fn, 2, arg,
				  build_real (type, rmax));
	result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
			      build_call_expr (isge_fn, 2, arg,
					       build_real (type, rmin)));
	return result;
      }
    default:
      break;
    }

  return NULL_TREE;
}
9392
/* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
   ARG is the argument for the call, FNDECL the function's declaration
   (used for its return type) and BUILTIN_INDEX selects which of the
   classification builtins to fold.  */

static tree
fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  REAL_VALUE_TYPE r;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  switch (builtin_index)
    {
    case BUILT_IN_ISINF:
      /* Without infinities in the mode no value can be infinite.  */
      if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg);

      if (TREE_CODE (arg) == REAL_CST)
	{
	  r = TREE_REAL_CST (arg);
	  if (real_isinf (&r))
	    return real_compare (GT_EXPR, &r, &dconst0)
		   ? integer_one_node : integer_minus_one_node;
	  else
	    return integer_zero_node;
	}

      return NULL_TREE;

    case BUILT_IN_ISINF_SIGN:
      {
	/* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
	/* In a boolean context, GCC will fold the inner COND_EXPR to
	   1.  So e.g. "if (isinf_sign(x))" would be folded to just
	   "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
	tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
	tree isinf_fn = built_in_decls[BUILT_IN_ISINF];
	tree tmp = NULL_TREE;

	/* ARG is used twice below; evaluate it only once.  */
	arg = builtin_save_expr (arg);

	if (signbit_fn && isinf_fn)
	  {
	    tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
	    tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);

	    /* Normalize both calls to 0/1 truth values.  */
	    signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
					signbit_call, integer_zero_node);
	    isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
				      isinf_call, integer_zero_node);

	    tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
			       integer_minus_one_node, integer_one_node);
	    tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
			       isinf_call, tmp,
			       integer_zero_node);
	  }

	return tmp;
      }

    case BUILT_IN_ISFINITE:
      /* If the mode has neither NaNs nor infinities, every value is
	 finite.  */
      if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
	  && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
	return omit_one_operand_loc (loc, type, integer_one_node, arg);

      if (TREE_CODE (arg) == REAL_CST)
	{
	  r = TREE_REAL_CST (arg);
	  return real_isfinite (&r) ? integer_one_node : integer_zero_node;
	}

      return NULL_TREE;

    case BUILT_IN_ISNAN:
      if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg);

      if (TREE_CODE (arg) == REAL_CST)
	{
	  r = TREE_REAL_CST (arg);
	  return real_isnan (&r) ? integer_one_node : integer_zero_node;
	}

      /* isnan(x) -> x unordered with itself; save ARG since it is
	 used as both operands.  */
      arg = builtin_save_expr (arg);
      return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);

    default:
      gcc_unreachable ();
    }
}
9485
/* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
   This builtin will generate code to return the appropriate floating
   point classification depending on the value of the floating point
   number passed in.  The possible return values must be supplied as
   int arguments to the call in the following order: FP_NAN, FP_INFINITE,
   FP_NORMAL, FP_SUBNORMAL and FP_ZERO.  The ellipsis is for exactly
   one floating point argument which is "type generic".  */

static tree
fold_builtin_fpclassify (location_t loc, tree exp)
{
  tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
    arg, type, res, tmp;
  enum machine_mode mode;
  REAL_VALUE_TYPE r;
  char buf[128];

  /* Verify the required arguments in the original call.  */
  if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
			 INTEGER_TYPE, INTEGER_TYPE,
			 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_TREE;

  fp_nan = CALL_EXPR_ARG (exp, 0);
  fp_infinite = CALL_EXPR_ARG (exp, 1);
  fp_normal = CALL_EXPR_ARG (exp, 2);
  fp_subnormal = CALL_EXPR_ARG (exp, 3);
  fp_zero = CALL_EXPR_ARG (exp, 4);
  arg = CALL_EXPR_ARG (exp, 5);
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (type);
  /* All the tests below are against fabs(x); save it once.  */
  arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));

  /* fpclassify(x) ->
       isnan(x) ? FP_NAN :
	 (fabs(x) == Inf ? FP_INFINITE :
	   (fabs(x) >= DBL_MIN ? FP_NORMAL :
	     (x == 0 ? FP_ZERO : FP_SUBNORMAL))).
     The chain is built innermost-first below.  */

  tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
		     build_real (type, dconst0));
  res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
		     tmp, fp_zero, fp_subnormal);

  /* 2**(emin - 1) is the smallest normal value.  */
  sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
  real_from_string (&r, buf);
  tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
		     arg, build_real (type, r));
  res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);

  if (HONOR_INFINITIES (mode))
    {
      real_inf (&r);
      tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
			 build_real (type, r));
      res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
			 fp_infinite, res);
    }

  if (HONOR_NANS (mode))
    {
      /* A NaN compares unordered with itself.  */
      tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
      res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
    }

  return res;
}
9553
/* Fold a call to an unordered comparison function such as
   __builtin_isgreater().  FNDECL is the FUNCTION_DECL for the function
   being called and ARG0 and ARG1 are the arguments for the call.
   UNORDERED_CODE and ORDERED_CODE are comparison codes that give
   the opposite of the desired result.  UNORDERED_CODE is used
   for modes that can hold NaNs and ORDERED_CODE is used for
   the rest.  */

static tree
fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
			    enum tree_code unordered_code,
			    enum tree_code ordered_code)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum tree_code code;
  tree type0, type1;
  enum tree_code code0, code1;
  tree cmp_type = NULL_TREE;

  type0 = TREE_TYPE (arg0);
  type1 = TREE_TYPE (arg1);

  code0 = TREE_CODE (type0);
  code1 = TREE_CODE (type1);

  if (code0 == REAL_TYPE && code1 == REAL_TYPE)
    /* Choose the wider of two real types.  */
    cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
      ? type0 : type1;
  else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
    cmp_type = type0;
  else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
    cmp_type = type1;

  /* Bring both operands to the common comparison type.  */
  arg0 = fold_convert_loc (loc, cmp_type, arg0);
  arg1 = fold_convert_loc (loc, cmp_type, arg1);

  if (unordered_code == UNORDERED_EXPR)
    {
      /* Without NaNs the operands are always ordered: result is 0,
	 but both must still be evaluated for side effects.  */
      if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
	return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
      return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
    }

  /* The codes passed in compute the opposite of the desired result,
     so build the comparison and negate it.  */
  code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
						   : ordered_code;
  return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
		      fold_build2_loc (loc, code, type, arg0, arg1));
}
9603
/* Fold a call to built-in function FNDECL with 0 arguments.
   IGNORE is true if the result of the function call is ignored.  This
   function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_0 (location_t loc, tree fndecl, bool ignore ATTRIBUTE_UNUSED)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  switch (fcode)
    {
    CASE_FLT_FN (BUILT_IN_INF):
    case BUILT_IN_INFD32:
    case BUILT_IN_INFD64:
    case BUILT_IN_INFD128:
      /* INF may warn when infinities are not supported (third
	 argument true); HUGE_VAL below never warns.  */
      return fold_builtin_inf (loc, type, true);

    CASE_FLT_FN (BUILT_IN_HUGE_VAL):
      return fold_builtin_inf (loc, type, false);

    case BUILT_IN_CLASSIFY_TYPE:
      /* classify_type with no argument classifies "no type".  */
      return fold_builtin_classify_type (NULL_TREE);

    default:
      break;
    }
  return NULL_TREE;
}
9632
9633 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9634 IGNORE is true if the result of the function call is ignored. This
9635 function returns NULL_TREE if no simplification was possible. */
9636
9637 static tree
9638 fold_builtin_1 (location_t loc, tree fndecl, tree arg0, bool ignore)
9639 {
9640 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9641 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9642 switch (fcode)
9643 {
9644
9645 case BUILT_IN_CONSTANT_P:
9646 {
9647 tree val = fold_builtin_constant_p (arg0);
9648
9649 /* Gimplification will pull the CALL_EXPR for the builtin out of
9650 an if condition. When not optimizing, we'll not CSE it back.
9651 To avoid link error types of regressions, return false now. */
9652 if (!val && !optimize)
9653 val = integer_zero_node;
9654
9655 return val;
9656 }
9657
9658 case BUILT_IN_CLASSIFY_TYPE:
9659 return fold_builtin_classify_type (arg0);
9660
9661 case BUILT_IN_STRLEN:
9662 return fold_builtin_strlen (loc, arg0);
9663
9664 CASE_FLT_FN (BUILT_IN_FABS):
9665 return fold_builtin_fabs (loc, arg0, type);
9666
9667 case BUILT_IN_ABS:
9668 case BUILT_IN_LABS:
9669 case BUILT_IN_LLABS:
9670 case BUILT_IN_IMAXABS:
9671 return fold_builtin_abs (loc, arg0, type);
9672
9673 CASE_FLT_FN (BUILT_IN_CONJ):
9674 if (validate_arg (arg0, COMPLEX_TYPE)
9675 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9676 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9677 break;
9678
9679 CASE_FLT_FN (BUILT_IN_CREAL):
9680 if (validate_arg (arg0, COMPLEX_TYPE)
9681 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9682 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));;
9683 break;
9684
9685 CASE_FLT_FN (BUILT_IN_CIMAG):
9686 if (validate_arg (arg0, COMPLEX_TYPE)
9687 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9688 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9689 break;
9690
9691 CASE_FLT_FN (BUILT_IN_CCOS):
9692 return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ false);
9693
9694 CASE_FLT_FN (BUILT_IN_CCOSH):
9695 return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ true);
9696
9697 CASE_FLT_FN (BUILT_IN_CSIN):
9698 if (validate_arg (arg0, COMPLEX_TYPE)
9699 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9700 return do_mpc_arg1 (arg0, type, mpc_sin);
9701 break;
9702
9703 CASE_FLT_FN (BUILT_IN_CSINH):
9704 if (validate_arg (arg0, COMPLEX_TYPE)
9705 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9706 return do_mpc_arg1 (arg0, type, mpc_sinh);
9707 break;
9708
9709 CASE_FLT_FN (BUILT_IN_CTAN):
9710 if (validate_arg (arg0, COMPLEX_TYPE)
9711 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9712 return do_mpc_arg1 (arg0, type, mpc_tan);
9713 break;
9714
9715 CASE_FLT_FN (BUILT_IN_CTANH):
9716 if (validate_arg (arg0, COMPLEX_TYPE)
9717 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9718 return do_mpc_arg1 (arg0, type, mpc_tanh);
9719 break;
9720
9721 CASE_FLT_FN (BUILT_IN_CLOG):
9722 if (validate_arg (arg0, COMPLEX_TYPE)
9723 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9724 return do_mpc_arg1 (arg0, type, mpc_log);
9725 break;
9726
9727 CASE_FLT_FN (BUILT_IN_CSQRT):
9728 if (validate_arg (arg0, COMPLEX_TYPE)
9729 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9730 return do_mpc_arg1 (arg0, type, mpc_sqrt);
9731 break;
9732
9733 CASE_FLT_FN (BUILT_IN_CASIN):
9734 if (validate_arg (arg0, COMPLEX_TYPE)
9735 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9736 return do_mpc_arg1 (arg0, type, mpc_asin);
9737 break;
9738
9739 CASE_FLT_FN (BUILT_IN_CACOS):
9740 if (validate_arg (arg0, COMPLEX_TYPE)
9741 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9742 return do_mpc_arg1 (arg0, type, mpc_acos);
9743 break;
9744
9745 CASE_FLT_FN (BUILT_IN_CATAN):
9746 if (validate_arg (arg0, COMPLEX_TYPE)
9747 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9748 return do_mpc_arg1 (arg0, type, mpc_atan);
9749 break;
9750
9751 CASE_FLT_FN (BUILT_IN_CASINH):
9752 if (validate_arg (arg0, COMPLEX_TYPE)
9753 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9754 return do_mpc_arg1 (arg0, type, mpc_asinh);
9755 break;
9756
9757 CASE_FLT_FN (BUILT_IN_CACOSH):
9758 if (validate_arg (arg0, COMPLEX_TYPE)
9759 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9760 return do_mpc_arg1 (arg0, type, mpc_acosh);
9761 break;
9762
9763 CASE_FLT_FN (BUILT_IN_CATANH):
9764 if (validate_arg (arg0, COMPLEX_TYPE)
9765 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9766 return do_mpc_arg1 (arg0, type, mpc_atanh);
9767 break;
9768
9769 CASE_FLT_FN (BUILT_IN_CABS):
9770 return fold_builtin_cabs (loc, arg0, type, fndecl);
9771
9772 CASE_FLT_FN (BUILT_IN_CARG):
9773 return fold_builtin_carg (loc, arg0, type);
9774
9775 CASE_FLT_FN (BUILT_IN_SQRT):
9776 return fold_builtin_sqrt (loc, arg0, type);
9777
9778 CASE_FLT_FN (BUILT_IN_CBRT):
9779 return fold_builtin_cbrt (loc, arg0, type);
9780
9781 CASE_FLT_FN (BUILT_IN_ASIN):
9782 if (validate_arg (arg0, REAL_TYPE))
9783 return do_mpfr_arg1 (arg0, type, mpfr_asin,
9784 &dconstm1, &dconst1, true);
9785 break;
9786
9787 CASE_FLT_FN (BUILT_IN_ACOS):
9788 if (validate_arg (arg0, REAL_TYPE))
9789 return do_mpfr_arg1 (arg0, type, mpfr_acos,
9790 &dconstm1, &dconst1, true);
9791 break;
9792
9793 CASE_FLT_FN (BUILT_IN_ATAN):
9794 if (validate_arg (arg0, REAL_TYPE))
9795 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
9796 break;
9797
9798 CASE_FLT_FN (BUILT_IN_ASINH):
9799 if (validate_arg (arg0, REAL_TYPE))
9800 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
9801 break;
9802
9803 CASE_FLT_FN (BUILT_IN_ACOSH):
9804 if (validate_arg (arg0, REAL_TYPE))
9805 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
9806 &dconst1, NULL, true);
9807 break;
9808
9809 CASE_FLT_FN (BUILT_IN_ATANH):
9810 if (validate_arg (arg0, REAL_TYPE))
9811 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
9812 &dconstm1, &dconst1, false);
9813 break;
9814
9815 CASE_FLT_FN (BUILT_IN_SIN):
9816 if (validate_arg (arg0, REAL_TYPE))
9817 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
9818 break;
9819
9820 CASE_FLT_FN (BUILT_IN_COS):
9821 return fold_builtin_cos (loc, arg0, type, fndecl);
9822
9823 CASE_FLT_FN (BUILT_IN_TAN):
9824 return fold_builtin_tan (arg0, type);
9825
9826 CASE_FLT_FN (BUILT_IN_CEXP):
9827 return fold_builtin_cexp (loc, arg0, type);
9828
9829 CASE_FLT_FN (BUILT_IN_CEXPI):
9830 if (validate_arg (arg0, REAL_TYPE))
9831 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
9832 break;
9833
9834 CASE_FLT_FN (BUILT_IN_SINH):
9835 if (validate_arg (arg0, REAL_TYPE))
9836 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
9837 break;
9838
9839 CASE_FLT_FN (BUILT_IN_COSH):
9840 return fold_builtin_cosh (loc, arg0, type, fndecl);
9841
9842 CASE_FLT_FN (BUILT_IN_TANH):
9843 if (validate_arg (arg0, REAL_TYPE))
9844 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
9845 break;
9846
9847 CASE_FLT_FN (BUILT_IN_ERF):
9848 if (validate_arg (arg0, REAL_TYPE))
9849 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
9850 break;
9851
9852 CASE_FLT_FN (BUILT_IN_ERFC):
9853 if (validate_arg (arg0, REAL_TYPE))
9854 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
9855 break;
9856
9857 CASE_FLT_FN (BUILT_IN_TGAMMA):
9858 if (validate_arg (arg0, REAL_TYPE))
9859 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
9860 break;
9861
9862 CASE_FLT_FN (BUILT_IN_EXP):
9863 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
9864
9865 CASE_FLT_FN (BUILT_IN_EXP2):
9866 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
9867
9868 CASE_FLT_FN (BUILT_IN_EXP10):
9869 CASE_FLT_FN (BUILT_IN_POW10):
9870 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
9871
9872 CASE_FLT_FN (BUILT_IN_EXPM1):
9873 if (validate_arg (arg0, REAL_TYPE))
9874 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
9875 break;
9876
9877 CASE_FLT_FN (BUILT_IN_LOG):
9878 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log);
9879
9880 CASE_FLT_FN (BUILT_IN_LOG2):
9881 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log2);
9882
9883 CASE_FLT_FN (BUILT_IN_LOG10):
9884 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log10);
9885
9886 CASE_FLT_FN (BUILT_IN_LOG1P):
9887 if (validate_arg (arg0, REAL_TYPE))
9888 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
9889 &dconstm1, NULL, false);
9890 break;
9891
9892 CASE_FLT_FN (BUILT_IN_J0):
9893 if (validate_arg (arg0, REAL_TYPE))
9894 return do_mpfr_arg1 (arg0, type, mpfr_j0,
9895 NULL, NULL, 0);
9896 break;
9897
9898 CASE_FLT_FN (BUILT_IN_J1):
9899 if (validate_arg (arg0, REAL_TYPE))
9900 return do_mpfr_arg1 (arg0, type, mpfr_j1,
9901 NULL, NULL, 0);
9902 break;
9903
9904 CASE_FLT_FN (BUILT_IN_Y0):
9905 if (validate_arg (arg0, REAL_TYPE))
9906 return do_mpfr_arg1 (arg0, type, mpfr_y0,
9907 &dconst0, NULL, false);
9908 break;
9909
9910 CASE_FLT_FN (BUILT_IN_Y1):
9911 if (validate_arg (arg0, REAL_TYPE))
9912 return do_mpfr_arg1 (arg0, type, mpfr_y1,
9913 &dconst0, NULL, false);
9914 break;
9915
9916 CASE_FLT_FN (BUILT_IN_NAN):
9917 case BUILT_IN_NAND32:
9918 case BUILT_IN_NAND64:
9919 case BUILT_IN_NAND128:
9920 return fold_builtin_nan (arg0, type, true);
9921
9922 CASE_FLT_FN (BUILT_IN_NANS):
9923 return fold_builtin_nan (arg0, type, false);
9924
9925 CASE_FLT_FN (BUILT_IN_FLOOR):
9926 return fold_builtin_floor (loc, fndecl, arg0);
9927
9928 CASE_FLT_FN (BUILT_IN_CEIL):
9929 return fold_builtin_ceil (loc, fndecl, arg0);
9930
9931 CASE_FLT_FN (BUILT_IN_TRUNC):
9932 return fold_builtin_trunc (loc, fndecl, arg0);
9933
9934 CASE_FLT_FN (BUILT_IN_ROUND):
9935 return fold_builtin_round (loc, fndecl, arg0);
9936
9937 CASE_FLT_FN (BUILT_IN_NEARBYINT):
9938 CASE_FLT_FN (BUILT_IN_RINT):
9939 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
9940
9941 CASE_FLT_FN (BUILT_IN_LCEIL):
9942 CASE_FLT_FN (BUILT_IN_LLCEIL):
9943 CASE_FLT_FN (BUILT_IN_LFLOOR):
9944 CASE_FLT_FN (BUILT_IN_LLFLOOR):
9945 CASE_FLT_FN (BUILT_IN_LROUND):
9946 CASE_FLT_FN (BUILT_IN_LLROUND):
9947 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
9948
9949 CASE_FLT_FN (BUILT_IN_LRINT):
9950 CASE_FLT_FN (BUILT_IN_LLRINT):
9951 return fold_fixed_mathfn (loc, fndecl, arg0);
9952
9953 case BUILT_IN_BSWAP32:
9954 case BUILT_IN_BSWAP64:
9955 return fold_builtin_bswap (fndecl, arg0);
9956
9957 CASE_INT_FN (BUILT_IN_FFS):
9958 CASE_INT_FN (BUILT_IN_CLZ):
9959 CASE_INT_FN (BUILT_IN_CTZ):
9960 CASE_INT_FN (BUILT_IN_POPCOUNT):
9961 CASE_INT_FN (BUILT_IN_PARITY):
9962 return fold_builtin_bitop (fndecl, arg0);
9963
9964 CASE_FLT_FN (BUILT_IN_SIGNBIT):
9965 return fold_builtin_signbit (loc, arg0, type);
9966
9967 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
9968 return fold_builtin_significand (loc, arg0, type);
9969
9970 CASE_FLT_FN (BUILT_IN_ILOGB):
9971 CASE_FLT_FN (BUILT_IN_LOGB):
9972 return fold_builtin_logb (loc, arg0, type);
9973
9974 case BUILT_IN_ISASCII:
9975 return fold_builtin_isascii (loc, arg0);
9976
9977 case BUILT_IN_TOASCII:
9978 return fold_builtin_toascii (loc, arg0);
9979
9980 case BUILT_IN_ISDIGIT:
9981 return fold_builtin_isdigit (loc, arg0);
9982
9983 CASE_FLT_FN (BUILT_IN_FINITE):
9984 case BUILT_IN_FINITED32:
9985 case BUILT_IN_FINITED64:
9986 case BUILT_IN_FINITED128:
9987 case BUILT_IN_ISFINITE:
9988 {
9989 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
9990 if (ret)
9991 return ret;
9992 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9993 }
9994
9995 CASE_FLT_FN (BUILT_IN_ISINF):
9996 case BUILT_IN_ISINFD32:
9997 case BUILT_IN_ISINFD64:
9998 case BUILT_IN_ISINFD128:
9999 {
10000 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10001 if (ret)
10002 return ret;
10003 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10004 }
10005
10006 case BUILT_IN_ISNORMAL:
10007 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10008
10009 case BUILT_IN_ISINF_SIGN:
10010 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10011
10012 CASE_FLT_FN (BUILT_IN_ISNAN):
10013 case BUILT_IN_ISNAND32:
10014 case BUILT_IN_ISNAND64:
10015 case BUILT_IN_ISNAND128:
10016 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10017
10018 case BUILT_IN_PRINTF:
10019 case BUILT_IN_PRINTF_UNLOCKED:
10020 case BUILT_IN_VPRINTF:
10021 return fold_builtin_printf (loc, fndecl, arg0, NULL_TREE, ignore, fcode);
10022
10023 default:
10024 break;
10025 }
10026
10027 return NULL_TREE;
10028
10029 }
10030
/* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
   IGNORE is true if the result of the function call is ignored.  This
   function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1, bool ignore)
{
  /* The return type of the builtin call being folded.  */
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  switch (fcode)
    {
    /* Bessel functions: constant-fold through MPFR when the arguments
       are constants of the expected types.  */
    CASE_FLT_FN (BUILT_IN_JN):
      if (validate_arg (arg0, INTEGER_TYPE)
	  && validate_arg (arg1, REAL_TYPE))
	return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
    break;

    CASE_FLT_FN (BUILT_IN_YN):
      if (validate_arg (arg0, INTEGER_TYPE)
	  && validate_arg (arg1, REAL_TYPE))
	return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
				 &dconst0, false);
    break;

    CASE_FLT_FN (BUILT_IN_DREM):
    CASE_FLT_FN (BUILT_IN_REMAINDER):
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg(arg1, REAL_TYPE))
	return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
    break;

    CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
    CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg(arg1, POINTER_TYPE))
	return do_mpfr_lgamma_r (arg0, arg1, type);
    break;

    CASE_FLT_FN (BUILT_IN_ATAN2):
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg(arg1, REAL_TYPE))
	return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
    break;

    CASE_FLT_FN (BUILT_IN_FDIM):
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg(arg1, REAL_TYPE))
	return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
    break;

    CASE_FLT_FN (BUILT_IN_HYPOT):
      return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_CPOW):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
	  && validate_arg (arg1, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
	return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
    break;

    /* ldexp, scalbn and scalbln share one folder; the flag says whether
       the exponent is relative to 2 (ldexp) or FLT_RADIX.  */
    CASE_FLT_FN (BUILT_IN_LDEXP):
      return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
      return fold_builtin_load_exponent (loc, arg0, arg1,
					 type, /*ldexp=*/false);

    CASE_FLT_FN (BUILT_IN_FREXP):
      return fold_builtin_frexp (loc, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_MODF):
      return fold_builtin_modf (loc, arg0, arg1, type);

    case BUILT_IN_BZERO:
      return fold_builtin_bzero (loc, arg0, arg1, ignore);

    case BUILT_IN_FPUTS:
      return fold_builtin_fputs (loc, arg0, arg1, ignore, false, NULL_TREE);

    case BUILT_IN_FPUTS_UNLOCKED:
      return fold_builtin_fputs (loc, arg0, arg1, ignore, true, NULL_TREE);

    case BUILT_IN_STRSTR:
      return fold_builtin_strstr (loc, arg0, arg1, type);

    case BUILT_IN_STRCAT:
      return fold_builtin_strcat (loc, arg0, arg1);

    case BUILT_IN_STRSPN:
      return fold_builtin_strspn (loc, arg0, arg1);

    case BUILT_IN_STRCSPN:
      return fold_builtin_strcspn (loc, arg0, arg1);

    case BUILT_IN_STRCHR:
    case BUILT_IN_INDEX:
      return fold_builtin_strchr (loc, arg0, arg1, type);

    case BUILT_IN_STRRCHR:
    case BUILT_IN_RINDEX:
      return fold_builtin_strrchr (loc, arg0, arg1, type);

    case BUILT_IN_STRCPY:
      return fold_builtin_strcpy (loc, fndecl, arg0, arg1, NULL_TREE);

    case BUILT_IN_STPCPY:
      /* When the result is unused, stpcpy degenerates to strcpy, which
	 has cheaper expansions; otherwise try the stpcpy folder.  */
      if (ignore)
	{
	  tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
	  if (!fn)
	    break;

	  return build_call_expr_loc (loc, fn, 2, arg0, arg1);
	}
      else
	return fold_builtin_stpcpy (loc, fndecl, arg0, arg1);
      break;

    case BUILT_IN_STRCMP:
      return fold_builtin_strcmp (loc, arg0, arg1);

    case BUILT_IN_STRPBRK:
      return fold_builtin_strpbrk (loc, arg0, arg1, type);

    case BUILT_IN_EXPECT:
      return fold_builtin_expect (loc, arg0, arg1);

    CASE_FLT_FN (BUILT_IN_POW):
      return fold_builtin_pow (loc, fndecl, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_POWI):
      return fold_builtin_powi (loc, fndecl, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_COPYSIGN):
      return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_FMIN):
      return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);

    CASE_FLT_FN (BUILT_IN_FMAX):
      return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);

    /* Each ordered-comparison builtin is folded with the pair of tree
       codes to use when the operands may, resp. may not, be NaN.  */
    case BUILT_IN_ISGREATER:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNLE_EXPR, LE_EXPR);
    case BUILT_IN_ISGREATEREQUAL:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNLT_EXPR, LT_EXPR);
    case BUILT_IN_ISLESS:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNGE_EXPR, GE_EXPR);
    case BUILT_IN_ISLESSEQUAL:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNGT_EXPR, GT_EXPR);
    case BUILT_IN_ISLESSGREATER:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
    case BUILT_IN_ISUNORDERED:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNORDERED_EXPR,
					 NOP_EXPR);

    /* We do the folding for va_start in the expander.  */
    case BUILT_IN_VA_START:
      break;

    case BUILT_IN_SPRINTF:
      return fold_builtin_sprintf (loc, arg0, arg1, NULL_TREE, ignore);

    case BUILT_IN_OBJECT_SIZE:
      return fold_builtin_object_size (arg0, arg1);

    case BUILT_IN_PRINTF:
    case BUILT_IN_PRINTF_UNLOCKED:
    case BUILT_IN_VPRINTF:
      return fold_builtin_printf (loc, fndecl, arg0, arg1, ignore, fcode);

    case BUILT_IN_PRINTF_CHK:
    case BUILT_IN_VPRINTF_CHK:
      /* ARG0 is the flag inserted by the _chk machinery; folding is only
	 safe when it is a side-effect-free integer.  */
      if (!validate_arg (arg0, INTEGER_TYPE)
	  || TREE_SIDE_EFFECTS (arg0))
	return NULL_TREE;
      else
	return fold_builtin_printf (loc, fndecl,
				    arg1, NULL_TREE, ignore, fcode);
    break;

    case BUILT_IN_FPRINTF:
    case BUILT_IN_FPRINTF_UNLOCKED:
    case BUILT_IN_VFPRINTF:
      return fold_builtin_fprintf (loc, fndecl, arg0, arg1, NULL_TREE,
				   ignore, fcode);

    default:
      break;
    }
  return NULL_TREE;
}
10231
10232 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10233 and ARG2. IGNORE is true if the result of the function call is ignored.
10234 This function returns NULL_TREE if no simplification was possible. */
10235
10236 static tree
10237 fold_builtin_3 (location_t loc, tree fndecl,
10238 tree arg0, tree arg1, tree arg2, bool ignore)
10239 {
10240 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10241 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10242 switch (fcode)
10243 {
10244
10245 CASE_FLT_FN (BUILT_IN_SINCOS):
10246 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10247
10248 CASE_FLT_FN (BUILT_IN_FMA):
10249 if (validate_arg (arg0, REAL_TYPE)
10250 && validate_arg(arg1, REAL_TYPE)
10251 && validate_arg(arg2, REAL_TYPE))
10252 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
10253 break;
10254
10255 CASE_FLT_FN (BUILT_IN_REMQUO):
10256 if (validate_arg (arg0, REAL_TYPE)
10257 && validate_arg(arg1, REAL_TYPE)
10258 && validate_arg(arg2, POINTER_TYPE))
10259 return do_mpfr_remquo (arg0, arg1, arg2);
10260 break;
10261
10262 case BUILT_IN_MEMSET:
10263 return fold_builtin_memset (loc, arg0, arg1, arg2, type, ignore);
10264
10265 case BUILT_IN_BCOPY:
10266 return fold_builtin_memory_op (loc, arg1, arg0, arg2,
10267 void_type_node, true, /*endp=*/3);
10268
10269 case BUILT_IN_MEMCPY:
10270 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10271 type, ignore, /*endp=*/0);
10272
10273 case BUILT_IN_MEMPCPY:
10274 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10275 type, ignore, /*endp=*/1);
10276
10277 case BUILT_IN_MEMMOVE:
10278 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10279 type, ignore, /*endp=*/3);
10280
10281 case BUILT_IN_STRNCAT:
10282 return fold_builtin_strncat (loc, arg0, arg1, arg2);
10283
10284 case BUILT_IN_STRNCPY:
10285 return fold_builtin_strncpy (loc, fndecl, arg0, arg1, arg2, NULL_TREE);
10286
10287 case BUILT_IN_STRNCMP:
10288 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10289
10290 case BUILT_IN_MEMCHR:
10291 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10292
10293 case BUILT_IN_BCMP:
10294 case BUILT_IN_MEMCMP:
10295 return fold_builtin_memcmp (loc, arg0, arg1, arg2);;
10296
10297 case BUILT_IN_SPRINTF:
10298 return fold_builtin_sprintf (loc, arg0, arg1, arg2, ignore);
10299
10300 case BUILT_IN_STRCPY_CHK:
10301 case BUILT_IN_STPCPY_CHK:
10302 return fold_builtin_stxcpy_chk (loc, fndecl, arg0, arg1, arg2, NULL_TREE,
10303 ignore, fcode);
10304
10305 case BUILT_IN_STRCAT_CHK:
10306 return fold_builtin_strcat_chk (loc, fndecl, arg0, arg1, arg2);
10307
10308 case BUILT_IN_PRINTF_CHK:
10309 case BUILT_IN_VPRINTF_CHK:
10310 if (!validate_arg (arg0, INTEGER_TYPE)
10311 || TREE_SIDE_EFFECTS (arg0))
10312 return NULL_TREE;
10313 else
10314 return fold_builtin_printf (loc, fndecl, arg1, arg2, ignore, fcode);
10315 break;
10316
10317 case BUILT_IN_FPRINTF:
10318 case BUILT_IN_FPRINTF_UNLOCKED:
10319 case BUILT_IN_VFPRINTF:
10320 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, arg2,
10321 ignore, fcode);
10322
10323 case BUILT_IN_FPRINTF_CHK:
10324 case BUILT_IN_VFPRINTF_CHK:
10325 if (!validate_arg (arg1, INTEGER_TYPE)
10326 || TREE_SIDE_EFFECTS (arg1))
10327 return NULL_TREE;
10328 else
10329 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, NULL_TREE,
10330 ignore, fcode);
10331
10332 default:
10333 break;
10334 }
10335 return NULL_TREE;
10336 }
10337
/* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
   ARG2, and ARG3.  IGNORE is true if the result of the function call is
   ignored.  This function returns NULL_TREE if no simplification was
   possible.  */

static tree
fold_builtin_4 (location_t loc, tree fndecl,
		tree arg0, tree arg1, tree arg2, tree arg3, bool ignore)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      /* The object-size checked memory builtins share one folder; the
	 function code tells it which operation is meant.  */
      return fold_builtin_memory_chk (loc, fndecl, arg0, arg1, arg2, arg3,
				      NULL_TREE, ignore,
				      DECL_FUNCTION_CODE (fndecl));

    case BUILT_IN_STRNCPY_CHK:
      return fold_builtin_strncpy_chk (loc, arg0, arg1, arg2, arg3, NULL_TREE);

    case BUILT_IN_STRNCAT_CHK:
      return fold_builtin_strncat_chk (loc, fndecl, arg0, arg1, arg2, arg3);

    case BUILT_IN_FPRINTF_CHK:
    case BUILT_IN_VFPRINTF_CHK:
      /* ARG1 is the flag inserted by the _chk machinery; folding is only
	 safe when it is a side-effect-free integer.  */
      if (!validate_arg (arg1, INTEGER_TYPE)
	  || TREE_SIDE_EFFECTS (arg1))
	return NULL_TREE;
      else
	return fold_builtin_fprintf (loc, fndecl, arg0, arg2, arg3,
				     ignore, fcode);
    break;

    default:
      break;
    }
  return NULL_TREE;
}
10380
10381 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10382 arguments, where NARGS <= 4. IGNORE is true if the result of the
10383 function call is ignored. This function returns NULL_TREE if no
10384 simplification was possible. Note that this only folds builtins with
10385 fixed argument patterns. Foldings that do varargs-to-varargs
10386 transformations, or that match calls with more than 4 arguments,
10387 need to be handled with fold_builtin_varargs instead. */
10388
10389 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10390
10391 static tree
10392 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool ignore)
10393 {
10394 tree ret = NULL_TREE;
10395
10396 switch (nargs)
10397 {
10398 case 0:
10399 ret = fold_builtin_0 (loc, fndecl, ignore);
10400 break;
10401 case 1:
10402 ret = fold_builtin_1 (loc, fndecl, args[0], ignore);
10403 break;
10404 case 2:
10405 ret = fold_builtin_2 (loc, fndecl, args[0], args[1], ignore);
10406 break;
10407 case 3:
10408 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2], ignore);
10409 break;
10410 case 4:
10411 ret = fold_builtin_4 (loc, fndecl, args[0], args[1], args[2], args[3],
10412 ignore);
10413 break;
10414 default:
10415 break;
10416 }
10417 if (ret)
10418 {
10419 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10420 SET_EXPR_LOCATION (ret, loc);
10421 TREE_NO_WARNING (ret) = 1;
10422 return ret;
10423 }
10424 return NULL_TREE;
10425 }
10426
10427 /* Builtins with folding operations that operate on "..." arguments
10428 need special handling; we need to store the arguments in a convenient
10429 data structure before attempting any folding. Fortunately there are
10430 only a few builtins that fall into this category. FNDECL is the
10431 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
10432 result of the function call is ignored. */
10433
10434 static tree
10435 fold_builtin_varargs (location_t loc, tree fndecl, tree exp,
10436 bool ignore ATTRIBUTE_UNUSED)
10437 {
10438 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10439 tree ret = NULL_TREE;
10440
10441 switch (fcode)
10442 {
10443 case BUILT_IN_SPRINTF_CHK:
10444 case BUILT_IN_VSPRINTF_CHK:
10445 ret = fold_builtin_sprintf_chk (loc, exp, fcode);
10446 break;
10447
10448 case BUILT_IN_SNPRINTF_CHK:
10449 case BUILT_IN_VSNPRINTF_CHK:
10450 ret = fold_builtin_snprintf_chk (loc, exp, NULL_TREE, fcode);
10451 break;
10452
10453 case BUILT_IN_FPCLASSIFY:
10454 ret = fold_builtin_fpclassify (loc, exp);
10455 break;
10456
10457 default:
10458 break;
10459 }
10460 if (ret)
10461 {
10462 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10463 SET_EXPR_LOCATION (ret, loc);
10464 TREE_NO_WARNING (ret) = 1;
10465 return ret;
10466 }
10467 return NULL_TREE;
10468 }
10469
10470 /* Return true if FNDECL shouldn't be folded right now.
10471 If a built-in function has an inline attribute always_inline
10472 wrapper, defer folding it after always_inline functions have
10473 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
10474 might not be performed. */
10475
10476 static bool
10477 avoid_folding_inline_builtin (tree fndecl)
10478 {
10479 return (DECL_DECLARED_INLINE_P (fndecl)
10480 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10481 && cfun
10482 && !cfun->always_inline_functions_inlined
10483 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
10484 }
10485
/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  EXP is the
   CALL_EXPR to fold; IGNORE is true if its result is unused.  Returns
   the folded replacement tree or NULL_TREE if no folding was done.  */

tree
fold_call_expr (location_t loc, tree exp, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = get_callee_fndecl (exp);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
	 yet.  Defer folding until we see all the arguments
	 (after inlining).  */
      && !CALL_EXPR_VA_ARG_PACK (exp))
    {
      int nargs = call_expr_nargs (exp);

      /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
	 instead last argument is __builtin_va_arg_pack ().  Defer folding
	 even in that case, until arguments are finalized.  */
      if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
	{
	  tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
	  if (fndecl2
	      && TREE_CODE (fndecl2) == FUNCTION_DECL
	      && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
	    return NULL_TREE;
	}

      /* always_inline wrappers around builtins must be inlined before
	 the wrapped builtin may be folded.  */
      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;

      /* FIXME: Don't use a list in this interface.  */
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	return targetm.fold_builtin (fndecl, CALL_EXPR_ARGS (exp), ignore);
      else
	{
	  /* Try the fixed-arity folders first, then fall back to the
	     varargs folders.  */
	  if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
	    {
	      tree *args = CALL_EXPR_ARGP (exp);
	      ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
	    }
	  if (!ret)
	    ret = fold_builtin_varargs (loc, fndecl, exp, ignore);
	  if (ret)
	    return ret;
	}
    }
  return NULL_TREE;
}
10539
10540 /* Conveniently construct a function call expression. FNDECL names the
10541 function to be called and ARGLIST is a TREE_LIST of arguments. */
10542
10543 tree
10544 build_function_call_expr (location_t loc, tree fndecl, tree arglist)
10545 {
10546 tree fntype = TREE_TYPE (fndecl);
10547 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10548 int n = list_length (arglist);
10549 tree *argarray = (tree *) alloca (n * sizeof (tree));
10550 int i;
10551
10552 for (i = 0; i < n; i++, arglist = TREE_CHAIN (arglist))
10553 argarray[i] = TREE_VALUE (arglist);
10554 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
10555 }
10556
10557 /* Conveniently construct a function call expression. FNDECL names the
10558 function to be called, N is the number of arguments, and the "..."
10559 parameters are the argument expressions. */
10560
10561 tree
10562 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
10563 {
10564 va_list ap;
10565 tree fntype = TREE_TYPE (fndecl);
10566 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10567 tree *argarray = (tree *) alloca (n * sizeof (tree));
10568 int i;
10569
10570 va_start (ap, n);
10571 for (i = 0; i < n; i++)
10572 argarray[i] = va_arg (ap, tree);
10573 va_end (ap);
10574 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
10575 }
10576
/* Construct a CALL_EXPR with type TYPE with FN as the function expression.
   N arguments are passed in the array ARGARRAY.  Attempts to fold the
   call when FN is a builtin; otherwise (or when folding fails) returns
   a plain CALL_EXPR node.  */

tree
fold_builtin_call_array (location_t loc, tree type,
			 tree fn,
			 int n,
			 tree *argarray)
{
  tree ret = NULL_TREE;
  int i;
  tree exp;

  if (TREE_CODE (fn) == ADDR_EXPR)
    {
      tree fndecl = TREE_OPERAND (fn, 0);
      if (TREE_CODE (fndecl) == FUNCTION_DECL
	  && DECL_BUILT_IN (fndecl))
	{
	  /* If last argument is __builtin_va_arg_pack (), arguments to this
	     function are not finalized yet.  Defer folding until they are.  */
	  if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
	    {
	      tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
	      if (fndecl2
		  && TREE_CODE (fndecl2) == FUNCTION_DECL
		  && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
		  && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
		return build_call_array_loc (loc, type, fn, n, argarray);
	    }
	  /* always_inline wrappers must be inlined before their wrapped
	     builtin may be folded.  */
	  if (avoid_folding_inline_builtin (fndecl))
	    return build_call_array_loc (loc, type, fn, n, argarray);
	  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	    {
	      /* Machine-dependent builtins go through the target hook,
		 which takes a TREE_LIST rather than an array.  */
	      tree arglist = NULL_TREE;
	      for (i = n - 1; i >= 0; i--)
		arglist = tree_cons (NULL_TREE, argarray[i], arglist);
	      ret = targetm.fold_builtin (fndecl, arglist, false);
	      if (ret)
		return ret;
	      return build_call_array_loc (loc, type, fn, n, argarray);
	    }
	  else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
	    {
	      /* First try the transformations that don't require consing up
		 an exp.  */
	      ret = fold_builtin_n (loc, fndecl, argarray, n, false);
	      if (ret)
		return ret;
	    }

	  /* If we got this far, we need to build an exp.  */
	  exp = build_call_array_loc (loc, type, fn, n, argarray);
	  ret = fold_builtin_varargs (loc, fndecl, exp, false);
	  return ret ? ret : exp;
	}
    }

  return build_call_array_loc (loc, type, fn, n, argarray);
}
10637
/* Construct a new CALL_EXPR using the tail of the argument list of EXP
   along with N new arguments specified as the "..." parameters.  SKIP
   is the number of arguments in EXP to be omitted.  This function is used
   to do varargs-to-varargs transformations.  */

static tree
rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
{
  int oldnargs = call_expr_nargs (exp);
  int nargs = oldnargs - skip + n;
  tree fntype = TREE_TYPE (fndecl);
  tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
  tree *buffer;

  if (n > 0)
    {
      int i, j;
      va_list ap;

      /* Copy the N new arguments first, then the surviving tail of
	 EXP's arguments.  */
      buffer = XALLOCAVEC (tree, nargs);
      va_start (ap, n);
      for (i = 0; i < n; i++)
	buffer[i] = va_arg (ap, tree);
      va_end (ap);
      for (j = skip; j < oldnargs; j++, i++)
	buffer[i] = CALL_EXPR_ARG (exp, j);
    }
  else
    /* No new arguments: point directly into EXP's own argument vector
       rather than copying it.  */
    buffer = CALL_EXPR_ARGP (exp) + skip;

  return fold (build_call_array_loc (loc, TREE_TYPE (exp), fn, nargs, buffer));
}
10670
10671 /* Validate a single argument ARG against a tree code CODE representing
10672 a type. */
10673
10674 static bool
10675 validate_arg (const_tree arg, enum tree_code code)
10676 {
10677 if (!arg)
10678 return false;
10679 else if (code == POINTER_TYPE)
10680 return POINTER_TYPE_P (TREE_TYPE (arg));
10681 else if (code == INTEGER_TYPE)
10682 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10683 return code == TREE_CODE (TREE_TYPE (arg));
10684 }
10685
/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipses, otherwise the last specifier must be a
   VOID_TYPE.

   This is the GIMPLE version of validate_arglist.  Eventually we want to
   completely convert builtins.c to work from GIMPLEs and the tree based
   validate_arglist will then be removed.  */

bool
validate_gimple_arglist (const_gimple call, ...)
{
  enum tree_code code;
  bool res = 0;
  va_list ap;
  const_tree arg;
  size_t i;

  va_start (ap, call);
  i = 0;

  do
    {
      /* tree_code is passed through "..." promoted to int.  */
      code = (enum tree_code) va_arg (ap, int);
      switch (code)
	{
	case 0:
	  /* This signifies an ellipses, any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink, if no arguments remain, return
	     true, otherwise return false.  */
	  res = (i == gimple_call_num_args (call));
	  goto end;
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = gimple_call_arg (call, i++);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }
  while (1);

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  return res;
}
10740
/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipses, otherwise the last specifier must be a
   VOID_TYPE.  */

bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = 0;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  do
    {
      /* tree_code is passed through "..." promoted to int.  */
      code = (enum tree_code) va_arg (ap, int);
      switch (code)
	{
	case 0:
	  /* This signifies an ellipses, any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink, if no arguments remain, return
	     true, otherwise return false.  */
	  res = !more_const_call_expr_args_p (&iter);
	  goto end;
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = next_const_call_expr_arg (&iter);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }
  while (1);

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  return res;
}
10791
10792 /* Default target-specific builtin expander that does nothing. */
10793
10794 rtx
10795 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10796 rtx target ATTRIBUTE_UNUSED,
10797 rtx subtarget ATTRIBUTE_UNUSED,
10798 enum machine_mode mode ATTRIBUTE_UNUSED,
10799 int ignore ATTRIBUTE_UNUSED)
10800 {
10801 return NULL_RTX;
10802 }
10803
10804 /* Returns true is EXP represents data that would potentially reside
10805 in a readonly section. */
10806
10807 static bool
10808 readonly_data_expr (tree exp)
10809 {
10810 STRIP_NOPS (exp);
10811
10812 if (TREE_CODE (exp) != ADDR_EXPR)
10813 return false;
10814
10815 exp = get_base_address (TREE_OPERAND (exp, 0));
10816 if (!exp)
10817 return false;
10818
10819 /* Make sure we call decl_readonly_section only for trees it
10820 can handle (since it returns true for everything it doesn't
10821 understand). */
10822 if (TREE_CODE (exp) == STRING_CST
10823 || TREE_CODE (exp) == CONSTRUCTOR
10824 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
10825 return decl_readonly_section (exp, 0);
10826 else
10827 return false;
10828 }
10829
/* Simplify a call to the strstr builtin.  S1 and S2 are the arguments
   to the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      tree fn;
      const char *p1, *p2;

      /* The search string must be a constant to fold anything.  */
      p2 = c_getstr (s2);
      if (p2 == NULL)
	return NULL_TREE;

      p1 = c_getstr (s1);
      if (p1 != NULL)
	{
	  /* Both strings are constant: do the search at compile time.  */
	  const char *r = strstr (p1, p2);
	  tree tem;

	  /* Not found: fold to a null pointer of S1's type.  */
	  if (r == NULL)
	    return build_int_cst (TREE_TYPE (s1), 0);

	  /* Return an offset into the constant string argument.  */
	  tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
				 s1, size_int (r - p1));
	  return fold_convert_loc (loc, type, tem);
	}

      /* strstr (s1, "") folds to s1 itself.  The argument is const char *,
	 and the result is char *, so we need a type conversion here to
	 avoid a warning.  */
      if (p2[0] == '\0')
	return fold_convert_loc (loc, type, s1);

      /* Only a one-character search string can be turned into strchr.  */
      if (p2[1] != '\0')
	return NULL_TREE;

      fn = implicit_built_in_decls[BUILT_IN_STRCHR];
      if (!fn)
	return NULL_TREE;

      /* New argument list transforming strstr(s1, s2) to
	 strchr(s1, s2[0]).  */
      return build_call_expr_loc (loc, fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
    }
}
10895
10896 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
10897 the call, and TYPE is its return type.
10898
10899 Return NULL_TREE if no simplification was possible, otherwise return the
10900 simplified form of the call as a tree.
10901
10902 The simplified form may be a constant or other expression which
10903 computes the same value, but in a more efficient manner (including
10904 calls to other builtin functions).
10905
10906 The call may contain arguments which need to be evaluated, but
10907 which are not useful to determine the result of the call. In
10908 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10909 COMPOUND_EXPR will be an argument which must be evaluated.
10910 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10911 COMPOUND_EXPR in the chain will contain the tree for the simplified
10912 form of the builtin function call. */
10913
10914 static tree
10915 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
10916 {
10917 if (!validate_arg (s1, POINTER_TYPE)
10918 || !validate_arg (s2, INTEGER_TYPE))
10919 return NULL_TREE;
10920 else
10921 {
10922 const char *p1;
10923
10924 if (TREE_CODE (s2) != INTEGER_CST)
10925 return NULL_TREE;
10926
10927 p1 = c_getstr (s1);
10928 if (p1 != NULL)
10929 {
10930 char c;
10931 const char *r;
10932 tree tem;
10933
10934 if (target_char_cast (s2, &c))
10935 return NULL_TREE;
10936
10937 r = strchr (p1, c);
10938
10939 if (r == NULL)
10940 return build_int_cst (TREE_TYPE (s1), 0);
10941
10942 /* Return an offset into the constant string argument. */
10943 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
10944 s1, size_int (r - p1));
10945 return fold_convert_loc (loc, type, tem);
10946 }
10947 return NULL_TREE;
10948 }
10949 }
10950
/* Simplify a call to the strrchr builtin.  S1 and S2 are the arguments to
   the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      tree fn;
      const char *p1;

      /* The character argument must be constant to fold anything.  */
      if (TREE_CODE (s2) != INTEGER_CST)
	return NULL_TREE;

      p1 = c_getstr (s1);
      if (p1 != NULL)
	{
	  /* Both arguments are constant: search at compile time.  */
	  char c;
	  const char *r;
	  tree tem;

	  if (target_char_cast (s2, &c))
	    return NULL_TREE;

	  r = strrchr (p1, c);

	  /* Not found: fold to a null pointer of S1's type.  */
	  if (r == NULL)
	    return build_int_cst (TREE_TYPE (s1), 0);

	  /* Return an offset into the constant string argument.  */
	  tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
				 s1, size_int (r - p1));
	  return fold_convert_loc (loc, type, tem);
	}

      /* Only searching for '\0' can become strchr: for that character
	 the first and last occurrence coincide.  */
      if (! integer_zerop (s2))
	return NULL_TREE;

      fn = implicit_built_in_decls[BUILT_IN_STRCHR];
      if (!fn)
	return NULL_TREE;

      /* Transform strrchr(s1, '\0') to strchr(s1, '\0').  */
      return build_call_expr_loc (loc, fn, 2, s1, s2);
    }
}
11015
/* Simplify a call to the strpbrk builtin.  S1 and S2 are the arguments
   to the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      tree fn;
      const char *p1, *p2;

      /* The accept-set argument must be constant to fold anything.  */
      p2 = c_getstr (s2);
      if (p2 == NULL)
	return NULL_TREE;

      p1 = c_getstr (s1);
      if (p1 != NULL)
	{
	  /* Both arguments are constant: search at compile time.  */
	  const char *r = strpbrk (p1, p2);
	  tree tem;

	  /* Not found: fold to a null pointer of S1's type.  */
	  if (r == NULL)
	    return build_int_cst (TREE_TYPE (s1), 0);

	  /* Return an offset into the constant string argument.  */
	  tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
				 s1, size_int (r - p1));
	  return fold_convert_loc (loc, type, tem);
	}

      if (p2[0] == '\0')
	/* strpbrk(x, "") == NULL.
	   Evaluate and ignore s1 in case it had side-effects.  */
	return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);

      /* Only a one-character accept set can be turned into strchr.  */
      if (p2[1] != '\0')
	return NULL_TREE;  /* Really call strpbrk.  */

      fn = implicit_built_in_decls[BUILT_IN_STRCHR];
      if (!fn)
	return NULL_TREE;

      /* New argument list transforming strpbrk(s1, s2) to
	 strchr(s1, s2[0]).  */
      return build_call_expr_loc (loc, fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
    }
}
11081
11082 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11083 to the call.
11084
11085 Return NULL_TREE if no simplification was possible, otherwise return the
11086 simplified form of the call as a tree.
11087
11088 The simplified form may be a constant or other expression which
11089 computes the same value, but in a more efficient manner (including
11090 calls to other builtin functions).
11091
11092 The call may contain arguments which need to be evaluated, but
11093 which are not useful to determine the result of the call. In
11094 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11095 COMPOUND_EXPR will be an argument which must be evaluated.
11096 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11097 COMPOUND_EXPR in the chain will contain the tree for the simplified
11098 form of the builtin function call. */
11099
11100 static tree
11101 fold_builtin_strcat (location_t loc ATTRIBUTE_UNUSED, tree dst, tree src)
11102 {
11103 if (!validate_arg (dst, POINTER_TYPE)
11104 || !validate_arg (src, POINTER_TYPE))
11105 return NULL_TREE;
11106 else
11107 {
11108 const char *p = c_getstr (src);
11109
11110 /* If the string length is zero, return the dst parameter. */
11111 if (p && *p == '\0')
11112 return dst;
11113
11114 if (optimize_insn_for_speed_p ())
11115 {
11116 /* See if we can store by pieces into (dst + strlen(dst)). */
11117 tree newdst, call;
11118 tree strlen_fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11119 tree strcpy_fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11120
11121 if (!strlen_fn || !strcpy_fn)
11122 return NULL_TREE;
11123
11124 /* If we don't have a movstr we don't want to emit an strcpy
11125 call. We have to do that if the length of the source string
11126 isn't computable (in that case we can use memcpy probably
11127 later expanding to a sequence of mov instructions). If we
11128 have movstr instructions we can emit strcpy calls. */
11129 if (!HAVE_movstr)
11130 {
11131 tree len = c_strlen (src, 1);
11132 if (! len || TREE_SIDE_EFFECTS (len))
11133 return NULL_TREE;
11134 }
11135
11136 /* Stabilize the argument list. */
11137 dst = builtin_save_expr (dst);
11138
11139 /* Create strlen (dst). */
11140 newdst = build_call_expr_loc (loc, strlen_fn, 1, dst);
11141 /* Create (dst p+ strlen (dst)). */
11142
11143 newdst = fold_build2_loc (loc, POINTER_PLUS_EXPR,
11144 TREE_TYPE (dst), dst, newdst);
11145 newdst = builtin_save_expr (newdst);
11146
11147 call = build_call_expr_loc (loc, strcpy_fn, 2, newdst, src);
11148 return build2 (COMPOUND_EXPR, TREE_TYPE (dst), call, dst);
11149 }
11150 return NULL_TREE;
11151 }
11152 }
11153
11154 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11155 arguments to the call.
11156
11157 Return NULL_TREE if no simplification was possible, otherwise return the
11158 simplified form of the call as a tree.
11159
11160 The simplified form may be a constant or other expression which
11161 computes the same value, but in a more efficient manner (including
11162 calls to other builtin functions).
11163
11164 The call may contain arguments which need to be evaluated, but
11165 which are not useful to determine the result of the call. In
11166 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11167 COMPOUND_EXPR will be an argument which must be evaluated.
11168 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11169 COMPOUND_EXPR in the chain will contain the tree for the simplified
11170 form of the builtin function call. */
11171
11172 static tree
11173 fold_builtin_strncat (location_t loc, tree dst, tree src, tree len)
11174 {
11175 if (!validate_arg (dst, POINTER_TYPE)
11176 || !validate_arg (src, POINTER_TYPE)
11177 || !validate_arg (len, INTEGER_TYPE))
11178 return NULL_TREE;
11179 else
11180 {
11181 const char *p = c_getstr (src);
11182
11183 /* If the requested length is zero, or the src parameter string
11184 length is zero, return the dst parameter. */
11185 if (integer_zerop (len) || (p && *p == '\0'))
11186 return omit_two_operands_loc (loc, TREE_TYPE (dst), dst, src, len);
11187
11188 /* If the requested len is greater than or equal to the string
11189 length, call strcat. */
11190 if (TREE_CODE (len) == INTEGER_CST && p
11191 && compare_tree_int (len, strlen (p)) >= 0)
11192 {
11193 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
11194
11195 /* If the replacement _DECL isn't initialized, don't do the
11196 transformation. */
11197 if (!fn)
11198 return NULL_TREE;
11199
11200 return build_call_expr_loc (loc, fn, 2, dst, src);
11201 }
11202 return NULL_TREE;
11203 }
11204 }
11205
11206 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11207 to the call.
11208
11209 Return NULL_TREE if no simplification was possible, otherwise return the
11210 simplified form of the call as a tree.
11211
11212 The simplified form may be a constant or other expression which
11213 computes the same value, but in a more efficient manner (including
11214 calls to other builtin functions).
11215
11216 The call may contain arguments which need to be evaluated, but
11217 which are not useful to determine the result of the call. In
11218 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11219 COMPOUND_EXPR will be an argument which must be evaluated.
11220 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11221 COMPOUND_EXPR in the chain will contain the tree for the simplified
11222 form of the builtin function call. */
11223
11224 static tree
11225 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11226 {
11227 if (!validate_arg (s1, POINTER_TYPE)
11228 || !validate_arg (s2, POINTER_TYPE))
11229 return NULL_TREE;
11230 else
11231 {
11232 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11233
11234 /* If both arguments are constants, evaluate at compile-time. */
11235 if (p1 && p2)
11236 {
11237 const size_t r = strspn (p1, p2);
11238 return size_int (r);
11239 }
11240
11241 /* If either argument is "", return NULL_TREE. */
11242 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11243 /* Evaluate and ignore both arguments in case either one has
11244 side-effects. */
11245 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11246 s1, s2);
11247 return NULL_TREE;
11248 }
11249 }
11250
11251 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11252 to the call.
11253
11254 Return NULL_TREE if no simplification was possible, otherwise return the
11255 simplified form of the call as a tree.
11256
11257 The simplified form may be a constant or other expression which
11258 computes the same value, but in a more efficient manner (including
11259 calls to other builtin functions).
11260
11261 The call may contain arguments which need to be evaluated, but
11262 which are not useful to determine the result of the call. In
11263 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11264 COMPOUND_EXPR will be an argument which must be evaluated.
11265 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11266 COMPOUND_EXPR in the chain will contain the tree for the simplified
11267 form of the builtin function call. */
11268
11269 static tree
11270 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
11271 {
11272 if (!validate_arg (s1, POINTER_TYPE)
11273 || !validate_arg (s2, POINTER_TYPE))
11274 return NULL_TREE;
11275 else
11276 {
11277 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11278
11279 /* If both arguments are constants, evaluate at compile-time. */
11280 if (p1 && p2)
11281 {
11282 const size_t r = strcspn (p1, p2);
11283 return size_int (r);
11284 }
11285
11286 /* If the first argument is "", return NULL_TREE. */
11287 if (p1 && *p1 == '\0')
11288 {
11289 /* Evaluate and ignore argument s2 in case it has
11290 side-effects. */
11291 return omit_one_operand_loc (loc, size_type_node,
11292 size_zero_node, s2);
11293 }
11294
11295 /* If the second argument is "", return __builtin_strlen(s1). */
11296 if (p2 && *p2 == '\0')
11297 {
11298 tree fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11299
11300 /* If the replacement _DECL isn't initialized, don't do the
11301 transformation. */
11302 if (!fn)
11303 return NULL_TREE;
11304
11305 return build_call_expr_loc (loc, fn, 1, s1);
11306 }
11307 return NULL_TREE;
11308 }
11309 }
11310
11311 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11312 to the call. IGNORE is true if the value returned
11313 by the builtin will be ignored. UNLOCKED is true is true if this
11314 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
11315 the known length of the string. Return NULL_TREE if no simplification
11316 was possible. */
11317
11318 tree
11319 fold_builtin_fputs (location_t loc, tree arg0, tree arg1,
11320 bool ignore, bool unlocked, tree len)
11321 {
11322 /* If we're using an unlocked function, assume the other unlocked
11323 functions exist explicitly. */
11324 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
11325 : implicit_built_in_decls[BUILT_IN_FPUTC];
11326 tree const fn_fwrite = unlocked ? built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
11327 : implicit_built_in_decls[BUILT_IN_FWRITE];
11328
11329 /* If the return value is used, don't do the transformation. */
11330 if (!ignore)
11331 return NULL_TREE;
11332
11333 /* Verify the arguments in the original call. */
11334 if (!validate_arg (arg0, POINTER_TYPE)
11335 || !validate_arg (arg1, POINTER_TYPE))
11336 return NULL_TREE;
11337
11338 if (! len)
11339 len = c_strlen (arg0, 0);
11340
11341 /* Get the length of the string passed to fputs. If the length
11342 can't be determined, punt. */
11343 if (!len
11344 || TREE_CODE (len) != INTEGER_CST)
11345 return NULL_TREE;
11346
11347 switch (compare_tree_int (len, 1))
11348 {
11349 case -1: /* length is 0, delete the call entirely . */
11350 return omit_one_operand_loc (loc, integer_type_node,
11351 integer_zero_node, arg1);;
11352
11353 case 0: /* length is 1, call fputc. */
11354 {
11355 const char *p = c_getstr (arg0);
11356
11357 if (p != NULL)
11358 {
11359 if (fn_fputc)
11360 return build_call_expr_loc (loc, fn_fputc, 2,
11361 build_int_cst (NULL_TREE, p[0]), arg1);
11362 else
11363 return NULL_TREE;
11364 }
11365 }
11366 /* FALLTHROUGH */
11367 case 1: /* length is greater than 1, call fwrite. */
11368 {
11369 /* If optimizing for size keep fputs. */
11370 if (optimize_function_for_size_p (cfun))
11371 return NULL_TREE;
11372 /* New argument list transforming fputs(string, stream) to
11373 fwrite(string, 1, len, stream). */
11374 if (fn_fwrite)
11375 return build_call_expr_loc (loc, fn_fwrite, 4, arg0,
11376 size_one_node, len, arg1);
11377 else
11378 return NULL_TREE;
11379 }
11380 default:
11381 gcc_unreachable ();
11382 }
11383 return NULL_TREE;
11384 }
11385
/* Fold the next_arg or va_start call EXP.  Returns true if there was an error
   produced.  False otherwise.  This is done so that we don't output the error
   or warning twice or three times.  */

bool
fold_builtin_next_arg (tree exp, bool va_start_p)
{
  tree fntype = TREE_TYPE (current_function_decl);
  int nargs = call_expr_nargs (exp);
  tree arg;

  /* va_start is only meaningful in a varargs function: the argument
     list must not end with void_type_node.  */
  if (TYPE_ARG_TYPES (fntype) == 0
      || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
	  == void_type_node))
    {
      error ("%<va_start%> used in function with fixed args");
      return true;
    }

  if (va_start_p)
    {
      if (va_start_p && (nargs != 2))
	{
	  error ("wrong number of arguments to function %<va_start%>");
	  return true;
	}
      /* The parameter to validate is va_start's second argument.  */
      arg = CALL_EXPR_ARG (exp, 1);
    }
  /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
     when we checked the arguments and if needed issued a warning.  */
  else
    {
      if (nargs == 0)
	{
	  /* Evidently an out of date version of <stdarg.h>; can't validate
	     va_start's second argument, but can still work as intended.  */
	  warning (0, "%<__builtin_next_arg%> called without an argument");
	  return true;
	}
      else if (nargs > 1)
	{
	  error ("wrong number of arguments to function %<__builtin_next_arg%>");
	  return true;
	}
      arg = CALL_EXPR_ARG (exp, 0);
    }

  /* Look through an SSA name to the underlying parameter decl.  */
  if (TREE_CODE (arg) == SSA_NAME)
    arg = SSA_NAME_VAR (arg);

  /* We destructively modify the call to be __builtin_va_start (ap, 0)
     or __builtin_next_arg (0) the first time we see it, after checking
     the arguments and if needed issuing a warning.  */
  if (!integer_zerop (arg))
    {
      tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));

      /* Strip off all nops for the sake of the comparison.  This
	 is not quite the same as STRIP_NOPS.  It does more.
	 We must also strip off INDIRECT_EXPR for C++ reference
	 parameters.  */
      while (CONVERT_EXPR_P (arg)
	     || TREE_CODE (arg) == INDIRECT_REF)
	arg = TREE_OPERAND (arg, 0);
      if (arg != last_parm)
	{
	  /* FIXME: Sometimes with the tree optimizers we can get the
	     not the last argument even though the user used the last
	     argument.  We just warn and set the arg to be the last
	     argument so that we will get wrong-code because of
	     it.  */
	  warning (0, "second parameter of %<va_start%> not last named argument");
	}

      /* Undefined by C99 7.15.1.4p4 (va_start):
         "If the parameter parmN is declared with the register storage
         class, with a function or array type, or with a type that is
         not compatible with the type that results after application of
         the default argument promotions, the behavior is undefined."
      */
      else if (DECL_REGISTER (arg))
	warning (0, "undefined behaviour when second parameter of "
		 "%<va_start%> is declared with %<register%> storage");

      /* We want to verify the second parameter just once before the tree
	 optimizers are run and then avoid keeping it in the tree,
	 as otherwise we could warn even for correct code like:
	 void foo (int i, ...)
	 { va_list ap; i++; va_start (ap, i); va_end (ap); }  */
      if (va_start_p)
	CALL_EXPR_ARG (exp, 1) = integer_zero_node;
      else
	CALL_EXPR_ARG (exp, 0) = integer_zero_node;
    }
  return false;
}
11482
11483
/* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
   ORIG may be null if this is a 2-argument call.  We don't attempt to
   simplify calls with more than 3 arguments.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.  If IGNORED is true, it means that
   the caller does not use the returned value of the function.  */

static tree
fold_builtin_sprintf (location_t loc, tree dest, tree fmt,
		      tree orig, int ignored)
{
  tree call, retval;
  const char *fmt_str = NULL;

  /* Verify the required arguments in the original call.  We deal with two
     types of sprintf() calls: 'sprintf (str, fmt)' and
     'sprintf (dest, "%s", orig)'.  */
  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;
  if (orig && !validate_arg (orig, POINTER_TYPE))
    return NULL_TREE;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return NULL_TREE;

  call = NULL_TREE;
  retval = NULL_TREE;

  /* Make sure the target's '%' and "%s" characters are known.  */
  if (!init_target_chars ())
    return NULL_TREE;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];

      if (!fn)
	return NULL_TREE;

      /* Don't optimize sprintf (buf, "abc", ptr++).  */
      if (orig)
	return NULL_TREE;

      /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
	 'format' is known to contain no % formats.  */
      call = build_call_expr_loc (loc, fn, 2, dest, fmt);
      /* sprintf's return value is the number of characters written,
	 which here is the length of the constant format string.  */
      if (!ignored)
	retval = build_int_cst (NULL_TREE, strlen (fmt_str));
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn;
      fn = implicit_built_in_decls[BUILT_IN_STRCPY];

      if (!fn)
	return NULL_TREE;

      /* Don't crash on sprintf (str1, "%s").  */
      if (!orig)
	return NULL_TREE;

      /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2).  */
      if (!ignored)
	{
	  /* The return value is the length of ORIG; it must fold to an
	     integer constant for the result to be usable.  */
	  retval = c_strlen (orig, 1);
	  if (!retval || TREE_CODE (retval) != INTEGER_CST)
	    return NULL_TREE;
	}
      call = build_call_expr_loc (loc, fn, 2, dest, orig);
    }

  if (call && retval)
    {
      /* Chain the copy and the return value: (call, retval) evaluates
	 the strcpy, then yields retval converted to sprintf's return
	 type.  */
      retval = fold_convert_loc
	(loc, TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
	 retval);
      return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
    }
  else
    return call;
}
11571
11572 /* Expand a call EXP to __builtin_object_size. */
11573
11574 rtx
11575 expand_builtin_object_size (tree exp)
11576 {
11577 tree ost;
11578 int object_size_type;
11579 tree fndecl = get_callee_fndecl (exp);
11580
11581 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11582 {
11583 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11584 exp, fndecl);
11585 expand_builtin_trap ();
11586 return const0_rtx;
11587 }
11588
11589 ost = CALL_EXPR_ARG (exp, 1);
11590 STRIP_NOPS (ost);
11591
11592 if (TREE_CODE (ost) != INTEGER_CST
11593 || tree_int_cst_sgn (ost) < 0
11594 || compare_tree_int (ost, 3) > 0)
11595 {
11596 error ("%Klast argument of %D is not integer constant between 0 and 3",
11597 exp, fndecl);
11598 expand_builtin_trap ();
11599 return const0_rtx;
11600 }
11601
11602 object_size_type = tree_low_cst (ost, 0);
11603
11604 return object_size_type < 2 ? constm1_rtx : const0_rtx;
11605 }
11606
/* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   FCODE is the BUILT_IN_* to use.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
			   enum built_in_function fcode)
{
  tree dest, src, len, size;

  /* The second argument is an integer for memset, a pointer for the
     copy/move variants.  */
  if (!validate_arglist (exp,
			 POINTER_TYPE,
			 fcode == BUILT_IN_MEMSET_CHK
			 ? INTEGER_TYPE : POINTER_TYPE,
			 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dest = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);
  len = CALL_EXPR_ARG (exp, 2);
  size = CALL_EXPR_ARG (exp, 3);

  /* The known object size must be a host integer constant to reason
     about the check at all.  */
  if (! host_integerp (size, 1))
    return NULL_RTX;

  if (host_integerp (len, 1) || integer_all_onesp (size))
    {
      tree fn;

      /* A constant LEN larger than the known object size always
	 overflows (all-ones SIZE means "unknown", i.e. unchecked).  */
      if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
	{
	  warning_at (tree_nonartificial_location (exp),
		      0, "%Kcall to %D will always overflow destination buffer",
		      exp, get_callee_fndecl (exp));
	  return NULL_RTX;
	}

      fn = NULL_TREE;
      /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
	 mem{cpy,pcpy,move,set} is available.  */
      switch (fcode)
	{
	case BUILT_IN_MEMCPY_CHK:
	  fn = built_in_decls[BUILT_IN_MEMCPY];
	  break;
	case BUILT_IN_MEMPCPY_CHK:
	  fn = built_in_decls[BUILT_IN_MEMPCPY];
	  break;
	case BUILT_IN_MEMMOVE_CHK:
	  fn = built_in_decls[BUILT_IN_MEMMOVE];
	  break;
	case BUILT_IN_MEMSET_CHK:
	  fn = built_in_decls[BUILT_IN_MEMSET];
	  break;
	default:
	  break;
	}

      if (! fn)
	return NULL_RTX;

      /* The check cannot fail at runtime: expand as the unchecked
	 variant, preserving the tail-call flag.  */
      fn = build_call_nofold (fn, 3, dest, src, len);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_expr (fn, target, mode, EXPAND_NORMAL);
    }
  else if (fcode == BUILT_IN_MEMSET_CHK)
    return NULL_RTX;
  else
    {
      unsigned int dest_align
	= get_pointer_alignment (dest, BIGGEST_ALIGNMENT);

      /* If DEST is not a pointer type, call the normal function.  */
      if (dest_align == 0)
	return NULL_RTX;

      /* If SRC and DEST are the same (and not volatile), do nothing.  */
      if (operand_equal_p (src, dest, 0))
	{
	  tree expr;

	  if (fcode != BUILT_IN_MEMPCPY_CHK)
	    {
	      /* Evaluate and ignore LEN in case it has side-effects.  */
	      expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
	      return expand_expr (dest, target, mode, EXPAND_NORMAL);
	    }

	  /* mempcpy returns DEST + LEN rather than DEST.  */
	  expr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
	  return expand_expr (expr, target, mode, EXPAND_NORMAL);
	}

      /* __memmove_chk special case.  */
      if (fcode == BUILT_IN_MEMMOVE_CHK)
	{
	  unsigned int src_align
	    = get_pointer_alignment (src, BIGGEST_ALIGNMENT);

	  if (src_align == 0)
	    return NULL_RTX;

	  /* If src is categorized for a readonly section we can use
	     normal __memcpy_chk.  */
	  if (readonly_data_expr (src))
	    {
	      tree fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
	      if (!fn)
		return NULL_RTX;
	      fn = build_call_nofold (fn, 4, dest, src, len, size);
	      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
	      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
	      return expand_expr (fn, target, mode, EXPAND_NORMAL);
	    }
	}
      return NULL_RTX;
    }
}
11727
/* Emit warning if a buffer overflow is detected at compile time.  */

static void
maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
{
  int is_strlen = 0;
  tree len, size;
  location_t loc = tree_nonartificial_location (exp);

  /* Pick out the length-like argument and the object-size argument for
     each checked builtin; their positions differ per builtin.  */
  switch (fcode)
    {
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
    /* For __strcat_chk the warning will be emitted only if overflowing
       by at least strlen (dest) + 1 bytes.  */
    case BUILT_IN_STRCAT_CHK:
      len = CALL_EXPR_ARG (exp, 1);
      size = CALL_EXPR_ARG (exp, 2);
      is_strlen = 1;
      break;
    case BUILT_IN_STRNCAT_CHK:
    case BUILT_IN_STRNCPY_CHK:
      len = CALL_EXPR_ARG (exp, 2);
      size = CALL_EXPR_ARG (exp, 3);
      break;
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      len = CALL_EXPR_ARG (exp, 1);
      size = CALL_EXPR_ARG (exp, 3);
      break;
    default:
      gcc_unreachable ();
    }

  if (!len || !size)
    return;

  /* A non-constant or all-ones size means the object size is unknown;
     nothing to warn about then.  */
  if (! host_integerp (size, 1) || integer_all_onesp (size))
    return;

  if (is_strlen)
    {
      /* LEN is a source string: warn only when its constant length is
	 known and is at least SIZE.  */
      len = c_strlen (len, 1);
      if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
	return;
    }
  else if (fcode == BUILT_IN_STRNCAT_CHK)
    {
      tree src = CALL_EXPR_ARG (exp, 1);
      if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
	return;
      src = c_strlen (src, 1);
      if (! src || ! host_integerp (src, 1))
	{
	  /* Source length unknown: overflow is possible, not certain.  */
	  warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
		      exp, get_callee_fndecl (exp));
	  return;
	}
      else if (tree_int_cst_lt (src, size))
	return;
    }
  else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
    return;

  warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
	      exp, get_callee_fndecl (exp));
}
11795
11796 /* Emit warning if a buffer overflow is detected at compile time
11797 in __sprintf_chk/__vsprintf_chk calls. */
11798
11799 static void
11800 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11801 {
11802 tree size, len, fmt;
11803 const char *fmt_str;
11804 int nargs = call_expr_nargs (exp);
11805
11806 /* Verify the required arguments in the original call. */
11807
11808 if (nargs < 4)
11809 return;
11810 size = CALL_EXPR_ARG (exp, 2);
11811 fmt = CALL_EXPR_ARG (exp, 3);
11812
11813 if (! host_integerp (size, 1) || integer_all_onesp (size))
11814 return;
11815
11816 /* Check whether the format is a literal string constant. */
11817 fmt_str = c_getstr (fmt);
11818 if (fmt_str == NULL)
11819 return;
11820
11821 if (!init_target_chars ())
11822 return;
11823
11824 /* If the format doesn't contain % args or %%, we know its size. */
11825 if (strchr (fmt_str, target_percent) == 0)
11826 len = build_int_cstu (size_type_node, strlen (fmt_str));
11827 /* If the format is "%s" and first ... argument is a string literal,
11828 we know it too. */
11829 else if (fcode == BUILT_IN_SPRINTF_CHK
11830 && strcmp (fmt_str, target_percent_s) == 0)
11831 {
11832 tree arg;
11833
11834 if (nargs < 5)
11835 return;
11836 arg = CALL_EXPR_ARG (exp, 4);
11837 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11838 return;
11839
11840 len = c_strlen (arg, 1);
11841 if (!len || ! host_integerp (len, 1))
11842 return;
11843 }
11844 else
11845 return;
11846
11847 if (! tree_int_cst_lt (len, size))
11848 warning_at (tree_nonartificial_location (exp),
11849 0, "%Kcall to %D will always overflow destination buffer",
11850 exp, get_callee_fndecl (exp));
11851 }
11852
11853 /* Emit warning if a free is called with address of a variable. */
11854
11855 static void
11856 maybe_emit_free_warning (tree exp)
11857 {
11858 tree arg = CALL_EXPR_ARG (exp, 0);
11859
11860 STRIP_NOPS (arg);
11861 if (TREE_CODE (arg) != ADDR_EXPR)
11862 return;
11863
11864 arg = get_base_address (TREE_OPERAND (arg, 0));
11865 if (arg == NULL || INDIRECT_REF_P (arg))
11866 return;
11867
11868 if (SSA_VAR_P (arg))
11869 warning_at (tree_nonartificial_location (exp),
11870 0, "%Kattempt to free a non-heap object %qD", exp, arg);
11871 else
11872 warning_at (tree_nonartificial_location (exp),
11873 0, "%Kattempt to free a non-heap object", exp);
11874 }
11875
11876 /* Fold a call to __builtin_object_size with arguments PTR and OST,
11877 if possible. */
11878
11879 tree
11880 fold_builtin_object_size (tree ptr, tree ost)
11881 {
11882 tree ret = NULL_TREE;
11883 int object_size_type;
11884
11885 if (!validate_arg (ptr, POINTER_TYPE)
11886 || !validate_arg (ost, INTEGER_TYPE))
11887 return NULL_TREE;
11888
11889 STRIP_NOPS (ost);
11890
11891 if (TREE_CODE (ost) != INTEGER_CST
11892 || tree_int_cst_sgn (ost) < 0
11893 || compare_tree_int (ost, 3) > 0)
11894 return NULL_TREE;
11895
11896 object_size_type = tree_low_cst (ost, 0);
11897
11898 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11899 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11900 and (size_t) 0 for types 2 and 3. */
11901 if (TREE_SIDE_EFFECTS (ptr))
11902 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
11903
11904 if (TREE_CODE (ptr) == ADDR_EXPR)
11905 ret = build_int_cstu (size_type_node,
11906 compute_builtin_object_size (ptr, object_size_type));
11907
11908 else if (TREE_CODE (ptr) == SSA_NAME)
11909 {
11910 unsigned HOST_WIDE_INT bytes;
11911
11912 /* If object size is not known yet, delay folding until
11913 later. Maybe subsequent passes will help determining
11914 it. */
11915 bytes = compute_builtin_object_size (ptr, object_size_type);
11916 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2
11917 ? -1 : 0))
11918 ret = build_int_cstu (size_type_node, bytes);
11919 }
11920
11921 if (ret)
11922 {
11923 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (ret);
11924 HOST_WIDE_INT high = TREE_INT_CST_HIGH (ret);
11925 if (fit_double_type (low, high, &low, &high, TREE_TYPE (ret)))
11926 ret = NULL_TREE;
11927 }
11928
11929 return ret;
11930 }
11931
11932 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11933 DEST, SRC, LEN, and SIZE are the arguments to the call.
11934 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
11935 code of the builtin. If MAXLEN is not NULL, it is maximum length
11936 passed as third argument. */
11937
11938 tree
11939 fold_builtin_memory_chk (location_t loc, tree fndecl,
11940 tree dest, tree src, tree len, tree size,
11941 tree maxlen, bool ignore,
11942 enum built_in_function fcode)
11943 {
11944 tree fn;
11945
11946 if (!validate_arg (dest, POINTER_TYPE)
11947 || !validate_arg (src,
11948 (fcode == BUILT_IN_MEMSET_CHK
11949 ? INTEGER_TYPE : POINTER_TYPE))
11950 || !validate_arg (len, INTEGER_TYPE)
11951 || !validate_arg (size, INTEGER_TYPE))
11952 return NULL_TREE;
11953
11954 /* If SRC and DEST are the same (and not volatile), return DEST
11955 (resp. DEST+LEN for __mempcpy_chk). */
11956 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
11957 {
11958 if (fcode != BUILT_IN_MEMPCPY_CHK)
11959 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
11960 dest, len);
11961 else
11962 {
11963 tree temp = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest),
11964 dest, len);
11965 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), temp);
11966 }
11967 }
11968
11969 if (! host_integerp (size, 1))
11970 return NULL_TREE;
11971
11972 if (! integer_all_onesp (size))
11973 {
11974 if (! host_integerp (len, 1))
11975 {
11976 /* If LEN is not constant, try MAXLEN too.
11977 For MAXLEN only allow optimizing into non-_ocs function
11978 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
11979 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
11980 {
11981 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
11982 {
11983 /* (void) __mempcpy_chk () can be optimized into
11984 (void) __memcpy_chk (). */
11985 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
11986 if (!fn)
11987 return NULL_TREE;
11988
11989 return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
11990 }
11991 return NULL_TREE;
11992 }
11993 }
11994 else
11995 maxlen = len;
11996
11997 if (tree_int_cst_lt (size, maxlen))
11998 return NULL_TREE;
11999 }
12000
12001 fn = NULL_TREE;
12002 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12003 mem{cpy,pcpy,move,set} is available. */
12004 switch (fcode)
12005 {
12006 case BUILT_IN_MEMCPY_CHK:
12007 fn = built_in_decls[BUILT_IN_MEMCPY];
12008 break;
12009 case BUILT_IN_MEMPCPY_CHK:
12010 fn = built_in_decls[BUILT_IN_MEMPCPY];
12011 break;
12012 case BUILT_IN_MEMMOVE_CHK:
12013 fn = built_in_decls[BUILT_IN_MEMMOVE];
12014 break;
12015 case BUILT_IN_MEMSET_CHK:
12016 fn = built_in_decls[BUILT_IN_MEMSET];
12017 break;
12018 default:
12019 break;
12020 }
12021
12022 if (!fn)
12023 return NULL_TREE;
12024
12025 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12026 }
12027
12028 /* Fold a call to the __st[rp]cpy_chk builtin.
12029 DEST, SRC, and SIZE are the arguments to the call.
12030 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
12031 code of the builtin. If MAXLEN is not NULL, it is maximum length of
12032 strings passed as second argument. */
12033
12034 tree
12035 fold_builtin_stxcpy_chk (location_t loc, tree fndecl, tree dest,
12036 tree src, tree size,
12037 tree maxlen, bool ignore,
12038 enum built_in_function fcode)
12039 {
12040 tree len, fn;
12041
12042 if (!validate_arg (dest, POINTER_TYPE)
12043 || !validate_arg (src, POINTER_TYPE)
12044 || !validate_arg (size, INTEGER_TYPE))
12045 return NULL_TREE;
12046
12047 /* If SRC and DEST are the same (and not volatile), return DEST. */
12048 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
12049 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
12050
12051 if (! host_integerp (size, 1))
12052 return NULL_TREE;
12053
12054 if (! integer_all_onesp (size))
12055 {
12056 len = c_strlen (src, 1);
12057 if (! len || ! host_integerp (len, 1))
12058 {
12059 /* If LEN is not constant, try MAXLEN too.
12060 For MAXLEN only allow optimizing into non-_ocs function
12061 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12062 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12063 {
12064 if (fcode == BUILT_IN_STPCPY_CHK)
12065 {
12066 if (! ignore)
12067 return NULL_TREE;
12068
12069 /* If return value of __stpcpy_chk is ignored,
12070 optimize into __strcpy_chk. */
12071 fn = built_in_decls[BUILT_IN_STRCPY_CHK];
12072 if (!fn)
12073 return NULL_TREE;
12074
12075 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12076 }
12077
12078 if (! len || TREE_SIDE_EFFECTS (len))
12079 return NULL_TREE;
12080
12081 /* If c_strlen returned something, but not a constant,
12082 transform __strcpy_chk into __memcpy_chk. */
12083 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12084 if (!fn)
12085 return NULL_TREE;
12086
12087 len = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
12088 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12089 build_call_expr_loc (loc, fn, 4,
12090 dest, src, len, size));
12091 }
12092 }
12093 else
12094 maxlen = len;
12095
12096 if (! tree_int_cst_lt (maxlen, size))
12097 return NULL_TREE;
12098 }
12099
12100 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
12101 fn = built_in_decls[fcode == BUILT_IN_STPCPY_CHK
12102 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY];
12103 if (!fn)
12104 return NULL_TREE;
12105
12106 return build_call_expr_loc (loc, fn, 2, dest, src);
12107 }
12108
12109 /* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
12110 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12111 length passed as third argument. */
12112
12113 tree
12114 fold_builtin_strncpy_chk (location_t loc, tree dest, tree src,
12115 tree len, tree size, tree maxlen)
12116 {
12117 tree fn;
12118
12119 if (!validate_arg (dest, POINTER_TYPE)
12120 || !validate_arg (src, POINTER_TYPE)
12121 || !validate_arg (len, INTEGER_TYPE)
12122 || !validate_arg (size, INTEGER_TYPE))
12123 return NULL_TREE;
12124
12125 if (! host_integerp (size, 1))
12126 return NULL_TREE;
12127
12128 if (! integer_all_onesp (size))
12129 {
12130 if (! host_integerp (len, 1))
12131 {
12132 /* If LEN is not constant, try MAXLEN too.
12133 For MAXLEN only allow optimizing into non-_ocs function
12134 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12135 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12136 return NULL_TREE;
12137 }
12138 else
12139 maxlen = len;
12140
12141 if (tree_int_cst_lt (size, maxlen))
12142 return NULL_TREE;
12143 }
12144
12145 /* If __builtin_strncpy_chk is used, assume strncpy is available. */
12146 fn = built_in_decls[BUILT_IN_STRNCPY];
12147 if (!fn)
12148 return NULL_TREE;
12149
12150 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12151 }
12152
12153 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12154 are the arguments to the call. */
12155
12156 static tree
12157 fold_builtin_strcat_chk (location_t loc, tree fndecl, tree dest,
12158 tree src, tree size)
12159 {
12160 tree fn;
12161 const char *p;
12162
12163 if (!validate_arg (dest, POINTER_TYPE)
12164 || !validate_arg (src, POINTER_TYPE)
12165 || !validate_arg (size, INTEGER_TYPE))
12166 return NULL_TREE;
12167
12168 p = c_getstr (src);
12169 /* If the SRC parameter is "", return DEST. */
12170 if (p && *p == '\0')
12171 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12172
12173 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
12174 return NULL_TREE;
12175
12176 /* If __builtin_strcat_chk is used, assume strcat is available. */
12177 fn = built_in_decls[BUILT_IN_STRCAT];
12178 if (!fn)
12179 return NULL_TREE;
12180
12181 return build_call_expr_loc (loc, fn, 2, dest, src);
12182 }
12183
12184 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12185 LEN, and SIZE. */
12186
12187 static tree
12188 fold_builtin_strncat_chk (location_t loc, tree fndecl,
12189 tree dest, tree src, tree len, tree size)
12190 {
12191 tree fn;
12192 const char *p;
12193
12194 if (!validate_arg (dest, POINTER_TYPE)
12195 || !validate_arg (src, POINTER_TYPE)
12196 || !validate_arg (size, INTEGER_TYPE)
12197 || !validate_arg (size, INTEGER_TYPE))
12198 return NULL_TREE;
12199
12200 p = c_getstr (src);
12201 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
12202 if (p && *p == '\0')
12203 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12204 else if (integer_zerop (len))
12205 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12206
12207 if (! host_integerp (size, 1))
12208 return NULL_TREE;
12209
12210 if (! integer_all_onesp (size))
12211 {
12212 tree src_len = c_strlen (src, 1);
12213 if (src_len
12214 && host_integerp (src_len, 1)
12215 && host_integerp (len, 1)
12216 && ! tree_int_cst_lt (len, src_len))
12217 {
12218 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
12219 fn = built_in_decls[BUILT_IN_STRCAT_CHK];
12220 if (!fn)
12221 return NULL_TREE;
12222
12223 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12224 }
12225 return NULL_TREE;
12226 }
12227
12228 /* If __builtin_strncat_chk is used, assume strncat is available. */
12229 fn = built_in_decls[BUILT_IN_STRNCAT];
12230 if (!fn)
12231 return NULL_TREE;
12232
12233 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12234 }
12235
12236 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
12237 a normal call should be emitted rather than expanding the function
12238 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
12239
12240 static tree
12241 fold_builtin_sprintf_chk (location_t loc, tree exp,
12242 enum built_in_function fcode)
12243 {
12244 tree dest, size, len, fn, fmt, flag;
12245 const char *fmt_str;
12246 int nargs = call_expr_nargs (exp);
12247
12248 /* Verify the required arguments in the original call. */
12249 if (nargs < 4)
12250 return NULL_TREE;
12251 dest = CALL_EXPR_ARG (exp, 0);
12252 if (!validate_arg (dest, POINTER_TYPE))
12253 return NULL_TREE;
12254 flag = CALL_EXPR_ARG (exp, 1);
12255 if (!validate_arg (flag, INTEGER_TYPE))
12256 return NULL_TREE;
12257 size = CALL_EXPR_ARG (exp, 2);
12258 if (!validate_arg (size, INTEGER_TYPE))
12259 return NULL_TREE;
12260 fmt = CALL_EXPR_ARG (exp, 3);
12261 if (!validate_arg (fmt, POINTER_TYPE))
12262 return NULL_TREE;
12263
12264 if (! host_integerp (size, 1))
12265 return NULL_TREE;
12266
12267 len = NULL_TREE;
12268
12269 if (!init_target_chars ())
12270 return NULL_TREE;
12271
12272 /* Check whether the format is a literal string constant. */
12273 fmt_str = c_getstr (fmt);
12274 if (fmt_str != NULL)
12275 {
12276 /* If the format doesn't contain % args or %%, we know the size. */
12277 if (strchr (fmt_str, target_percent) == 0)
12278 {
12279 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
12280 len = build_int_cstu (size_type_node, strlen (fmt_str));
12281 }
12282 /* If the format is "%s" and first ... argument is a string literal,
12283 we know the size too. */
12284 else if (fcode == BUILT_IN_SPRINTF_CHK
12285 && strcmp (fmt_str, target_percent_s) == 0)
12286 {
12287 tree arg;
12288
12289 if (nargs == 5)
12290 {
12291 arg = CALL_EXPR_ARG (exp, 4);
12292 if (validate_arg (arg, POINTER_TYPE))
12293 {
12294 len = c_strlen (arg, 1);
12295 if (! len || ! host_integerp (len, 1))
12296 len = NULL_TREE;
12297 }
12298 }
12299 }
12300 }
12301
12302 if (! integer_all_onesp (size))
12303 {
12304 if (! len || ! tree_int_cst_lt (len, size))
12305 return NULL_TREE;
12306 }
12307
12308 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
12309 or if format doesn't contain % chars or is "%s". */
12310 if (! integer_zerop (flag))
12311 {
12312 if (fmt_str == NULL)
12313 return NULL_TREE;
12314 if (strchr (fmt_str, target_percent) != NULL
12315 && strcmp (fmt_str, target_percent_s))
12316 return NULL_TREE;
12317 }
12318
12319 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
12320 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
12321 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
12322 if (!fn)
12323 return NULL_TREE;
12324
12325 return rewrite_call_expr (loc, exp, 4, fn, 2, dest, fmt);
12326 }
12327
12328 /* Fold a call EXP to {,v}snprintf. Return NULL_TREE if
12329 a normal call should be emitted rather than expanding the function
12330 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
12331 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
12332 passed as second argument. */
12333
12334 tree
12335 fold_builtin_snprintf_chk (location_t loc, tree exp, tree maxlen,
12336 enum built_in_function fcode)
12337 {
12338 tree dest, size, len, fn, fmt, flag;
12339 const char *fmt_str;
12340
12341 /* Verify the required arguments in the original call. */
12342 if (call_expr_nargs (exp) < 5)
12343 return NULL_TREE;
12344 dest = CALL_EXPR_ARG (exp, 0);
12345 if (!validate_arg (dest, POINTER_TYPE))
12346 return NULL_TREE;
12347 len = CALL_EXPR_ARG (exp, 1);
12348 if (!validate_arg (len, INTEGER_TYPE))
12349 return NULL_TREE;
12350 flag = CALL_EXPR_ARG (exp, 2);
12351 if (!validate_arg (flag, INTEGER_TYPE))
12352 return NULL_TREE;
12353 size = CALL_EXPR_ARG (exp, 3);
12354 if (!validate_arg (size, INTEGER_TYPE))
12355 return NULL_TREE;
12356 fmt = CALL_EXPR_ARG (exp, 4);
12357 if (!validate_arg (fmt, POINTER_TYPE))
12358 return NULL_TREE;
12359
12360 if (! host_integerp (size, 1))
12361 return NULL_TREE;
12362
12363 if (! integer_all_onesp (size))
12364 {
12365 if (! host_integerp (len, 1))
12366 {
12367 /* If LEN is not constant, try MAXLEN too.
12368 For MAXLEN only allow optimizing into non-_ocs function
12369 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12370 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12371 return NULL_TREE;
12372 }
12373 else
12374 maxlen = len;
12375
12376 if (tree_int_cst_lt (size, maxlen))
12377 return NULL_TREE;
12378 }
12379
12380 if (!init_target_chars ())
12381 return NULL_TREE;
12382
12383 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
12384 or if format doesn't contain % chars or is "%s". */
12385 if (! integer_zerop (flag))
12386 {
12387 fmt_str = c_getstr (fmt);
12388 if (fmt_str == NULL)
12389 return NULL_TREE;
12390 if (strchr (fmt_str, target_percent) != NULL
12391 && strcmp (fmt_str, target_percent_s))
12392 return NULL_TREE;
12393 }
12394
12395 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
12396 available. */
12397 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
12398 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
12399 if (!fn)
12400 return NULL_TREE;
12401
12402 return rewrite_call_expr (loc, exp, 5, fn, 3, dest, len, fmt);
12403 }
12404
12405 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
12406 FMT and ARG are the arguments to the call; we don't fold cases with
12407 more than 2 arguments, and ARG may be null if this is a 1-argument case.
12408
12409 Return NULL_TREE if no simplification was possible, otherwise return the
12410 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12411 code of the function to be simplified. */
12412
12413 static tree
12414 fold_builtin_printf (location_t loc, tree fndecl, tree fmt,
12415 tree arg, bool ignore,
12416 enum built_in_function fcode)
12417 {
12418 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
12419 const char *fmt_str = NULL;
12420
12421 /* If the return value is used, don't do the transformation. */
12422 if (! ignore)
12423 return NULL_TREE;
12424
12425 /* Verify the required arguments in the original call. */
12426 if (!validate_arg (fmt, POINTER_TYPE))
12427 return NULL_TREE;
12428
12429 /* Check whether the format is a literal string constant. */
12430 fmt_str = c_getstr (fmt);
12431 if (fmt_str == NULL)
12432 return NULL_TREE;
12433
12434 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
12435 {
12436 /* If we're using an unlocked function, assume the other
12437 unlocked functions exist explicitly. */
12438 fn_putchar = built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED];
12439 fn_puts = built_in_decls[BUILT_IN_PUTS_UNLOCKED];
12440 }
12441 else
12442 {
12443 fn_putchar = implicit_built_in_decls[BUILT_IN_PUTCHAR];
12444 fn_puts = implicit_built_in_decls[BUILT_IN_PUTS];
12445 }
12446
12447 if (!init_target_chars ())
12448 return NULL_TREE;
12449
12450 if (strcmp (fmt_str, target_percent_s) == 0
12451 || strchr (fmt_str, target_percent) == NULL)
12452 {
12453 const char *str;
12454
12455 if (strcmp (fmt_str, target_percent_s) == 0)
12456 {
12457 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12458 return NULL_TREE;
12459
12460 if (!arg || !validate_arg (arg, POINTER_TYPE))
12461 return NULL_TREE;
12462
12463 str = c_getstr (arg);
12464 if (str == NULL)
12465 return NULL_TREE;
12466 }
12467 else
12468 {
12469 /* The format specifier doesn't contain any '%' characters. */
12470 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
12471 && arg)
12472 return NULL_TREE;
12473 str = fmt_str;
12474 }
12475
12476 /* If the string was "", printf does nothing. */
12477 if (str[0] == '\0')
12478 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12479
12480 /* If the string has length of 1, call putchar. */
12481 if (str[1] == '\0')
12482 {
12483 /* Given printf("c"), (where c is any one character,)
12484 convert "c"[0] to an int and pass that to the replacement
12485 function. */
12486 newarg = build_int_cst (NULL_TREE, str[0]);
12487 if (fn_putchar)
12488 call = build_call_expr_loc (loc, fn_putchar, 1, newarg);
12489 }
12490 else
12491 {
12492 /* If the string was "string\n", call puts("string"). */
12493 size_t len = strlen (str);
12494 if ((unsigned char)str[len - 1] == target_newline)
12495 {
12496 /* Create a NUL-terminated string that's one char shorter
12497 than the original, stripping off the trailing '\n'. */
12498 char *newstr = XALLOCAVEC (char, len);
12499 memcpy (newstr, str, len - 1);
12500 newstr[len - 1] = 0;
12501
12502 newarg = build_string_literal (len, newstr);
12503 if (fn_puts)
12504 call = build_call_expr_loc (loc, fn_puts, 1, newarg);
12505 }
12506 else
12507 /* We'd like to arrange to call fputs(string,stdout) here,
12508 but we need stdout and don't have a way to get it yet. */
12509 return NULL_TREE;
12510 }
12511 }
12512
12513 /* The other optimizations can be done only on the non-va_list variants. */
12514 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12515 return NULL_TREE;
12516
12517 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
12518 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
12519 {
12520 if (!arg || !validate_arg (arg, POINTER_TYPE))
12521 return NULL_TREE;
12522 if (fn_puts)
12523 call = build_call_expr_loc (loc, fn_puts, 1, arg);
12524 }
12525
12526 /* If the format specifier was "%c", call __builtin_putchar(arg). */
12527 else if (strcmp (fmt_str, target_percent_c) == 0)
12528 {
12529 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12530 return NULL_TREE;
12531 if (fn_putchar)
12532 call = build_call_expr_loc (loc, fn_putchar, 1, arg);
12533 }
12534
12535 if (!call)
12536 return NULL_TREE;
12537
12538 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
12539 }
12540
12541 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
12542 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
12543 more than 3 arguments, and ARG may be null in the 2-argument case.
12544
12545 Return NULL_TREE if no simplification was possible, otherwise return the
12546 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12547 code of the function to be simplified. */
12548
12549 static tree
12550 fold_builtin_fprintf (location_t loc, tree fndecl, tree fp,
12551 tree fmt, tree arg, bool ignore,
12552 enum built_in_function fcode)
12553 {
12554 tree fn_fputc, fn_fputs, call = NULL_TREE;
12555 const char *fmt_str = NULL;
12556
12557 /* If the return value is used, don't do the transformation. */
12558 if (! ignore)
12559 return NULL_TREE;
12560
12561 /* Verify the required arguments in the original call. */
12562 if (!validate_arg (fp, POINTER_TYPE))
12563 return NULL_TREE;
12564 if (!validate_arg (fmt, POINTER_TYPE))
12565 return NULL_TREE;
12566
12567 /* Check whether the format is a literal string constant. */
12568 fmt_str = c_getstr (fmt);
12569 if (fmt_str == NULL)
12570 return NULL_TREE;
12571
12572 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
12573 {
12574 /* If we're using an unlocked function, assume the other
12575 unlocked functions exist explicitly. */
12576 fn_fputc = built_in_decls[BUILT_IN_FPUTC_UNLOCKED];
12577 fn_fputs = built_in_decls[BUILT_IN_FPUTS_UNLOCKED];
12578 }
12579 else
12580 {
12581 fn_fputc = implicit_built_in_decls[BUILT_IN_FPUTC];
12582 fn_fputs = implicit_built_in_decls[BUILT_IN_FPUTS];
12583 }
12584
12585 if (!init_target_chars ())
12586 return NULL_TREE;
12587
12588 /* If the format doesn't contain % args or %%, use strcpy. */
12589 if (strchr (fmt_str, target_percent) == NULL)
12590 {
12591 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
12592 && arg)
12593 return NULL_TREE;
12594
12595 /* If the format specifier was "", fprintf does nothing. */
12596 if (fmt_str[0] == '\0')
12597 {
12598 /* If FP has side-effects, just wait until gimplification is
12599 done. */
12600 if (TREE_SIDE_EFFECTS (fp))
12601 return NULL_TREE;
12602
12603 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12604 }
12605
12606 /* When "string" doesn't contain %, replace all cases of
12607 fprintf (fp, string) with fputs (string, fp). The fputs
12608 builtin will take care of special cases like length == 1. */
12609 if (fn_fputs)
12610 call = build_call_expr_loc (loc, fn_fputs, 2, fmt, fp);
12611 }
12612
12613 /* The other optimizations can be done only on the non-va_list variants. */
12614 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
12615 return NULL_TREE;
12616
12617 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
12618 else if (strcmp (fmt_str, target_percent_s) == 0)
12619 {
12620 if (!arg || !validate_arg (arg, POINTER_TYPE))
12621 return NULL_TREE;
12622 if (fn_fputs)
12623 call = build_call_expr_loc (loc, fn_fputs, 2, arg, fp);
12624 }
12625
12626 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
12627 else if (strcmp (fmt_str, target_percent_c) == 0)
12628 {
12629 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12630 return NULL_TREE;
12631 if (fn_fputc)
12632 call = build_call_expr_loc (loc, fn_fputc, 2, arg, fp);
12633 }
12634
12635 if (!call)
12636 return NULL_TREE;
12637 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
12638 }
12639
12640 /* Initialize format string characters in the target charset. */
12641
12642 static bool
12643 init_target_chars (void)
12644 {
12645 static bool init;
12646 if (!init)
12647 {
12648 target_newline = lang_hooks.to_target_charset ('\n');
12649 target_percent = lang_hooks.to_target_charset ('%');
12650 target_c = lang_hooks.to_target_charset ('c');
12651 target_s = lang_hooks.to_target_charset ('s');
12652 if (target_newline == 0 || target_percent == 0 || target_c == 0
12653 || target_s == 0)
12654 return false;
12655
12656 target_percent_c[0] = target_percent;
12657 target_percent_c[1] = target_c;
12658 target_percent_c[2] = '\0';
12659
12660 target_percent_s[0] = target_percent;
12661 target_percent_s[1] = target_s;
12662 target_percent_s[2] = '\0';
12663
12664 target_percent_s_newline[0] = target_percent;
12665 target_percent_s_newline[1] = target_s;
12666 target_percent_s_newline[2] = target_newline;
12667 target_percent_s_newline[3] = '\0';
12668
12669 init = true;
12670 }
12671 return true;
12672 }
12673
12674 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
12675 and no overflow/underflow occurred. INEXACT is true if M was not
12676 exactly calculated. TYPE is the tree type for the result. This
12677 function assumes that you cleared the MPFR flags and then
12678 calculated M to see if anything subsequently set a flag prior to
12679 entering this function. Return NULL_TREE if any checks fail. */
12680
12681 static tree
12682 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
12683 {
12684 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12685 overflow/underflow occurred. If -frounding-math, proceed iff the
12686 result of calling FUNC was exact. */
12687 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
12688 && (!flag_rounding_math || !inexact))
12689 {
12690 REAL_VALUE_TYPE rr;
12691
12692 real_from_mpfr (&rr, m, type, GMP_RNDN);
12693 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
12694 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12695 but the mpft_t is not, then we underflowed in the
12696 conversion. */
12697 if (real_isfinite (&rr)
12698 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
12699 {
12700 REAL_VALUE_TYPE rmode;
12701
12702 real_convert (&rmode, TYPE_MODE (type), &rr);
12703 /* Proceed iff the specified mode can hold the value. */
12704 if (real_identical (&rmode, &rr))
12705 return build_real (type, rmode);
12706 }
12707 }
12708 return NULL_TREE;
12709 }
12710
12711 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
12712 number and no overflow/underflow occurred. INEXACT is true if M
12713 was not exactly calculated. TYPE is the tree type for the result.
12714 This function assumes that you cleared the MPFR flags and then
12715 calculated M to see if anything subsequently set a flag prior to
12716 entering this function. Return NULL_TREE if any checks fail, if
12717 FORCE_CONVERT is true, then bypass the checks. */
12718
12719 static tree
12720 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
12721 {
12722 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12723 overflow/underflow occurred. If -frounding-math, proceed iff the
12724 result of calling FUNC was exact. */
12725 if (force_convert
12726 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
12727 && !mpfr_overflow_p () && !mpfr_underflow_p ()
12728 && (!flag_rounding_math || !inexact)))
12729 {
12730 REAL_VALUE_TYPE re, im;
12731
12732 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
12733 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
12734 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
12735 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12736 but the mpft_t is not, then we underflowed in the
12737 conversion. */
12738 if (force_convert
12739 || (real_isfinite (&re) && real_isfinite (&im)
12740 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
12741 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
12742 {
12743 REAL_VALUE_TYPE re_mode, im_mode;
12744
12745 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
12746 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
12747 /* Proceed iff the specified mode can hold the value. */
12748 if (force_convert
12749 || (real_identical (&re_mode, &re)
12750 && real_identical (&im_mode, &im)))
12751 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
12752 build_real (TREE_TYPE (type), im_mode));
12753 }
12754 }
12755 return NULL_TREE;
12756 }
12757
12758 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
12759 FUNC on it and return the resulting value as a tree with type TYPE.
12760 If MIN and/or MAX are not NULL, then the supplied ARG must be
12761 within those bounds. If INCLUSIVE is true, then MIN/MAX are
12762 acceptable values, otherwise they are not. The mpfr precision is
12763 set to the precision of TYPE. We assume that function FUNC returns
12764 zero if the result could be calculated exactly within the requested
12765 precision. */
12766
12767 static tree
12768 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
12769 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
12770 bool inclusive)
12771 {
12772 tree result = NULL_TREE;
12773
12774 STRIP_NOPS (arg);
12775
12776 /* To proceed, MPFR must exactly represent the target floating point
12777 format, which only happens when the target base equals two. */
12778 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12779 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
12780 {
12781 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12782
12783 if (real_isfinite (ra)
12784 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
12785 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
12786 {
12787 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12788 const int prec = fmt->p;
12789 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12790 int inexact;
12791 mpfr_t m;
12792
12793 mpfr_init2 (m, prec);
12794 mpfr_from_real (m, ra, GMP_RNDN);
12795 mpfr_clear_flags ();
12796 inexact = func (m, m, rnd);
12797 result = do_mpfr_ckconv (m, type, inexact);
12798 mpfr_clear (m);
12799 }
12800 }
12801
12802 return result;
12803 }
12804
12805 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
12806 FUNC on it and return the resulting value as a tree with type TYPE.
12807 The mpfr precision is set to the precision of TYPE. We assume that
12808 function FUNC returns zero if the result could be calculated
12809 exactly within the requested precision. */
12810
12811 static tree
12812 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
12813 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12814 {
12815 tree result = NULL_TREE;
12816
12817 STRIP_NOPS (arg1);
12818 STRIP_NOPS (arg2);
12819
12820 /* To proceed, MPFR must exactly represent the target floating point
12821 format, which only happens when the target base equals two. */
12822 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12823 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12824 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12825 {
12826 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12827 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12828
12829 if (real_isfinite (ra1) && real_isfinite (ra2))
12830 {
12831 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12832 const int prec = fmt->p;
12833 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12834 int inexact;
12835 mpfr_t m1, m2;
12836
12837 mpfr_inits2 (prec, m1, m2, NULL);
12838 mpfr_from_real (m1, ra1, GMP_RNDN);
12839 mpfr_from_real (m2, ra2, GMP_RNDN);
12840 mpfr_clear_flags ();
12841 inexact = func (m1, m1, m2, rnd);
12842 result = do_mpfr_ckconv (m1, type, inexact);
12843 mpfr_clears (m1, m2, NULL);
12844 }
12845 }
12846
12847 return result;
12848 }
12849
12850 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
12851 FUNC on it and return the resulting value as a tree with type TYPE.
12852 The mpfr precision is set to the precision of TYPE. We assume that
12853 function FUNC returns zero if the result could be calculated
12854 exactly within the requested precision. */
12855
12856 static tree
12857 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
12858 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12859 {
12860 tree result = NULL_TREE;
12861
12862 STRIP_NOPS (arg1);
12863 STRIP_NOPS (arg2);
12864 STRIP_NOPS (arg3);
12865
12866 /* To proceed, MPFR must exactly represent the target floating point
12867 format, which only happens when the target base equals two. */
12868 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12869 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12870 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
12871 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
12872 {
12873 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12874 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12875 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
12876
12877 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
12878 {
12879 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12880 const int prec = fmt->p;
12881 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12882 int inexact;
12883 mpfr_t m1, m2, m3;
12884
12885 mpfr_inits2 (prec, m1, m2, m3, NULL);
12886 mpfr_from_real (m1, ra1, GMP_RNDN);
12887 mpfr_from_real (m2, ra2, GMP_RNDN);
12888 mpfr_from_real (m3, ra3, GMP_RNDN);
12889 mpfr_clear_flags ();
12890 inexact = func (m1, m1, m2, m3, rnd);
12891 result = do_mpfr_ckconv (m1, type, inexact);
12892 mpfr_clears (m1, m2, m3, NULL);
12893 }
12894 }
12895
12896 return result;
12897 }
12898
/* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
   the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
   If ARG_SINP and ARG_COSP are NULL then the result is returned
   as a complex value (real part = cos, imaginary part = sin).
   The type is taken from the type of ARG and is used for setting the
   precision of the calculation and results.  Returns NULL_TREE if
   the folding cannot be performed.  */

static tree
do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
{
  tree const type = TREE_TYPE (arg);
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST
      && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);

      if (real_isfinite (ra))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  tree result_s, result_c;
	  int inexact;
	  mpfr_t m, ms, mc;

	  /* One mpfr_sin_cos call computes both results; both are
	     checked for convertibility independently below.  */
	  mpfr_inits2 (prec, m, ms, mc, NULL);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  mpfr_clear_flags ();
	  inexact = mpfr_sin_cos (ms, mc, m, rnd);
	  result_s = do_mpfr_ckconv (ms, type, inexact);
	  result_c = do_mpfr_ckconv (mc, type, inexact);
	  mpfr_clears (m, ms, mc, NULL);
	  /* Only fold when BOTH results converted cleanly.  */
	  if (result_s && result_c)
	    {
	      /* If we are to return in a complex value, do so.  */
	      if (!arg_sinp && !arg_cosp)
		return build_complex (build_complex_type (type),
				      result_c, result_s);

	      /* Dereference the sin/cos pointer arguments.  */
	      arg_sinp = build_fold_indirect_ref (arg_sinp);
	      arg_cosp = build_fold_indirect_ref (arg_cosp);
	      /* Proceed iff valid pointer types were passed in: the
		 pointees must match TYPE up to qualifiers.  */
	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
		  && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
		{
		  /* Build the two stores; mark them as having side
		     effects so later passes don't discard them.  */
		  result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
					  result_s);
		  TREE_SIDE_EFFECTS (result_s) = 1;
		  result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
					  result_c);
		  TREE_SIDE_EFFECTS (result_c) = 1;
		  /* Combine the assignments into a compound expr.  */
		  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						    result_s, result_c));
		}
	    }
	}
    }
  return result;
}
12968
12969 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
12970 two-argument mpfr order N Bessel function FUNC on them and return
12971 the resulting value as a tree with type TYPE. The mpfr precision
12972 is set to the precision of TYPE. We assume that function FUNC
12973 returns zero if the result could be calculated exactly within the
12974 requested precision. */
12975 static tree
12976 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
12977 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
12978 const REAL_VALUE_TYPE *min, bool inclusive)
12979 {
12980 tree result = NULL_TREE;
12981
12982 STRIP_NOPS (arg1);
12983 STRIP_NOPS (arg2);
12984
12985 /* To proceed, MPFR must exactly represent the target floating point
12986 format, which only happens when the target base equals two. */
12987 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12988 && host_integerp (arg1, 0)
12989 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12990 {
12991 const HOST_WIDE_INT n = tree_low_cst(arg1, 0);
12992 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
12993
12994 if (n == (long)n
12995 && real_isfinite (ra)
12996 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
12997 {
12998 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12999 const int prec = fmt->p;
13000 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13001 int inexact;
13002 mpfr_t m;
13003
13004 mpfr_init2 (m, prec);
13005 mpfr_from_real (m, ra, GMP_RNDN);
13006 mpfr_clear_flags ();
13007 inexact = func (m, n, m, rnd);
13008 result = do_mpfr_ckconv (m, type, inexact);
13009 mpfr_clear (m);
13010 }
13011 }
13012
13013 return result;
13014 }
13015
/* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
   the pointer *(ARG_QUO) and return the result.  The type is taken
   from the type of ARG0 and is used for setting the precision of the
   calculation and results.  Returns NULL_TREE if folding is not
   possible.  */

static tree
do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
{
  tree const type = TREE_TYPE (arg0);
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
    {
      const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
      const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);

      if (real_isfinite (ra0) && real_isfinite (ra1))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  tree result_rem;
	  long integer_quo;
	  mpfr_t m0, m1;

	  mpfr_inits2 (prec, m0, m1, NULL);
	  mpfr_from_real (m0, ra0, GMP_RNDN);
	  mpfr_from_real (m1, ra1, GMP_RNDN);
	  mpfr_clear_flags ();
	  mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
	  /* Remquo is independent of the rounding mode, so pass
	     inexact=0 to do_mpfr_ckconv().  */
	  result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
	  mpfr_clears (m0, m1, NULL);
	  if (result_rem)
	    {
	      /* MPFR calculates quo in the host's long so it may
		 return more bits in quo than the target int can hold
		 if sizeof(host long) > sizeof(target int).  This can
		 happen even for native compilers in LP64 mode.  In
		 these cases, modulo the quo value with the largest
		 number that the target int can hold while leaving one
		 bit for the sign.  */
	      if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
		integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));

	      /* Dereference the quo pointer argument.  */
	      arg_quo = build_fold_indirect_ref (arg_quo);
	      /* Proceed iff a valid pointer type was passed in: the
		 pointee must be exactly `int'.  */
	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
		{
		  /* Build the store of the quotient and mark it as a
		     side effect so later passes don't discard it.  */
		  tree result_quo = fold_build2 (MODIFY_EXPR,
						 TREE_TYPE (arg_quo), arg_quo,
						 build_int_cst (NULL, integer_quo));
		  TREE_SIDE_EFFECTS (result_quo) = 1;
		  /* Combine the quo assignment with the rem.  */
		  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						    result_quo, result_rem));
		}
	    }
	}
    }
  return result;
}
13088
/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
   resulting value as a tree with type TYPE.  The mpfr precision is
   set to the precision of TYPE.  We assume that this mpfr function
   returns zero if the result could be calculated exactly within the
   requested precision.  In addition, the integer pointer represented
   by ARG_SG will be dereferenced and set to the appropriate signgam
   (-1,1) value.  Returns NULL_TREE if folding is not possible.  */

static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
	 negative integer (lgamma poles).  */
      if (real_isfinite (ra)
	  && ra->cl != rvc_zero
	  && !(real_isneg(ra) && real_isinteger(ra, TYPE_MODE (type))))
        {
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  int inexact, sg;
	  mpfr_t m;
	  tree result_lg;

	  mpfr_init2 (m, prec);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  mpfr_clear_flags ();
	  /* mpfr_lgamma computes log|Gamma(x)| and stores the sign of
	     Gamma(x) in SG.  */
	  inexact = mpfr_lgamma (m, &sg, m, rnd);
	  result_lg = do_mpfr_ckconv (m, type, inexact);
	  mpfr_clear (m);
	  if (result_lg)
	    {
	      tree result_sg;

	      /* Dereference the arg_sg pointer argument.  */
	      arg_sg = build_fold_indirect_ref (arg_sg);
	      /* Assign the signgam value into *arg_sg; mark the store
		 as a side effect so later passes don't discard it.  */
	      result_sg = fold_build2 (MODIFY_EXPR,
				       TREE_TYPE (arg_sg), arg_sg,
				       build_int_cst (NULL, sg));
	      TREE_SIDE_EFFECTS (result_sg) = 1;
	      /* Combine the signgam assignment with the lgamma result.  */
	      result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						result_sg, result_lg));
	    }
        }
    }

  return result;
}
13153
13154 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
13155 function FUNC on it and return the resulting value as a tree with
13156 type TYPE. The mpfr precision is set to the precision of TYPE. We
13157 assume that function FUNC returns zero if the result could be
13158 calculated exactly within the requested precision. */
13159
13160 static tree
13161 do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
13162 {
13163 tree result = NULL_TREE;
13164
13165 STRIP_NOPS (arg);
13166
13167 /* To proceed, MPFR must exactly represent the target floating point
13168 format, which only happens when the target base equals two. */
13169 if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
13170 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
13171 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
13172 {
13173 const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
13174 const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
13175
13176 if (real_isfinite (re) && real_isfinite (im))
13177 {
13178 const struct real_format *const fmt =
13179 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
13180 const int prec = fmt->p;
13181 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13182 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
13183 int inexact;
13184 mpc_t m;
13185
13186 mpc_init2 (m, prec);
13187 mpfr_from_real (mpc_realref(m), re, rnd);
13188 mpfr_from_real (mpc_imagref(m), im, rnd);
13189 mpfr_clear_flags ();
13190 inexact = func (m, m, crnd);
13191 result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
13192 mpc_clear (m);
13193 }
13194 }
13195
13196 return result;
13197 }
13198
/* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
   mpc function FUNC on it and return the resulting value as a tree
   with type TYPE.  The mpfr precision is set to the precision of
   TYPE.  We assume that function FUNC returns zero if the result
   could be calculated exactly within the requested precision.  If
   DO_NONFINITE is true, then fold expressions containing Inf or NaN
   in the arguments and/or results.  Returns NULL_TREE if folding is
   not possible.  */

tree
do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
	     int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Both
     arguments must be complex constants with real element type; only
     ARG0's format needs checking since TYPE is shared.  */
  if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
      && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
      const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
      const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
      const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));

      /* With DO_NONFINITE the finiteness checks are bypassed here and
	 again inside do_mpc_ckconv via force_convert.  */
      if (do_nonfinite
	  || (real_isfinite (re0) && real_isfinite (im0)
	      && real_isfinite (re1) && real_isfinite (im1)))
        {
	  const struct real_format *const fmt =
	    REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
	  int inexact;
	  mpc_t m0, m1;

	  mpc_init2 (m0, prec);
	  mpc_init2 (m1, prec);
	  mpfr_from_real (mpc_realref(m0), re0, rnd);
	  mpfr_from_real (mpc_imagref(m0), im0, rnd);
	  mpfr_from_real (mpc_realref(m1), re1, rnd);
	  mpfr_from_real (mpc_imagref(m1), im1, rnd);
	  /* Clear the flags first so do_mpc_ckconv can inspect what
	     FUNC raised.  */
	  mpfr_clear_flags ();
	  inexact = func (m0, m0, m1, crnd);
	  result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
	  mpc_clear (m0);
	  mpc_clear (m1);
        }
    }

  return result;
}
13257
13258 /* FIXME tuples.
13259 The functions below provide an alternate interface for folding
13260 builtin function calls presented as GIMPLE_CALL statements rather
13261 than as CALL_EXPRs. The folded result is still expressed as a
13262 tree. There is too much code duplication in the handling of
13263 varargs functions, and a more intrusive re-factoring would permit
13264 better sharing of code between the tree and statement-based
13265 versions of these functions. */
13266
13267 /* Construct a new CALL_EXPR using the tail of the argument list of STMT
13268 along with N new arguments specified as the "..." parameters. SKIP
13269 is the number of arguments in STMT to be omitted. This function is used
13270 to do varargs-to-varargs transformations. */
13271
13272 static tree
13273 gimple_rewrite_call_expr (gimple stmt, int skip, tree fndecl, int n, ...)
13274 {
13275 int oldnargs = gimple_call_num_args (stmt);
13276 int nargs = oldnargs - skip + n;
13277 tree fntype = TREE_TYPE (fndecl);
13278 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
13279 tree *buffer;
13280 int i, j;
13281 va_list ap;
13282 location_t loc = gimple_location (stmt);
13283
13284 buffer = XALLOCAVEC (tree, nargs);
13285 va_start (ap, n);
13286 for (i = 0; i < n; i++)
13287 buffer[i] = va_arg (ap, tree);
13288 va_end (ap);
13289 for (j = skip; j < oldnargs; j++, i++)
13290 buffer[i] = gimple_call_arg (stmt, j);
13291
13292 return fold (build_call_array_loc (loc, TREE_TYPE (fntype), fn, nargs, buffer));
13293 }
13294
/* Fold a call STMT to __{,v}sprintf_chk.  Return NULL_TREE if
   a normal call should be emitted rather than expanding the function
   inline.  FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK.
   On success the call is rewritten into plain {,v}sprintf.  */

static tree
gimple_fold_builtin_sprintf_chk (gimple stmt, enum built_in_function fcode)
{
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;
  int nargs = gimple_call_num_args (stmt);

  /* Verify the required arguments in the original call:
     (dest, flag, size, fmt, ...).  */
  if (nargs < 4)
    return NULL_TREE;
  dest = gimple_call_arg (stmt, 0);
  if (!validate_arg (dest, POINTER_TYPE))
    return NULL_TREE;
  flag = gimple_call_arg (stmt, 1);
  if (!validate_arg (flag, INTEGER_TYPE))
    return NULL_TREE;
  size = gimple_call_arg (stmt, 2);
  if (!validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;
  fmt = gimple_call_arg (stmt, 3);
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  if (! host_integerp (size, 1))
    return NULL_TREE;

  /* LEN will hold the statically-known output length, if any.  */
  len = NULL_TREE;

  if (!init_target_chars ())
    return NULL_TREE;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str != NULL)
    {
      /* If the format doesn't contain % args or %%, we know the size.  */
      if (strchr (fmt_str, target_percent) == 0)
	{
	  if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
	    len = build_int_cstu (size_type_node, strlen (fmt_str));
	}
      /* If the format is "%s" and first ... argument is a string literal,
	 we know the size too.  */
      else if (fcode == BUILT_IN_SPRINTF_CHK
	       && strcmp (fmt_str, target_percent_s) == 0)
	{
	  tree arg;

	  if (nargs == 5)
	    {
	      arg = gimple_call_arg (stmt, 4);
	      if (validate_arg (arg, POINTER_TYPE))
		{
		  len = c_strlen (arg, 1);
		  if (! len || ! host_integerp (len, 1))
		    len = NULL_TREE;
		}
	    }
	}
    }

  /* NOTE(review): an all-ones SIZE presumably encodes "object size
     unknown", in which case the bound check is skipped -- confirm
     against the callers computing the size argument.  */
  if (! integer_all_onesp (size))
    {
      /* Without a known length provably below SIZE, keep the checked
	 variant.  */
      if (! len || ! tree_int_cst_lt (len, size))
	return NULL_TREE;
    }

  /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      if (fmt_str == NULL)
	return NULL_TREE;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return NULL_TREE;
    }

  /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available.  */
  fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
		      ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
  if (!fn)
    return NULL_TREE;

  /* Drop the first 4 args (dest, flag, size, fmt) and rebuild as
     {,v}sprintf (dest, fmt, ...).  */
  return gimple_rewrite_call_expr (stmt, 4, fn, 2, dest, fmt);
}
13385
/* Fold a call STMT to {,v}snprintf.  Return NULL_TREE if
   a normal call should be emitted rather than expanding the function
   inline.  FCODE is either BUILT_IN_SNPRINTF_CHK or
   BUILT_IN_VSNPRINTF_CHK.  If MAXLEN is not NULL, it is maximum length
   passed as second argument.  On success the call is rewritten into
   plain {,v}snprintf.  */

tree
gimple_fold_builtin_snprintf_chk (gimple stmt, tree maxlen,
                                  enum built_in_function fcode)
{
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;

  /* Verify the required arguments in the original call:
     (dest, len, flag, size, fmt, ...).  */
  if (gimple_call_num_args (stmt) < 5)
    return NULL_TREE;
  dest = gimple_call_arg (stmt, 0);
  if (!validate_arg (dest, POINTER_TYPE))
    return NULL_TREE;
  len = gimple_call_arg (stmt, 1);
  if (!validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;
  flag = gimple_call_arg (stmt, 2);
  if (!validate_arg (flag, INTEGER_TYPE))
    return NULL_TREE;
  size = gimple_call_arg (stmt, 3);
  if (!validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;
  fmt = gimple_call_arg (stmt, 4);
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  if (! host_integerp (size, 1))
    return NULL_TREE;

  /* NOTE(review): an all-ones SIZE presumably encodes "object size
     unknown", in which case the bound check is skipped -- confirm
     against the callers computing the size argument.  */
  if (! integer_all_onesp (size))
    {
      if (! host_integerp (len, 1))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
	    return NULL_TREE;
	}
      else
	maxlen = len;

      /* SIZE must cover the (maximum) length requested.  */
      if (tree_int_cst_lt (size, maxlen))
	return NULL_TREE;
    }

  if (!init_target_chars ())
    return NULL_TREE;

  /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      fmt_str = c_getstr (fmt);
      if (fmt_str == NULL)
	return NULL_TREE;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return NULL_TREE;
    }

  /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
     available.  */
  fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
		      ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
  if (!fn)
    return NULL_TREE;

  /* Drop the first 5 args (dest, len, flag, size, fmt) and rebuild as
     {,v}snprintf (dest, len, fmt, ...).  */
  return gimple_rewrite_call_expr (stmt, 5, fn, 3, dest, len, fmt);
}
13462
13463 /* Builtins with folding operations that operate on "..." arguments
13464 need special handling; we need to store the arguments in a convenient
13465 data structure before attempting any folding. Fortunately there are
13466 only a few builtins that fall into this category. FNDECL is the
13467 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
13468 result of the function call is ignored. */
13469
13470 static tree
13471 gimple_fold_builtin_varargs (tree fndecl, gimple stmt,
13472 bool ignore ATTRIBUTE_UNUSED)
13473 {
13474 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
13475 tree ret = NULL_TREE;
13476
13477 switch (fcode)
13478 {
13479 case BUILT_IN_SPRINTF_CHK:
13480 case BUILT_IN_VSPRINTF_CHK:
13481 ret = gimple_fold_builtin_sprintf_chk (stmt, fcode);
13482 break;
13483
13484 case BUILT_IN_SNPRINTF_CHK:
13485 case BUILT_IN_VSNPRINTF_CHK:
13486 ret = gimple_fold_builtin_snprintf_chk (stmt, NULL_TREE, fcode);
13487
13488 default:
13489 break;
13490 }
13491 if (ret)
13492 {
13493 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
13494 TREE_NO_WARNING (ret) = 1;
13495 return ret;
13496 }
13497 return NULL_TREE;
13498 }
13499
/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  STMT is the
   GIMPLE_CALL to fold; IGNORE is true if its result is unused.
   Returns the folded replacement tree, or NULL_TREE if no folding
   was done.  */

tree
fold_call_stmt (gimple stmt, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = gimple_call_fndecl (stmt);
  location_t loc = gimple_location (stmt);
  /* Only fold direct calls to builtins, and never calls that use
     __builtin_va_arg_pack (their argument list is not yet final).  */
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      && !gimple_call_va_arg_pack_p (stmt))
    {
      int nargs = gimple_call_num_args (stmt);

      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;
      /* FIXME: Don't use a list in this interface.  */
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	{
	  /* Machine-dependent builtins go through the target hook,
	     which takes a TREE_LIST; build it back to front.  */
	  tree arglist = NULL_TREE;
	  int i;
	  for (i = nargs - 1; i >= 0; i--)
	    arglist = tree_cons (NULL_TREE, gimple_call_arg (stmt, i), arglist);
	  return targetm.fold_builtin (fndecl, arglist, ignore);
	}
      else
	{
	  /* Fixed-arity folding first, then the varargs cases.  */
	  if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
	    {
	      tree args[MAX_ARGS_TO_FOLD_BUILTIN];
	      int i;
	      for (i = 0; i < nargs; i++)
		args[i] = gimple_call_arg (stmt, i);
	      ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
	    }
	  if (!ret)
	    ret = gimple_fold_builtin_varargs (fndecl, stmt, ignore);
	  if (ret)
	    {
	      /* Propagate location information from original call to
		 expansion of builtin.  Otherwise things like
		 maybe_emit_chk_warning, that operate on the expansion
		 of a builtin, will use the wrong location information.  */
	      if (gimple_has_location (stmt))
		{
		  tree realret = ret;
		  /* Look through the warning-suppressing NOP_EXPR
		     wrapper added by gimple_fold_builtin_varargs.  */
		  if (TREE_CODE (ret) == NOP_EXPR)
		    realret = TREE_OPERAND (ret, 0);
		  if (CAN_HAVE_LOCATION_P (realret)
		      && !EXPR_HAS_LOCATION (realret))
		    SET_EXPR_LOCATION (realret, loc);
		  return realret;
		}
	      return ret;
	    }
	}
    }
  return NULL_TREE;
}
13562
13563 /* Look up the function in built_in_decls that corresponds to DECL
13564 and set ASMSPEC as its user assembler name. DECL must be a
13565 function decl that declares a builtin. */
13566
13567 void
13568 set_builtin_user_assembler_name (tree decl, const char *asmspec)
13569 {
13570 tree builtin;
13571 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
13572 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
13573 && asmspec != 0);
13574
13575 builtin = built_in_decls [DECL_FUNCTION_CODE (decl)];
13576 set_user_assembler_name (builtin, asmspec);
13577 switch (DECL_FUNCTION_CODE (decl))
13578 {
13579 case BUILT_IN_MEMCPY:
13580 init_block_move_fn (asmspec);
13581 memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
13582 break;
13583 case BUILT_IN_MEMSET:
13584 init_block_clear_fn (asmspec);
13585 memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
13586 break;
13587 case BUILT_IN_MEMMOVE:
13588 memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
13589 break;
13590 case BUILT_IN_MEMCMP:
13591 memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
13592 break;
13593 case BUILT_IN_ABORT:
13594 abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
13595 break;
13596 default:
13597 break;
13598 }
13599 }