[gcc.git] / gcc / builtins.c
1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
4 Free Software Foundation, Inc.
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
21 02110-1301, USA. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "machmode.h"
28 #include "real.h"
29 #include "rtl.h"
30 #include "tree.h"
31 #include "tree-gimple.h"
32 #include "flags.h"
33 #include "regs.h"
34 #include "hard-reg-set.h"
35 #include "except.h"
36 #include "function.h"
37 #include "insn-config.h"
38 #include "expr.h"
39 #include "optabs.h"
40 #include "libfuncs.h"
41 #include "recog.h"
42 #include "output.h"
43 #include "typeclass.h"
44 #include "toplev.h"
45 #include "predict.h"
46 #include "tm_p.h"
47 #include "target.h"
48 #include "langhooks.h"
49 #include "basic-block.h"
50 #include "tree-mudflap.h"
51 #include "tree-flow.h"
52 #include "value-prof.h"
53 #include "diagnostic.h"
54
55 #ifndef PAD_VARARGS_DOWN
56 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
57 #endif
58
59 /* Define the names of the builtin function types and codes. */
60 const char *const built_in_class_names[4]
61 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
62
63 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
64 const char * built_in_names[(int) END_BUILTINS] =
65 {
66 #include "builtins.def"
67 };
68 #undef DEF_BUILTIN
69
 70 /* Set up an array of _DECL trees; make sure each element is
 71 initialized to NULL_TREE. */
72 tree built_in_decls[(int) END_BUILTINS];
 73 /* Declarations used when constructing the builtin implicitly in the compiler.
 74 An entry may be NULL_TREE when the implicit declaration is invalid (for
 75 instance, the runtime is not required to implement the call in all cases). */
76 tree implicit_built_in_decls[(int) END_BUILTINS];
77
78 static const char *c_getstr (tree);
79 static rtx c_readstr (const char *, enum machine_mode);
80 static int target_char_cast (tree, char *);
81 static rtx get_memory_rtx (tree, tree);
82 static int apply_args_size (void);
83 static int apply_result_size (void);
84 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
85 static rtx result_vector (int, rtx);
86 #endif
87 static void expand_builtin_update_setjmp_buf (rtx);
88 static void expand_builtin_prefetch (tree);
89 static rtx expand_builtin_apply_args (void);
90 static rtx expand_builtin_apply_args_1 (void);
91 static rtx expand_builtin_apply (rtx, rtx, rtx);
92 static void expand_builtin_return (rtx);
93 static enum type_class type_to_class (tree);
94 static rtx expand_builtin_classify_type (tree);
95 static void expand_errno_check (tree, rtx);
96 static rtx expand_builtin_mathfn (tree, rtx, rtx);
97 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
98 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
99 static rtx expand_builtin_interclass_mathfn (tree, rtx, rtx);
100 static rtx expand_builtin_sincos (tree);
101 static rtx expand_builtin_cexpi (tree, rtx, rtx);
102 static rtx expand_builtin_int_roundingfn (tree, rtx, rtx);
103 static rtx expand_builtin_int_roundingfn_2 (tree, rtx, rtx);
104 static rtx expand_builtin_args_info (tree);
105 static rtx expand_builtin_next_arg (void);
106 static rtx expand_builtin_va_start (tree);
107 static rtx expand_builtin_va_end (tree);
108 static rtx expand_builtin_va_copy (tree);
109 static rtx expand_builtin_memchr (tree, rtx, enum machine_mode);
110 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
111 static rtx expand_builtin_strcmp (tree, rtx, enum machine_mode);
112 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
113 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
114 static rtx expand_builtin_strcat (tree, tree, rtx, enum machine_mode);
115 static rtx expand_builtin_strncat (tree, rtx, enum machine_mode);
116 static rtx expand_builtin_strspn (tree, rtx, enum machine_mode);
117 static rtx expand_builtin_strcspn (tree, rtx, enum machine_mode);
118 static rtx expand_builtin_memcpy (tree, rtx, enum machine_mode);
119 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
120 static rtx expand_builtin_mempcpy_args (tree, tree, tree, tree, rtx,
121 enum machine_mode, int);
122 static rtx expand_builtin_memmove (tree, rtx, enum machine_mode, int);
123 static rtx expand_builtin_memmove_args (tree, tree, tree, tree, rtx,
124 enum machine_mode, int);
125 static rtx expand_builtin_bcopy (tree, int);
126 static rtx expand_builtin_strcpy (tree, tree, rtx, enum machine_mode);
127 static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx, enum machine_mode);
128 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
129 static rtx builtin_strncpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
130 static rtx expand_builtin_strncpy (tree, rtx, enum machine_mode);
131 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
132 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
133 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
134 static rtx expand_builtin_bzero (tree);
135 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
136 static rtx expand_builtin_strstr (tree, rtx, enum machine_mode);
137 static rtx expand_builtin_strpbrk (tree, rtx, enum machine_mode);
138 static rtx expand_builtin_strchr (tree, rtx, enum machine_mode);
139 static rtx expand_builtin_strrchr (tree, rtx, enum machine_mode);
140 static rtx expand_builtin_alloca (tree, rtx);
141 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
142 static rtx expand_builtin_frame_address (tree, tree);
143 static rtx expand_builtin_fputs (tree, rtx, bool);
144 static rtx expand_builtin_printf (tree, rtx, enum machine_mode, bool);
145 static rtx expand_builtin_fprintf (tree, rtx, enum machine_mode, bool);
146 static rtx expand_builtin_sprintf (tree, rtx, enum machine_mode);
147 static tree stabilize_va_list (tree, int);
148 static rtx expand_builtin_expect (tree, rtx);
149 static tree fold_builtin_constant_p (tree);
150 static tree fold_builtin_expect (tree);
151 static tree fold_builtin_classify_type (tree);
152 static tree fold_builtin_strlen (tree);
153 static tree fold_builtin_inf (tree, int);
154 static tree fold_builtin_nan (tree, tree, int);
155 static tree rewrite_call_expr (tree, int, tree, int, ...);
156 static bool validate_arg (tree, enum tree_code code);
157 static bool integer_valued_real_p (tree);
158 static tree fold_trunc_transparent_mathfn (tree, tree);
159 static bool readonly_data_expr (tree);
160 static rtx expand_builtin_fabs (tree, rtx, rtx);
161 static rtx expand_builtin_signbit (tree, rtx);
162 static tree fold_builtin_sqrt (tree, tree);
163 static tree fold_builtin_cbrt (tree, tree);
164 static tree fold_builtin_pow (tree, tree, tree, tree);
165 static tree fold_builtin_powi (tree, tree, tree, tree);
166 static tree fold_builtin_cos (tree, tree, tree);
167 static tree fold_builtin_cosh (tree, tree, tree);
168 static tree fold_builtin_tan (tree, tree);
169 static tree fold_builtin_trunc (tree, tree);
170 static tree fold_builtin_floor (tree, tree);
171 static tree fold_builtin_ceil (tree, tree);
172 static tree fold_builtin_round (tree, tree);
173 static tree fold_builtin_int_roundingfn (tree, tree);
174 static tree fold_builtin_bitop (tree, tree);
175 static tree fold_builtin_memory_op (tree, tree, tree, tree, bool, int);
176 static tree fold_builtin_strchr (tree, tree, tree);
177 static tree fold_builtin_memchr (tree, tree, tree, tree);
178 static tree fold_builtin_memcmp (tree, tree, tree);
179 static tree fold_builtin_strcmp (tree, tree);
180 static tree fold_builtin_strncmp (tree, tree, tree);
181 static tree fold_builtin_signbit (tree, tree);
182 static tree fold_builtin_copysign (tree, tree, tree, tree);
183 static tree fold_builtin_isascii (tree);
184 static tree fold_builtin_toascii (tree);
185 static tree fold_builtin_isdigit (tree);
186 static tree fold_builtin_fabs (tree, tree);
187 static tree fold_builtin_abs (tree, tree);
188 static tree fold_builtin_unordered_cmp (tree, tree, tree, enum tree_code,
189 enum tree_code);
190 static tree fold_builtin_n (tree, tree *, int, bool);
191 static tree fold_builtin_0 (tree, bool);
192 static tree fold_builtin_1 (tree, tree, bool);
193 static tree fold_builtin_2 (tree, tree, tree, bool);
194 static tree fold_builtin_3 (tree, tree, tree, tree, bool);
195 static tree fold_builtin_4 (tree, tree, tree, tree, tree, bool);
196 static tree fold_builtin_varargs (tree, tree, bool);
197
198 static tree fold_builtin_strpbrk (tree, tree, tree);
199 static tree fold_builtin_strstr (tree, tree, tree);
200 static tree fold_builtin_strrchr (tree, tree, tree);
201 static tree fold_builtin_strcat (tree, tree);
202 static tree fold_builtin_strncat (tree, tree, tree);
203 static tree fold_builtin_strspn (tree, tree);
204 static tree fold_builtin_strcspn (tree, tree);
205 static tree fold_builtin_sprintf (tree, tree, tree, int);
206
207 static rtx expand_builtin_object_size (tree);
208 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
209 enum built_in_function);
210 static void maybe_emit_chk_warning (tree, enum built_in_function);
211 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
212 static tree fold_builtin_object_size (tree, tree);
213 static tree fold_builtin_strcat_chk (tree, tree, tree, tree);
214 static tree fold_builtin_strncat_chk (tree, tree, tree, tree, tree);
215 static tree fold_builtin_sprintf_chk (tree, enum built_in_function);
216 static tree fold_builtin_printf (tree, tree, tree, bool, enum built_in_function);
217 static tree fold_builtin_fprintf (tree, tree, tree, tree, bool,
218 enum built_in_function);
219 static bool init_target_chars (void);
220
221 static unsigned HOST_WIDE_INT target_newline;
222 static unsigned HOST_WIDE_INT target_percent;
223 static unsigned HOST_WIDE_INT target_c;
224 static unsigned HOST_WIDE_INT target_s;
225 static char target_percent_c[3];
226 static char target_percent_s[3];
227 static char target_percent_s_newline[4];
228 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
229 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
230 static tree do_mpfr_arg2 (tree, tree, tree,
231 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
232 static tree do_mpfr_arg3 (tree, tree, tree, tree,
233 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
234 static tree do_mpfr_sincos (tree, tree, tree);
235 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
236 static tree do_mpfr_bessel_n (tree, tree, tree,
237 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
238 const REAL_VALUE_TYPE *, bool);
239 static tree do_mpfr_remquo (tree, tree, tree);
240 static tree do_mpfr_lgamma_r (tree, tree, tree);
241 #endif
242
 243 /* This array records the insn_code of insns to implement the signbit
244 function. */
245 enum insn_code signbit_optab[NUM_MACHINE_MODES];
246
247
 248 /* Return true if NODE should be considered for inline expansion regardless
 249 of the optimization level. This is the case whenever a function is invoked
 250 under its "internal" name, which normally contains the prefix "__builtin". */
251
252 static bool called_as_built_in (tree node)
253 {
254 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
255 if (strncmp (name, "__builtin_", 10) == 0)
256 return true;
257 if (strncmp (name, "__sync_", 7) == 0)
258 return true;
259 return false;
260 }
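/* For illustration: a call written as __builtin_memcpy (d, s, n) or
   __sync_fetch_and_add (&v, 1) satisfies this test, while a plain
   memcpy (d, s, n) does not, even though it maps to the same built-in.  */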
261
262 /* Return the alignment in bits of EXP, a pointer valued expression.
263 But don't return more than MAX_ALIGN no matter what.
264 The alignment returned is, by default, the alignment of the thing that
265 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
266
267 Otherwise, look at the expression to see if we can do better, i.e., if the
268 expression is actually pointing at an object whose alignment is tighter. */
269
270 int
271 get_pointer_alignment (tree exp, unsigned int max_align)
272 {
273 unsigned int align, inner;
274
275 /* We rely on TER to compute accurate alignment information. */
276 if (!(optimize && flag_tree_ter))
277 return 0;
278
279 if (!POINTER_TYPE_P (TREE_TYPE (exp)))
280 return 0;
281
282 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
283 align = MIN (align, max_align);
284
285 while (1)
286 {
287 switch (TREE_CODE (exp))
288 {
289 case NOP_EXPR:
290 case CONVERT_EXPR:
291 case NON_LVALUE_EXPR:
292 exp = TREE_OPERAND (exp, 0);
293 if (! POINTER_TYPE_P (TREE_TYPE (exp)))
294 return align;
295
296 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
297 align = MIN (inner, max_align);
298 break;
299
300 case POINTER_PLUS_EXPR:
301 /* If sum of pointer + int, restrict our maximum alignment to that
302 imposed by the integer. If not, we can't do any better than
303 ALIGN. */
304 if (! host_integerp (TREE_OPERAND (exp, 1), 1))
305 return align;
306
307 while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
308 & (max_align / BITS_PER_UNIT - 1))
309 != 0)
310 max_align >>= 1;
311
312 exp = TREE_OPERAND (exp, 0);
313 break;
314
315 case ADDR_EXPR:
316 /* See what we are pointing at and look at its alignment. */
317 exp = TREE_OPERAND (exp, 0);
318 inner = max_align;
319 if (handled_component_p (exp))
320 {
321 HOST_WIDE_INT bitsize, bitpos;
322 tree offset;
323 enum machine_mode mode;
324 int unsignedp, volatilep;
325
326 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
327 &mode, &unsignedp, &volatilep, true);
328 if (bitpos)
329 inner = MIN (inner, (unsigned) (bitpos & -bitpos));
330 if (offset && TREE_CODE (offset) == PLUS_EXPR
331 && host_integerp (TREE_OPERAND (offset, 1), 1))
332 {
333 /* Any overflow in calculating offset_bits won't change
334 the alignment. */
335 unsigned offset_bits
336 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
337 * BITS_PER_UNIT);
338
339 if (offset_bits)
340 inner = MIN (inner, (offset_bits & -offset_bits));
341 offset = TREE_OPERAND (offset, 0);
342 }
343 if (offset && TREE_CODE (offset) == MULT_EXPR
344 && host_integerp (TREE_OPERAND (offset, 1), 1))
345 {
346 /* Any overflow in calculating offset_factor won't change
347 the alignment. */
348 unsigned offset_factor
349 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
350 * BITS_PER_UNIT);
351
352 if (offset_factor)
353 inner = MIN (inner, (offset_factor & -offset_factor));
354 }
355 else if (offset)
356 inner = MIN (inner, BITS_PER_UNIT);
357 }
358 if (DECL_P (exp))
359 align = MIN (inner, DECL_ALIGN (exp));
360 #ifdef CONSTANT_ALIGNMENT
361 else if (CONSTANT_CLASS_P (exp))
362 align = MIN (inner, (unsigned)CONSTANT_ALIGNMENT (exp, align));
363 #endif
364 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR
365 || TREE_CODE (exp) == INDIRECT_REF)
366 align = MIN (TYPE_ALIGN (TREE_TYPE (exp)), inner);
367 else
368 align = MIN (align, inner);
369 return MIN (align, max_align);
370
371 default:
372 return align;
373 }
374 }
375 }
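/* A minimal illustration (layout assumed, not taken from this file):
   assuming the TER-based check above passes, then given
     struct { char c; int i; } s __attribute__ ((aligned (8)));
   get_pointer_alignment on the expression &s.i reports 32 bits, since
   the member's 4-byte offset caps the 64-bit alignment of S.  */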
376
 377 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
 378 way, because the string could contain a zero byte in the middle.
379 TREE_STRING_LENGTH is the size of the character array, not the string.
380
381 ONLY_VALUE should be nonzero if the result is not going to be emitted
382 into the instruction stream and zero if it is going to be expanded.
383 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
384 is returned, otherwise NULL, since
385 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
386 evaluate the side-effects.
387
388 The value returned is of type `ssizetype'.
389
390 Unfortunately, string_constant can't access the values of const char
391 arrays with initializers, so neither can we do so here. */
392
393 tree
394 c_strlen (tree src, int only_value)
395 {
396 tree offset_node;
397 HOST_WIDE_INT offset;
398 int max;
399 const char *ptr;
400
401 STRIP_NOPS (src);
402 if (TREE_CODE (src) == COND_EXPR
403 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
404 {
405 tree len1, len2;
406
407 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
408 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
409 if (tree_int_cst_equal (len1, len2))
410 return len1;
411 }
412
413 if (TREE_CODE (src) == COMPOUND_EXPR
414 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
415 return c_strlen (TREE_OPERAND (src, 1), only_value);
416
417 src = string_constant (src, &offset_node);
418 if (src == 0)
419 return NULL_TREE;
420
421 max = TREE_STRING_LENGTH (src) - 1;
422 ptr = TREE_STRING_POINTER (src);
423
424 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
425 {
426 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
427 compute the offset to the following null if we don't know where to
428 start searching for it. */
429 int i;
430
431 for (i = 0; i < max; i++)
432 if (ptr[i] == 0)
433 return NULL_TREE;
434
435 /* We don't know the starting offset, but we do know that the string
436 has no internal zero bytes. We can assume that the offset falls
437 within the bounds of the string; otherwise, the programmer deserves
438 what he gets. Subtract the offset from the length of the string,
439 and return that. This would perhaps not be valid if we were dealing
440 with named arrays in addition to literal string constants. */
441
442 return size_diffop (size_int (max), offset_node);
443 }
444
445 /* We have a known offset into the string. Start searching there for
446 a null character if we can represent it as a single HOST_WIDE_INT. */
447 if (offset_node == 0)
448 offset = 0;
449 else if (! host_integerp (offset_node, 0))
450 offset = -1;
451 else
452 offset = tree_low_cst (offset_node, 0);
453
454 /* If the offset is known to be out of bounds, warn, and call strlen at
455 runtime. */
456 if (offset < 0 || offset > max)
457 {
458 warning (0, "offset outside bounds of constant string");
459 return NULL_TREE;
460 }
461
462 /* Use strlen to search for the first zero byte. Since any strings
463 constructed with build_string will have nulls appended, we win even
464 if we get handed something like (char[4])"abcd".
465
466 Since OFFSET is our starting index into the string, no further
467 calculation is needed. */
468 return ssize_int (strlen (ptr + offset));
469 }
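/* For illustration: c_strlen on the tree for "hello" + 2 folds to
   ssize_int (3), whereas "foo\0bar" with a non-constant offset yields
   NULL_TREE, because the embedded NUL makes the length unknowable.  */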
470
471 /* Return a char pointer for a C string if it is a string constant
472 or sum of string constant and integer constant. */
473
474 static const char *
475 c_getstr (tree src)
476 {
477 tree offset_node;
478
479 src = string_constant (src, &offset_node);
480 if (src == 0)
481 return 0;
482
483 if (offset_node == 0)
484 return TREE_STRING_POINTER (src);
485 else if (!host_integerp (offset_node, 1)
486 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
487 return 0;
488
489 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
490 }
491
492 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
493 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
494
495 static rtx
496 c_readstr (const char *str, enum machine_mode mode)
497 {
498 HOST_WIDE_INT c[2];
499 HOST_WIDE_INT ch;
500 unsigned int i, j;
501
502 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
503
504 c[0] = 0;
505 c[1] = 0;
506 ch = 1;
507 for (i = 0; i < GET_MODE_SIZE (mode); i++)
508 {
509 j = i;
510 if (WORDS_BIG_ENDIAN)
511 j = GET_MODE_SIZE (mode) - i - 1;
512 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
513 && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
514 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
515 j *= BITS_PER_UNIT;
516 gcc_assert (j <= 2 * HOST_BITS_PER_WIDE_INT);
517
518 if (ch)
519 ch = (unsigned char) str[i];
520 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
521 }
522 return immed_double_const (c[0], c[1], mode);
523 }
524
 525 /* Cast a target constant CST to target CHAR. If that value fits into the
 526 host char type, return zero and store the value in the variable pointed to
 527 by P; otherwise return 1. */
528
529 static int
530 target_char_cast (tree cst, char *p)
531 {
532 unsigned HOST_WIDE_INT val, hostval;
533
534 if (!host_integerp (cst, 1)
535 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
536 return 1;
537
538 val = tree_low_cst (cst, 1);
539 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
540 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
541
542 hostval = val;
543 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
544 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
545
546 if (val != hostval)
547 return 1;
548
549 *p = hostval;
550 return 0;
551 }
552
553 /* Similar to save_expr, but assumes that arbitrary code is not executed
554 in between the multiple evaluations. In particular, we assume that a
555 non-addressable local variable will not be modified. */
556
557 static tree
558 builtin_save_expr (tree exp)
559 {
560 if (TREE_ADDRESSABLE (exp) == 0
561 && (TREE_CODE (exp) == PARM_DECL
562 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp))))
563 return exp;
564
565 return save_expr (exp);
566 }
567
568 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
569 times to get the address of either a higher stack frame, or a return
570 address located within it (depending on FNDECL_CODE). */
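/* For illustration: in user code,
     void *ra = __builtin_return_address (0);
     void *fp = __builtin_frame_address (1);
   both reach this routine, with COUNT taken from the constant argument
   and FNDECL_CODE distinguishing the two built-ins.  */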
571
572 static rtx
573 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
574 {
575 int i;
576
577 #ifdef INITIAL_FRAME_ADDRESS_RTX
578 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
579 #else
580 rtx tem;
581
582 /* For a zero count with __builtin_return_address, we don't care what
583 frame address we return, because target-specific definitions will
584 override us. Therefore frame pointer elimination is OK, and using
585 the soft frame pointer is OK.
586
587 For a nonzero count, or a zero count with __builtin_frame_address,
588 we require a stable offset from the current frame pointer to the
589 previous one, so we must use the hard frame pointer, and
590 we must disable frame pointer elimination. */
591 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
592 tem = frame_pointer_rtx;
593 else
594 {
595 tem = hard_frame_pointer_rtx;
596
597 /* Tell reload not to eliminate the frame pointer. */
598 current_function_accesses_prior_frames = 1;
599 }
600 #endif
601
602 /* Some machines need special handling before we can access
603 arbitrary frames. For example, on the SPARC, we must first flush
604 all register windows to the stack. */
605 #ifdef SETUP_FRAME_ADDRESSES
606 if (count > 0)
607 SETUP_FRAME_ADDRESSES ();
608 #endif
609
610 /* On the SPARC, the return address is not in the frame, it is in a
611 register. There is no way to access it off of the current frame
612 pointer, but it can be accessed off the previous frame pointer by
613 reading the value from the register window save area. */
614 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
615 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
616 count--;
617 #endif
618
619 /* Scan back COUNT frames to the specified frame. */
620 for (i = 0; i < count; i++)
621 {
622 /* Assume the dynamic chain pointer is in the word that the
623 frame address points to, unless otherwise specified. */
624 #ifdef DYNAMIC_CHAIN_ADDRESS
625 tem = DYNAMIC_CHAIN_ADDRESS (tem);
626 #endif
627 tem = memory_address (Pmode, tem);
628 tem = gen_frame_mem (Pmode, tem);
629 tem = copy_to_reg (tem);
630 }
631
632 /* For __builtin_frame_address, return what we've got. But, on
633 the SPARC for example, we may have to add a bias. */
634 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
635 #ifdef FRAME_ADDR_RTX
636 return FRAME_ADDR_RTX (tem);
637 #else
638 return tem;
639 #endif
640
641 /* For __builtin_return_address, get the return address from that frame. */
642 #ifdef RETURN_ADDR_RTX
643 tem = RETURN_ADDR_RTX (count, tem);
644 #else
645 tem = memory_address (Pmode,
646 plus_constant (tem, GET_MODE_SIZE (Pmode)));
647 tem = gen_frame_mem (Pmode, tem);
648 #endif
649 return tem;
650 }
651
652 /* Alias set used for setjmp buffer. */
653 static HOST_WIDE_INT setjmp_alias_set = -1;
654
655 /* Construct the leading half of a __builtin_setjmp call. Control will
656 return to RECEIVER_LABEL. This is also called directly by the SJLJ
657 exception handling code. */
658
659 void
660 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
661 {
662 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
663 rtx stack_save;
664 rtx mem;
665
666 if (setjmp_alias_set == -1)
667 setjmp_alias_set = new_alias_set ();
668
669 buf_addr = convert_memory_address (Pmode, buf_addr);
670
671 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
672
673 /* We store the frame pointer and the address of receiver_label in
674 the buffer and use the rest of it for the stack save area, which
675 is machine-dependent. */
676
677 mem = gen_rtx_MEM (Pmode, buf_addr);
678 set_mem_alias_set (mem, setjmp_alias_set);
679 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
680
681 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
682 set_mem_alias_set (mem, setjmp_alias_set);
683
684 emit_move_insn (validize_mem (mem),
685 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
686
687 stack_save = gen_rtx_MEM (sa_mode,
688 plus_constant (buf_addr,
689 2 * GET_MODE_SIZE (Pmode)));
690 set_mem_alias_set (stack_save, setjmp_alias_set);
691 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
692
693 /* If there is further processing to do, do it. */
694 #ifdef HAVE_builtin_setjmp_setup
695 if (HAVE_builtin_setjmp_setup)
696 emit_insn (gen_builtin_setjmp_setup (buf_addr));
697 #endif
698
 699 /* Tell optimize_save_area_alloca that extra work will need to
 700 be done during alloca. */
701 current_function_calls_setjmp = 1;
702
703 /* We have a nonlocal label. */
704 current_function_has_nonlocal_label = 1;
705 }
706
707 /* Construct the trailing part of a __builtin_setjmp call. This is
708 also called directly by the SJLJ exception handling code. */
709
710 void
711 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
712 {
713 /* Clobber the FP when we get here, so we have to make sure it's
714 marked as used by this function. */
715 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
716
717 /* Mark the static chain as clobbered here so life information
718 doesn't get messed up for it. */
719 emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));
720
721 /* Now put in the code to restore the frame pointer, and argument
722 pointer, if needed. */
723 #ifdef HAVE_nonlocal_goto
724 if (! HAVE_nonlocal_goto)
725 #endif
726 {
727 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
728 /* This might change the hard frame pointer in ways that aren't
729 apparent to early optimization passes, so force a clobber. */
730 emit_insn (gen_rtx_CLOBBER (VOIDmode, hard_frame_pointer_rtx));
731 }
732
733 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
734 if (fixed_regs[ARG_POINTER_REGNUM])
735 {
736 #ifdef ELIMINABLE_REGS
737 size_t i;
738 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
739
740 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
741 if (elim_regs[i].from == ARG_POINTER_REGNUM
742 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
743 break;
744
745 if (i == ARRAY_SIZE (elim_regs))
746 #endif
747 {
748 /* Now restore our arg pointer from the address at which it
749 was saved in our stack frame. */
750 emit_move_insn (virtual_incoming_args_rtx,
751 copy_to_reg (get_arg_pointer_save_area (cfun)));
752 }
753 }
754 #endif
755
756 #ifdef HAVE_builtin_setjmp_receiver
757 if (HAVE_builtin_setjmp_receiver)
758 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
759 else
760 #endif
761 #ifdef HAVE_nonlocal_goto_receiver
762 if (HAVE_nonlocal_goto_receiver)
763 emit_insn (gen_nonlocal_goto_receiver ());
764 else
765 #endif
766 { /* Nothing */ }
767
768 /* We must not allow the code we just generated to be reordered by
769 scheduling. Specifically, the update of the frame pointer must
770 happen immediately, not later. */
771 emit_insn (gen_blockage ());
772 }
773
774 /* __builtin_longjmp is passed a pointer to an array of five words (not
775 all will be used on all machines). It operates similarly to the C
776 library function of the same name, but is more efficient. Much of
777 the code below is copied from the handling of non-local gotos. */
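/* For illustration (NORMAL_PATH and LANDING_PAD are placeholder calls),
   the intended usage pattern is
     void *buf[5];
     if (__builtin_setjmp (buf) == 0)
       normal_path ();
     else
       landing_pad ();    -- reached via __builtin_longjmp (buf, 1)
   where the second argument to __builtin_longjmp must be the literal 1,
   matching the gcc_assert on VALUE below.  */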
778
779 static void
780 expand_builtin_longjmp (rtx buf_addr, rtx value)
781 {
782 rtx fp, lab, stack, insn, last;
783 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
784
785 if (setjmp_alias_set == -1)
786 setjmp_alias_set = new_alias_set ();
787
788 buf_addr = convert_memory_address (Pmode, buf_addr);
789
790 buf_addr = force_reg (Pmode, buf_addr);
791
792 /* We used to store value in static_chain_rtx, but that fails if pointers
793 are smaller than integers. We instead require that the user must pass
794 a second argument of 1, because that is what builtin_setjmp will
795 return. This also makes EH slightly more efficient, since we are no
796 longer copying around a value that we don't care about. */
797 gcc_assert (value == const1_rtx);
798
799 last = get_last_insn ();
800 #ifdef HAVE_builtin_longjmp
801 if (HAVE_builtin_longjmp)
802 emit_insn (gen_builtin_longjmp (buf_addr));
803 else
804 #endif
805 {
806 fp = gen_rtx_MEM (Pmode, buf_addr);
807 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
808 GET_MODE_SIZE (Pmode)));
809
810 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
811 2 * GET_MODE_SIZE (Pmode)));
812 set_mem_alias_set (fp, setjmp_alias_set);
813 set_mem_alias_set (lab, setjmp_alias_set);
814 set_mem_alias_set (stack, setjmp_alias_set);
815
816 /* Pick up FP, label, and SP from the block and jump. This code is
817 from expand_goto in stmt.c; see there for detailed comments. */
818 #ifdef HAVE_nonlocal_goto
819 if (HAVE_nonlocal_goto)
820 /* We have to pass a value to the nonlocal_goto pattern that will
821 get copied into the static_chain pointer, but it does not matter
822 what that value is, because builtin_setjmp does not use it. */
823 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
824 else
825 #endif
826 {
827 lab = copy_to_reg (lab);
828
829 emit_insn (gen_rtx_CLOBBER (VOIDmode,
830 gen_rtx_MEM (BLKmode,
831 gen_rtx_SCRATCH (VOIDmode))));
832 emit_insn (gen_rtx_CLOBBER (VOIDmode,
833 gen_rtx_MEM (BLKmode,
834 hard_frame_pointer_rtx)));
835
836 emit_move_insn (hard_frame_pointer_rtx, fp);
837 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
838
839 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
840 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
841 emit_indirect_jump (lab);
842 }
843 }
844
845 /* Search backwards and mark the jump insn as a non-local goto.
846 Note that this precludes the use of __builtin_longjmp to a
847 __builtin_setjmp target in the same function. However, we've
848 already cautioned the user that these functions are for
849 internal exception handling use only. */
850 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
851 {
852 gcc_assert (insn != last);
853
854 if (JUMP_P (insn))
855 {
856 REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO, const0_rtx,
857 REG_NOTES (insn));
858 break;
859 }
860 else if (CALL_P (insn))
861 break;
862 }
863 }
864
865 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
866 and the address of the save area. */
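/* For illustration (OUTER and INNER are placeholder names): a GNU C
   nested function that jumps to a label in its containing function, e.g.
     void outer (void)
     {
       void inner (void) { goto done; }
       inner ();
       return;
     done: ;
     }
   is lowered so that the goto in INNER becomes a call to this builtin.  */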
867
868 static rtx
869 expand_builtin_nonlocal_goto (tree exp)
870 {
871 tree t_label, t_save_area;
872 rtx r_label, r_save_area, r_fp, r_sp, insn;
873
874 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
875 return NULL_RTX;
876
877 t_label = CALL_EXPR_ARG (exp, 0);
878 t_save_area = CALL_EXPR_ARG (exp, 1);
879
880 r_label = expand_normal (t_label);
881 r_label = convert_memory_address (Pmode, r_label);
882 r_save_area = expand_normal (t_save_area);
883 r_save_area = convert_memory_address (Pmode, r_save_area);
884 r_fp = gen_rtx_MEM (Pmode, r_save_area);
885 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
886 plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));
887
888 current_function_has_nonlocal_goto = 1;
889
890 #ifdef HAVE_nonlocal_goto
891 /* ??? We no longer need to pass the static chain value, afaik. */
892 if (HAVE_nonlocal_goto)
893 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
894 else
895 #endif
896 {
897 r_label = copy_to_reg (r_label);
898
899 emit_insn (gen_rtx_CLOBBER (VOIDmode,
900 gen_rtx_MEM (BLKmode,
901 gen_rtx_SCRATCH (VOIDmode))));
902
903 emit_insn (gen_rtx_CLOBBER (VOIDmode,
904 gen_rtx_MEM (BLKmode,
905 hard_frame_pointer_rtx)));
906
907 /* Restore frame pointer for containing function.
908 This sets the actual hard register used for the frame pointer
909 to the location of the function's incoming static chain info.
910 The non-local goto handler will then adjust it to contain the
911 proper value and reload the argument pointer, if needed. */
912 emit_move_insn (hard_frame_pointer_rtx, r_fp);
913 emit_stack_restore (SAVE_NONLOCAL, r_sp, NULL_RTX);
914
915 /* USE of hard_frame_pointer_rtx added for consistency;
916 not clear if really needed. */
917 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
918 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
919 emit_indirect_jump (r_label);
920 }
921
922 /* Search backwards to the jump insn and mark it as a
923 non-local goto. */
924 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
925 {
926 if (JUMP_P (insn))
927 {
928 REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO,
929 const0_rtx, REG_NOTES (insn));
930 break;
931 }
932 else if (CALL_P (insn))
933 break;
934 }
935
936 return const0_rtx;
937 }
938
939 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
940 (not all will be used on all machines) that was passed to __builtin_setjmp.
941 It updates the stack pointer in that block to correspond to the current
942 stack pointer. */
943
944 static void
945 expand_builtin_update_setjmp_buf (rtx buf_addr)
946 {
947 enum machine_mode sa_mode = Pmode;
948 rtx stack_save;
949
950
951 #ifdef HAVE_save_stack_nonlocal
952 if (HAVE_save_stack_nonlocal)
953 sa_mode = insn_data[(int) CODE_FOR_save_stack_nonlocal].operand[0].mode;
954 #endif
955 #ifdef STACK_SAVEAREA_MODE
956 sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
957 #endif
958
959 stack_save
960 = gen_rtx_MEM (sa_mode,
961 memory_address
962 (sa_mode,
963 plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));
964
965 #ifdef HAVE_setjmp
966 if (HAVE_setjmp)
967 emit_insn (gen_setjmp ());
968 #endif
969
970 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
971 }
972
973 /* Expand a call to __builtin_prefetch. For a target that does not support
974 data prefetch, evaluate the memory address argument in case it has side
975 effects. */
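/* For illustration, a typical call is
     __builtin_prefetch (&a[i + 8], 0, 3);
   i.e. prefetch A[I+8] for reading with maximal temporal locality; the
   read/write and locality arguments must be integer constants, which is
   what the checks below enforce.  */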
976
977 static void
978 expand_builtin_prefetch (tree exp)
979 {
980 tree arg0, arg1, arg2;
981 int nargs;
982 rtx op0, op1, op2;
983
984 if (!validate_arglist (exp, POINTER_TYPE, 0))
985 return;
986
987 arg0 = CALL_EXPR_ARG (exp, 0);
988
989 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
990 zero (read) and argument 2 (locality) defaults to 3 (high degree of
991 locality). */
992 nargs = call_expr_nargs (exp);
993 if (nargs > 1)
994 arg1 = CALL_EXPR_ARG (exp, 1);
995 else
996 arg1 = integer_zero_node;
997 if (nargs > 2)
998 arg2 = CALL_EXPR_ARG (exp, 2);
999 else
1000 arg2 = build_int_cst (NULL_TREE, 3);
1001
1002 /* Argument 0 is an address. */
1003 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1004
1005 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1006 if (TREE_CODE (arg1) != INTEGER_CST)
1007 {
1008 error ("second argument to %<__builtin_prefetch%> must be a constant");
1009 arg1 = integer_zero_node;
1010 }
1011 op1 = expand_normal (arg1);
1012 /* Argument 1 must be either zero or one. */
1013 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1014 {
1015 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1016 " using zero");
1017 op1 = const0_rtx;
1018 }
1019
1020 /* Argument 2 (locality) must be a compile-time constant int. */
1021 if (TREE_CODE (arg2) != INTEGER_CST)
1022 {
1023 error ("third argument to %<__builtin_prefetch%> must be a constant");
1024 arg2 = integer_zero_node;
1025 }
1026 op2 = expand_normal (arg2);
1027 /* Argument 2 must be 0, 1, 2, or 3. */
1028 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1029 {
1030 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1031 op2 = const0_rtx;
1032 }
1033
1034 #ifdef HAVE_prefetch
1035 if (HAVE_prefetch)
1036 {
1037 if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
1038 (op0,
1039 insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
1040 || (GET_MODE (op0) != Pmode))
1041 {
1042 op0 = convert_memory_address (Pmode, op0);
1043 op0 = force_reg (Pmode, op0);
1044 }
1045 emit_insn (gen_prefetch (op0, op1, op2));
1046 }
1047 #endif
1048
1049 /* Don't do anything with direct references to volatile memory, but
1050 generate code to handle other side effects. */
1051 if (!MEM_P (op0) && side_effects_p (op0))
1052 emit_insn (op0);
1053 }
1054
1055 /* Get a MEM rtx for expression EXP which is the address of an operand
 1056 to be used in a string instruction (cmpstrsi, movmemsi, ...). LEN is
1057 the maximum length of the block of memory that might be accessed or
1058 NULL if unknown. */
1059
1060 static rtx
1061 get_memory_rtx (tree exp, tree len)
1062 {
1063 rtx addr = expand_expr (exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1064 rtx mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1065
1066 /* Get an expression we can use to find the attributes to assign to MEM.
1067 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
1068 we can. First remove any nops. */
1069 while ((TREE_CODE (exp) == NOP_EXPR || TREE_CODE (exp) == CONVERT_EXPR
1070 || TREE_CODE (exp) == NON_LVALUE_EXPR)
1071 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1072 exp = TREE_OPERAND (exp, 0);
1073
1074 if (TREE_CODE (exp) == ADDR_EXPR)
1075 exp = TREE_OPERAND (exp, 0);
1076 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
1077 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
1078 else
1079 exp = NULL;
1080
1081 /* Honor attributes derived from exp, except for the alias set
1082 (as builtin stringops may alias with anything) and the size
1083 (as stringops may access multiple array elements). */
1084 if (exp)
1085 {
1086 set_mem_attributes (mem, exp, 0);
1087
1088 /* Allow the string and memory builtins to overflow from one
1089 field into another, see http://gcc.gnu.org/PR23561.
1090 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
1091 memory accessed by the string or memory builtin will fit
1092 within the field. */
1093 if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
1094 {
1095 tree mem_expr = MEM_EXPR (mem);
1096 HOST_WIDE_INT offset = -1, length = -1;
1097 tree inner = exp;
1098
1099 while (TREE_CODE (inner) == ARRAY_REF
1100 || TREE_CODE (inner) == NOP_EXPR
1101 || TREE_CODE (inner) == CONVERT_EXPR
1102 || TREE_CODE (inner) == NON_LVALUE_EXPR
1103 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1104 || TREE_CODE (inner) == SAVE_EXPR)
1105 inner = TREE_OPERAND (inner, 0);
1106
1107 gcc_assert (TREE_CODE (inner) == COMPONENT_REF);
1108
1109 if (MEM_OFFSET (mem)
1110 && GET_CODE (MEM_OFFSET (mem)) == CONST_INT)
1111 offset = INTVAL (MEM_OFFSET (mem));
1112
1113 if (offset >= 0 && len && host_integerp (len, 0))
1114 length = tree_low_cst (len, 0);
1115
1116 while (TREE_CODE (inner) == COMPONENT_REF)
1117 {
1118 tree field = TREE_OPERAND (inner, 1);
1119 gcc_assert (! DECL_BIT_FIELD (field));
1120 gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
1121 gcc_assert (field == TREE_OPERAND (mem_expr, 1));
1122
1123 if (length >= 0
1124 && TYPE_SIZE_UNIT (TREE_TYPE (inner))
1125 && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (inner)), 0))
1126 {
1127 HOST_WIDE_INT size
1128 = tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (inner)), 0);
1129 /* If we can prove the memory starting at XEXP (mem, 0)
1130 and ending at XEXP (mem, 0) + LENGTH will fit into
1131 this field, we can keep that COMPONENT_REF in MEM_EXPR. */
1132 if (offset <= size
1133 && length <= size
1134 && offset + length <= size)
1135 break;
1136 }
1137
1138 if (offset >= 0
1139 && host_integerp (DECL_FIELD_OFFSET (field), 0))
1140 offset += tree_low_cst (DECL_FIELD_OFFSET (field), 0)
1141 + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1142 / BITS_PER_UNIT;
1143 else
1144 {
1145 offset = -1;
1146 length = -1;
1147 }
1148
1149 mem_expr = TREE_OPERAND (mem_expr, 0);
1150 inner = TREE_OPERAND (inner, 0);
1151 }
1152
1153 if (mem_expr == NULL)
1154 offset = -1;
1155 if (mem_expr != MEM_EXPR (mem))
1156 {
1157 set_mem_expr (mem, mem_expr);
1158 set_mem_offset (mem, offset >= 0 ? GEN_INT (offset) : NULL_RTX);
1159 }
1160 }
1161 set_mem_alias_set (mem, 0);
1162 set_mem_size (mem, NULL_RTX);
1163 }
1164
1165 return mem;
1166 }
1167 \f
1168 /* Built-in functions to perform an untyped call and return. */
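/* For illustration, these implement the GNU C "constructing calls" idiom
   (TARGET_FN is a placeholder and SIZE an assumed caller-supplied bound
   on the size of the pushed argument data, in bytes):
     void *args = __builtin_apply_args ();
     void *result = __builtin_apply ((void (*) ()) target_fn, args, SIZE);
     __builtin_return (result);
   which forwards the current function's arguments to TARGET_FN and then
   returns whatever TARGET_FN returned.  */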
1169
1170 /* For each register that may be used for calling a function, this
1171 gives a mode used to copy the register's value. VOIDmode indicates
1172 the register is not used for calling a function. If the machine
1173 has register windows, this gives only the outbound registers.
1174 INCOMING_REGNO gives the corresponding inbound register. */
1175 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
1176
1177 /* For each register that may be used for returning values, this gives
1178 a mode used to copy the register's value. VOIDmode indicates the
1179 register is not used for returning values. If the machine has
1180 register windows, this gives only the outbound registers.
1181 INCOMING_REGNO gives the corresponding inbound register. */
1182 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
1183
1184 /* For each register that may be used for calling a function, this
1185 gives the offset of that register into the block returned by
1186 __builtin_apply_args. 0 indicates that the register is not
1187 used for calling a function. */
1188 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
1189
1190 /* Return the size required for the block returned by __builtin_apply_args,
1191 and initialize apply_args_mode. */
1192
1193 static int
1194 apply_args_size (void)
1195 {
1196 static int size = -1;
1197 int align;
1198 unsigned int regno;
1199 enum machine_mode mode;
1200
1201 /* The values computed by this function never change. */
1202 if (size < 0)
1203 {
1204 /* The first value is the incoming arg-pointer. */
1205 size = GET_MODE_SIZE (Pmode);
1206
1207 /* The second value is the structure value address unless this is
1208 passed as an "invisible" first argument. */
1209 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1210 size += GET_MODE_SIZE (Pmode);
1211
1212 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1213 if (FUNCTION_ARG_REGNO_P (regno))
1214 {
1215 mode = reg_raw_mode[regno];
1216
1217 gcc_assert (mode != VOIDmode);
1218
1219 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1220 if (size % align != 0)
1221 size = CEIL (size, align) * align;
1222 apply_args_reg_offset[regno] = size;
1223 size += GET_MODE_SIZE (mode);
1224 apply_args_mode[regno] = mode;
1225 }
1226 else
1227 {
1228 apply_args_mode[regno] = VOIDmode;
1229 apply_args_reg_offset[regno] = 0;
1230 }
1231 }
1232 return size;
1233 }
1234
1235 /* Return the size required for the block returned by __builtin_apply,
1236 and initialize apply_result_mode. */
1237
1238 static int
1239 apply_result_size (void)
1240 {
1241 static int size = -1;
1242 int align, regno;
1243 enum machine_mode mode;
1244
1245 /* The values computed by this function never change. */
1246 if (size < 0)
1247 {
1248 size = 0;
1249
1250 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1251 if (FUNCTION_VALUE_REGNO_P (regno))
1252 {
1253 mode = reg_raw_mode[regno];
1254
1255 gcc_assert (mode != VOIDmode);
1256
1257 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1258 if (size % align != 0)
1259 size = CEIL (size, align) * align;
1260 size += GET_MODE_SIZE (mode);
1261 apply_result_mode[regno] = mode;
1262 }
1263 else
1264 apply_result_mode[regno] = VOIDmode;
1265
1266 /* Allow targets that use untyped_call and untyped_return to override
1267 the size so that machine-specific information can be stored here. */
1268 #ifdef APPLY_RESULT_SIZE
1269 size = APPLY_RESULT_SIZE;
1270 #endif
1271 }
1272 return size;
1273 }
1274
1275 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1276 /* Create a vector describing the result block RESULT. If SAVEP is true,
1277 the result block is used to save the values; otherwise it is used to
1278 restore the values. */
1279
1280 static rtx
1281 result_vector (int savep, rtx result)
1282 {
1283 int regno, size, align, nelts;
1284 enum machine_mode mode;
1285 rtx reg, mem;
1286 rtx *savevec = alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
1287
1288 size = nelts = 0;
1289 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1290 if ((mode = apply_result_mode[regno]) != VOIDmode)
1291 {
1292 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1293 if (size % align != 0)
1294 size = CEIL (size, align) * align;
1295 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1296 mem = adjust_address (result, mode, size);
1297 savevec[nelts++] = (savep
1298 ? gen_rtx_SET (VOIDmode, mem, reg)
1299 : gen_rtx_SET (VOIDmode, reg, mem));
1300 size += GET_MODE_SIZE (mode);
1301 }
1302 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1303 }
1304 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1305
1306 /* Save the state required to perform an untyped call with the same
1307 arguments as were passed to the current function. */
1308
1309 static rtx
1310 expand_builtin_apply_args_1 (void)
1311 {
1312 rtx registers, tem;
1313 int size, align, regno;
1314 enum machine_mode mode;
1315 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1316
1317 /* Create a block where the arg-pointer, structure value address,
1318 and argument registers can be saved. */
1319 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1320
1321 /* Walk past the arg-pointer and structure value address. */
1322 size = GET_MODE_SIZE (Pmode);
1323 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1324 size += GET_MODE_SIZE (Pmode);
1325
1326 /* Save each register used in calling a function to the block. */
1327 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1328 if ((mode = apply_args_mode[regno]) != VOIDmode)
1329 {
1330 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1331 if (size % align != 0)
1332 size = CEIL (size, align) * align;
1333
1334 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1335
1336 emit_move_insn (adjust_address (registers, mode, size), tem);
1337 size += GET_MODE_SIZE (mode);
1338 }
1339
1340 /* Save the arg pointer to the block. */
1341 tem = copy_to_reg (virtual_incoming_args_rtx);
1342 #ifdef STACK_GROWS_DOWNWARD
 1343 /* We need the pointer as the caller actually passed the arguments to us,
 1344 not as we might have pretended they were passed. Make sure it's a valid
1345 operand, as emit_move_insn isn't expected to handle a PLUS. */
1346 tem
1347 = force_operand (plus_constant (tem, current_function_pretend_args_size),
1348 NULL_RTX);
1349 #endif
1350 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1351
1352 size = GET_MODE_SIZE (Pmode);
1353
1354 /* Save the structure value address unless this is passed as an
1355 "invisible" first argument. */
1356 if (struct_incoming_value)
1357 {
1358 emit_move_insn (adjust_address (registers, Pmode, size),
1359 copy_to_reg (struct_incoming_value));
1360 size += GET_MODE_SIZE (Pmode);
1361 }
1362
1363 /* Return the address of the block. */
1364 return copy_addr_to_reg (XEXP (registers, 0));
1365 }
1366
1367 /* __builtin_apply_args returns block of memory allocated on
1368 the stack into which is stored the arg pointer, structure
1369 value address, static chain, and all the registers that might
1370 possibly be used in performing a function call. The code is
1371 moved to the start of the function so the incoming values are
1372 saved. */
1373
1374 static rtx
1375 expand_builtin_apply_args (void)
1376 {
1377 /* Don't do __builtin_apply_args more than once in a function.
1378 Save the result of the first call and reuse it. */
1379 if (apply_args_value != 0)
1380 return apply_args_value;
1381 {
1382 /* When this function is called, it means that registers must be
1383 saved on entry to this function. So we migrate the
1384 call to the first insn of this function. */
1385 rtx temp;
1386 rtx seq;
1387
1388 start_sequence ();
1389 temp = expand_builtin_apply_args_1 ();
1390 seq = get_insns ();
1391 end_sequence ();
1392
1393 apply_args_value = temp;
1394
1395 /* Put the insns after the NOTE that starts the function.
1396 If this is inside a start_sequence, make the outer-level insn
1397 chain current, so the code is placed at the start of the
1398 function. */
1399 push_topmost_sequence ();
1400 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1401 pop_topmost_sequence ();
1402 return temp;
1403 }
1404 }
1405
1406 /* Perform an untyped call and save the state required to perform an
1407 untyped return of whatever value was returned by the given function. */
1408
1409 static rtx
1410 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1411 {
1412 int size, align, regno;
1413 enum machine_mode mode;
1414 rtx incoming_args, result, reg, dest, src, call_insn;
1415 rtx old_stack_level = 0;
1416 rtx call_fusage = 0;
1417 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1418
1419 arguments = convert_memory_address (Pmode, arguments);
1420
1421 /* Create a block where the return registers can be saved. */
1422 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1423
1424 /* Fetch the arg pointer from the ARGUMENTS block. */
1425 incoming_args = gen_reg_rtx (Pmode);
1426 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1427 #ifndef STACK_GROWS_DOWNWARD
1428 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1429 incoming_args, 0, OPTAB_LIB_WIDEN);
1430 #endif
1431
1432 /* Push a new argument block and copy the arguments. Do not allow
1433 the (potential) memcpy call below to interfere with our stack
1434 manipulations. */
1435 do_pending_stack_adjust ();
1436 NO_DEFER_POP;
1437
1438 /* Save the stack with nonlocal if available. */
1439 #ifdef HAVE_save_stack_nonlocal
1440 if (HAVE_save_stack_nonlocal)
1441 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
1442 else
1443 #endif
1444 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1445
1446 /* Allocate a block of memory onto the stack and copy the memory
1447 arguments to the outgoing arguments address. */
1448 allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
1449 dest = virtual_outgoing_args_rtx;
1450 #ifndef STACK_GROWS_DOWNWARD
1451 if (GET_CODE (argsize) == CONST_INT)
1452 dest = plus_constant (dest, -INTVAL (argsize));
1453 else
1454 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1455 #endif
1456 dest = gen_rtx_MEM (BLKmode, dest);
1457 set_mem_align (dest, PARM_BOUNDARY);
1458 src = gen_rtx_MEM (BLKmode, incoming_args);
1459 set_mem_align (src, PARM_BOUNDARY);
1460 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1461
1462 /* Refer to the argument block. */
1463 apply_args_size ();
1464 arguments = gen_rtx_MEM (BLKmode, arguments);
1465 set_mem_align (arguments, PARM_BOUNDARY);
1466
1467 /* Walk past the arg-pointer and structure value address. */
1468 size = GET_MODE_SIZE (Pmode);
1469 if (struct_value)
1470 size += GET_MODE_SIZE (Pmode);
1471
1472 /* Restore each of the registers previously saved. Make USE insns
1473 for each of these registers for use in making the call. */
1474 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1475 if ((mode = apply_args_mode[regno]) != VOIDmode)
1476 {
1477 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1478 if (size % align != 0)
1479 size = CEIL (size, align) * align;
1480 reg = gen_rtx_REG (mode, regno);
1481 emit_move_insn (reg, adjust_address (arguments, mode, size));
1482 use_reg (&call_fusage, reg);
1483 size += GET_MODE_SIZE (mode);
1484 }
1485
1486 /* Restore the structure value address unless this is passed as an
1487 "invisible" first argument. */
1488 size = GET_MODE_SIZE (Pmode);
1489 if (struct_value)
1490 {
1491 rtx value = gen_reg_rtx (Pmode);
1492 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1493 emit_move_insn (struct_value, value);
1494 if (REG_P (struct_value))
1495 use_reg (&call_fusage, struct_value);
1496 size += GET_MODE_SIZE (Pmode);
1497 }
1498
1499 /* All arguments and registers used for the call are set up by now! */
1500 function = prepare_call_address (function, NULL, &call_fusage, 0, 0);
1501
1502 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1503 and we don't want to load it into a register as an optimization,
1504 because prepare_call_address already did it if it should be done. */
1505 if (GET_CODE (function) != SYMBOL_REF)
1506 function = memory_address (FUNCTION_MODE, function);
1507
1508 /* Generate the actual call instruction and save the return value. */
1509 #ifdef HAVE_untyped_call
1510 if (HAVE_untyped_call)
1511 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1512 result, result_vector (1, result)));
1513 else
1514 #endif
1515 #ifdef HAVE_call_value
1516 if (HAVE_call_value)
1517 {
1518 rtx valreg = 0;
1519
1520 /* Locate the unique return register. It is not possible to
1521 express a call that sets more than one return register using
1522 call_value; use untyped_call for that. In fact, untyped_call
1523 only needs to save the return registers in the given block. */
1524 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1525 if ((mode = apply_result_mode[regno]) != VOIDmode)
1526 {
1527 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1528
1529 valreg = gen_rtx_REG (mode, regno);
1530 }
1531
1532 emit_call_insn (GEN_CALL_VALUE (valreg,
1533 gen_rtx_MEM (FUNCTION_MODE, function),
1534 const0_rtx, NULL_RTX, const0_rtx));
1535
1536 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1537 }
1538 else
1539 #endif
1540 gcc_unreachable ();
1541
1542 /* Find the CALL insn we just emitted, and attach the register usage
1543 information. */
1544 call_insn = last_call_insn ();
1545 add_function_usage_to (call_insn, call_fusage);
1546
1547 /* Restore the stack. */
1548 #ifdef HAVE_save_stack_nonlocal
1549 if (HAVE_save_stack_nonlocal)
1550 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
1551 else
1552 #endif
1553 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1554
1555 OK_DEFER_POP;
1556
1557 /* Return the address of the result block. */
1558 result = copy_addr_to_reg (XEXP (result, 0));
1559 return convert_memory_address (ptr_mode, result);
1560 }
1561
1562 /* Perform an untyped return. */
1563
1564 static void
1565 expand_builtin_return (rtx result)
1566 {
1567 int size, align, regno;
1568 enum machine_mode mode;
1569 rtx reg;
1570 rtx call_fusage = 0;
1571
1572 result = convert_memory_address (Pmode, result);
1573
1574 apply_result_size ();
1575 result = gen_rtx_MEM (BLKmode, result);
1576
1577 #ifdef HAVE_untyped_return
1578 if (HAVE_untyped_return)
1579 {
1580 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1581 emit_barrier ();
1582 return;
1583 }
1584 #endif
1585
1586 /* Restore the return value and note that each value is used. */
1587 size = 0;
1588 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1589 if ((mode = apply_result_mode[regno]) != VOIDmode)
1590 {
1591 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1592 if (size % align != 0)
1593 size = CEIL (size, align) * align;
1594 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1595 emit_move_insn (reg, adjust_address (result, mode, size));
1596
1597 push_to_sequence (call_fusage);
1598 emit_insn (gen_rtx_USE (VOIDmode, reg));
1599 call_fusage = get_insns ();
1600 end_sequence ();
1601 size += GET_MODE_SIZE (mode);
1602 }
1603
1604 /* Put the USE insns before the return. */
1605 emit_insn (call_fusage);
1606
1607 /* Return whatever values were restored by jumping directly to the end
1608 of the function. */
1609 expand_naked_return ();
1610 }
1611
1612 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1613
1614 static enum type_class
1615 type_to_class (tree type)
1616 {
1617 switch (TREE_CODE (type))
1618 {
1619 case VOID_TYPE: return void_type_class;
1620 case INTEGER_TYPE: return integer_type_class;
1621 case ENUMERAL_TYPE: return enumeral_type_class;
1622 case BOOLEAN_TYPE: return boolean_type_class;
1623 case POINTER_TYPE: return pointer_type_class;
1624 case REFERENCE_TYPE: return reference_type_class;
1625 case OFFSET_TYPE: return offset_type_class;
1626 case REAL_TYPE: return real_type_class;
1627 case COMPLEX_TYPE: return complex_type_class;
1628 case FUNCTION_TYPE: return function_type_class;
1629 case METHOD_TYPE: return method_type_class;
1630 case RECORD_TYPE: return record_type_class;
1631 case UNION_TYPE:
1632 case QUAL_UNION_TYPE: return union_type_class;
1633 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1634 ? string_type_class : array_type_class);
1635 case LANG_TYPE: return lang_type_class;
1636 default: return no_type_class;
1637 }
1638 }
1639
1640 /* Expand a call EXP to __builtin_classify_type. */
1641
1642 static rtx
1643 expand_builtin_classify_type (tree exp)
1644 {
1645 if (call_expr_nargs (exp))
1646 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1647 return GEN_INT (no_type_class);
1648 }
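/* For example (illustrative): __builtin_classify_type (3.14) expands to
   the constant for real_type_class, __builtin_classify_type (42) to
   integer_type_class, and a call without arguments to no_type_class.  */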
1649
1650 /* This helper macro, meant to be used in mathfn_built_in below,
1651 determines which among a set of three builtin math functions is
1652 appropriate for a given type mode. The `F' and `L' cases are
1653 automatically generated from the `double' case. */
1654 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1655 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1656 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1657 fcodel = BUILT_IN_MATHFN##L ; break;
1658 /* Similar to above, but appends _R after any F/L suffix. */
1659 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1660 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1661 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1662 fcodel = BUILT_IN_MATHFN##L_R ; break;
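/* For instance, CASE_MATHFN (BUILT_IN_SIN) expands (roughly) to

     case BUILT_IN_SIN: case BUILT_IN_SINF: case BUILT_IN_SINL:
       fcode = BUILT_IN_SIN; fcodef = BUILT_IN_SINF;
       fcodel = BUILT_IN_SINL; break;

   so a single table entry below covers the double, float and long
   double variants of the function.  */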
1663
1664 /* Return the mathematical function equivalent to FN but operating directly
1665 on TYPE, if available. If we can't do the conversion, return zero. */
1666 tree
1667 mathfn_built_in (tree type, enum built_in_function fn)
1668 {
1669 enum built_in_function fcode, fcodef, fcodel;
1670
1671 switch (fn)
1672 {
1673 CASE_MATHFN (BUILT_IN_ACOS)
1674 CASE_MATHFN (BUILT_IN_ACOSH)
1675 CASE_MATHFN (BUILT_IN_ASIN)
1676 CASE_MATHFN (BUILT_IN_ASINH)
1677 CASE_MATHFN (BUILT_IN_ATAN)
1678 CASE_MATHFN (BUILT_IN_ATAN2)
1679 CASE_MATHFN (BUILT_IN_ATANH)
1680 CASE_MATHFN (BUILT_IN_CBRT)
1681 CASE_MATHFN (BUILT_IN_CEIL)
1682 CASE_MATHFN (BUILT_IN_CEXPI)
1683 CASE_MATHFN (BUILT_IN_COPYSIGN)
1684 CASE_MATHFN (BUILT_IN_COS)
1685 CASE_MATHFN (BUILT_IN_COSH)
1686 CASE_MATHFN (BUILT_IN_DREM)
1687 CASE_MATHFN (BUILT_IN_ERF)
1688 CASE_MATHFN (BUILT_IN_ERFC)
1689 CASE_MATHFN (BUILT_IN_EXP)
1690 CASE_MATHFN (BUILT_IN_EXP10)
1691 CASE_MATHFN (BUILT_IN_EXP2)
1692 CASE_MATHFN (BUILT_IN_EXPM1)
1693 CASE_MATHFN (BUILT_IN_FABS)
1694 CASE_MATHFN (BUILT_IN_FDIM)
1695 CASE_MATHFN (BUILT_IN_FLOOR)
1696 CASE_MATHFN (BUILT_IN_FMA)
1697 CASE_MATHFN (BUILT_IN_FMAX)
1698 CASE_MATHFN (BUILT_IN_FMIN)
1699 CASE_MATHFN (BUILT_IN_FMOD)
1700 CASE_MATHFN (BUILT_IN_FREXP)
1701 CASE_MATHFN (BUILT_IN_GAMMA)
1702 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1703 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1704 CASE_MATHFN (BUILT_IN_HYPOT)
1705 CASE_MATHFN (BUILT_IN_ILOGB)
1706 CASE_MATHFN (BUILT_IN_INF)
1707 CASE_MATHFN (BUILT_IN_ISINF)
1708 CASE_MATHFN (BUILT_IN_J0)
1709 CASE_MATHFN (BUILT_IN_J1)
1710 CASE_MATHFN (BUILT_IN_JN)
1711 CASE_MATHFN (BUILT_IN_LCEIL)
1712 CASE_MATHFN (BUILT_IN_LDEXP)
1713 CASE_MATHFN (BUILT_IN_LFLOOR)
1714 CASE_MATHFN (BUILT_IN_LGAMMA)
1715 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1716 CASE_MATHFN (BUILT_IN_LLCEIL)
1717 CASE_MATHFN (BUILT_IN_LLFLOOR)
1718 CASE_MATHFN (BUILT_IN_LLRINT)
1719 CASE_MATHFN (BUILT_IN_LLROUND)
1720 CASE_MATHFN (BUILT_IN_LOG)
1721 CASE_MATHFN (BUILT_IN_LOG10)
1722 CASE_MATHFN (BUILT_IN_LOG1P)
1723 CASE_MATHFN (BUILT_IN_LOG2)
1724 CASE_MATHFN (BUILT_IN_LOGB)
1725 CASE_MATHFN (BUILT_IN_LRINT)
1726 CASE_MATHFN (BUILT_IN_LROUND)
1727 CASE_MATHFN (BUILT_IN_MODF)
1728 CASE_MATHFN (BUILT_IN_NAN)
1729 CASE_MATHFN (BUILT_IN_NANS)
1730 CASE_MATHFN (BUILT_IN_NEARBYINT)
1731 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1732 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1733 CASE_MATHFN (BUILT_IN_POW)
1734 CASE_MATHFN (BUILT_IN_POWI)
1735 CASE_MATHFN (BUILT_IN_POW10)
1736 CASE_MATHFN (BUILT_IN_REMAINDER)
1737 CASE_MATHFN (BUILT_IN_REMQUO)
1738 CASE_MATHFN (BUILT_IN_RINT)
1739 CASE_MATHFN (BUILT_IN_ROUND)
1740 CASE_MATHFN (BUILT_IN_SCALB)
1741 CASE_MATHFN (BUILT_IN_SCALBLN)
1742 CASE_MATHFN (BUILT_IN_SCALBN)
1743 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1744 CASE_MATHFN (BUILT_IN_SIN)
1745 CASE_MATHFN (BUILT_IN_SINCOS)
1746 CASE_MATHFN (BUILT_IN_SINH)
1747 CASE_MATHFN (BUILT_IN_SQRT)
1748 CASE_MATHFN (BUILT_IN_TAN)
1749 CASE_MATHFN (BUILT_IN_TANH)
1750 CASE_MATHFN (BUILT_IN_TGAMMA)
1751 CASE_MATHFN (BUILT_IN_TRUNC)
1752 CASE_MATHFN (BUILT_IN_Y0)
1753 CASE_MATHFN (BUILT_IN_Y1)
1754 CASE_MATHFN (BUILT_IN_YN)
1755
1756 default:
1757 return NULL_TREE;
1758 }
1759
1760 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1761 return implicit_built_in_decls[fcode];
1762 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1763 return implicit_built_in_decls[fcodef];
1764 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1765 return implicit_built_in_decls[fcodel];
1766 else
1767 return NULL_TREE;
1768 }
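/* For example (illustrative), mathfn_built_in (float_type_node,
   BUILT_IN_SIN) yields the implicit declaration of sinf, or NULL_TREE
   when the runtime is not assumed to provide it.  */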
1769
1770 /* If errno must be maintained, expand the RTL to check if the result,
1771 TARGET, of a built-in function call, EXP, is NaN, and if so set
1772 errno to EDOM. */
1773
1774 static void
1775 expand_errno_check (tree exp, rtx target)
1776 {
1777 rtx lab = gen_label_rtx ();
1778
1779 /* Test the result; if it is NaN, set errno=EDOM because
1780 the argument was not in the domain. */
1781 emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target),
1782 0, lab);
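/* Only a NaN compares unequal to itself, so the branch to LAB is taken
   for every non-NaN result and the errno handling below is skipped.  */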
1783
1784 #ifdef TARGET_EDOM
1785 /* If this built-in doesn't throw an exception, set errno directly. */
1786 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1787 {
1788 #ifdef GEN_ERRNO_RTX
1789 rtx errno_rtx = GEN_ERRNO_RTX;
1790 #else
1791 rtx errno_rtx
1792 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1793 #endif
1794 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1795 emit_label (lab);
1796 return;
1797 }
1798 #endif
1799
1800 /* We can't set errno=EDOM directly; let the library call do it.
1801 Pop the arguments right away in case the call gets deleted. */
1802 NO_DEFER_POP;
1803 expand_call (exp, target, 0);
1804 OK_DEFER_POP;
1805 emit_label (lab);
1806 }
1807
1808 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1809 Return NULL_RTX if a normal call should be emitted rather than expanding
1810 the function in-line. EXP is the expression that is a call to the builtin
1811 function; if convenient, the result should be placed in TARGET.
1812 SUBTARGET may be used as the target for computing one of EXP's operands. */
1813
1814 static rtx
1815 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1816 {
1817 optab builtin_optab;
1818 rtx op0, insns, before_call;
1819 tree fndecl = get_callee_fndecl (exp);
1820 enum machine_mode mode;
1821 bool errno_set = false;
1822 tree arg, narg;
1823
1824 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
1825 return NULL_RTX;
1826
1827 arg = CALL_EXPR_ARG (exp, 0);
1828
1829 switch (DECL_FUNCTION_CODE (fndecl))
1830 {
1831 CASE_FLT_FN (BUILT_IN_SQRT):
1832 errno_set = ! tree_expr_nonnegative_p (arg);
1833 builtin_optab = sqrt_optab;
1834 break;
1835 CASE_FLT_FN (BUILT_IN_EXP):
1836 errno_set = true; builtin_optab = exp_optab; break;
1837 CASE_FLT_FN (BUILT_IN_EXP10):
1838 CASE_FLT_FN (BUILT_IN_POW10):
1839 errno_set = true; builtin_optab = exp10_optab; break;
1840 CASE_FLT_FN (BUILT_IN_EXP2):
1841 errno_set = true; builtin_optab = exp2_optab; break;
1842 CASE_FLT_FN (BUILT_IN_EXPM1):
1843 errno_set = true; builtin_optab = expm1_optab; break;
1844 CASE_FLT_FN (BUILT_IN_LOGB):
1845 errno_set = true; builtin_optab = logb_optab; break;
1846 CASE_FLT_FN (BUILT_IN_LOG):
1847 errno_set = true; builtin_optab = log_optab; break;
1848 CASE_FLT_FN (BUILT_IN_LOG10):
1849 errno_set = true; builtin_optab = log10_optab; break;
1850 CASE_FLT_FN (BUILT_IN_LOG2):
1851 errno_set = true; builtin_optab = log2_optab; break;
1852 CASE_FLT_FN (BUILT_IN_LOG1P):
1853 errno_set = true; builtin_optab = log1p_optab; break;
1854 CASE_FLT_FN (BUILT_IN_ASIN):
1855 builtin_optab = asin_optab; break;
1856 CASE_FLT_FN (BUILT_IN_ACOS):
1857 builtin_optab = acos_optab; break;
1858 CASE_FLT_FN (BUILT_IN_TAN):
1859 builtin_optab = tan_optab; break;
1860 CASE_FLT_FN (BUILT_IN_ATAN):
1861 builtin_optab = atan_optab; break;
1862 CASE_FLT_FN (BUILT_IN_FLOOR):
1863 builtin_optab = floor_optab; break;
1864 CASE_FLT_FN (BUILT_IN_CEIL):
1865 builtin_optab = ceil_optab; break;
1866 CASE_FLT_FN (BUILT_IN_TRUNC):
1867 builtin_optab = btrunc_optab; break;
1868 CASE_FLT_FN (BUILT_IN_ROUND):
1869 builtin_optab = round_optab; break;
1870 CASE_FLT_FN (BUILT_IN_NEARBYINT):
1871 builtin_optab = nearbyint_optab;
1872 if (flag_trapping_math)
1873 break;
1874 /* Else fall through and expand as rint. */
1875 CASE_FLT_FN (BUILT_IN_RINT):
1876 builtin_optab = rint_optab; break;
1877 default:
1878 gcc_unreachable ();
1879 }
1880
1881 /* Make a suitable register to place result in. */
1882 mode = TYPE_MODE (TREE_TYPE (exp));
1883
1884 if (! flag_errno_math || ! HONOR_NANS (mode))
1885 errno_set = false;
1886
1887 /* Before working hard, check whether the instruction is available. */
1888 if (builtin_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
1889 {
1890 target = gen_reg_rtx (mode);
1891
1892 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
1893 need to expand the argument again. This way, we will not perform
1894 side-effects more than once. */
1895 narg = builtin_save_expr (arg);
1896 if (narg != arg)
1897 {
1898 arg = narg;
1899 exp = build_call_expr (fndecl, 1, arg);
1900 }
1901
1902 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
1903
1904 start_sequence ();
1905
1906 /* Compute into TARGET.
1907 Set TARGET to wherever the result comes back. */
1908 target = expand_unop (mode, builtin_optab, op0, target, 0);
1909
1910 if (target != 0)
1911 {
1912 if (errno_set)
1913 expand_errno_check (exp, target);
1914
1915 /* Output the entire sequence. */
1916 insns = get_insns ();
1917 end_sequence ();
1918 emit_insn (insns);
1919 return target;
1920 }
1921
1922 /* If we were unable to expand via the builtin, stop the sequence
1923 (without outputting the insns) and call the library function
1924 with the stabilized argument list. */
1925 end_sequence ();
1926 }
1927
1928 before_call = get_last_insn ();
1929
1930 target = expand_call (exp, target, target == const0_rtx);
1931
1932 /* If this is a sqrt operation and we don't care about errno, try to
1933 attach a REG_EQUAL note with a SQRT rtx to the emitted libcall.
1934 This allows the semantics of the libcall to be visible to the RTL
1935 optimizers. */
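/* For instance (illustrative), with the SQRT note attached, later RTL
   passes such as CSE can recognize a second libcall computing sqrt of
   the same register as redundant and reuse the first result.  */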
1936 if (builtin_optab == sqrt_optab && !errno_set)
1937 {
1938 /* Search backwards through the insns emitted by expand_call looking
1939 for the instruction with the REG_RETVAL note. */
1940 rtx last = get_last_insn ();
1941 while (last != before_call)
1942 {
1943 if (find_reg_note (last, REG_RETVAL, NULL))
1944 {
1945 rtx note = find_reg_note (last, REG_EQUAL, NULL);
1946 /* Check that the REG_EQUAL note is an EXPR_LIST with
1947 two elements, i.e. symbol_ref(sqrt) and the operand. */
1948 if (note
1949 && GET_CODE (note) == EXPR_LIST
1950 && GET_CODE (XEXP (note, 0)) == EXPR_LIST
1951 && XEXP (XEXP (note, 0), 1) != NULL_RTX
1952 && XEXP (XEXP (XEXP (note, 0), 1), 1) == NULL_RTX)
1953 {
1954 rtx operand = XEXP (XEXP (XEXP (note, 0), 1), 0);
1955 /* Check operand is a register with expected mode. */
1956 if (operand
1957 && REG_P (operand)
1958 && GET_MODE (operand) == mode)
1959 {
1960 /* Replace the REG_EQUAL note with a SQRT rtx. */
1961 rtx equiv = gen_rtx_SQRT (mode, operand);
1962 set_unique_reg_note (last, REG_EQUAL, equiv);
1963 }
1964 }
1965 break;
1966 }
1967 last = PREV_INSN (last);
1968 }
1969 }
1970
1971 return target;
1972 }
1973
1974 /* Expand a call to the builtin binary math functions (pow and atan2).
1975 Return NULL_RTX if a normal call should be emitted rather than expanding the
1976 function in-line. EXP is the expression that is a call to the builtin
1977 function; if convenient, the result should be placed in TARGET.
1978 SUBTARGET may be used as the target for computing one of EXP's
1979 operands. */
1980
1981 static rtx
1982 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
1983 {
1984 optab builtin_optab;
1985 rtx op0, op1, insns;
1986 int op1_type = REAL_TYPE;
1987 tree fndecl = get_callee_fndecl (exp);
1988 tree arg0, arg1, narg;
1989 enum machine_mode mode;
1990 bool errno_set = true;
1991 bool stable = true;
1992
1993 switch (DECL_FUNCTION_CODE (fndecl))
1994 {
1995 CASE_FLT_FN (BUILT_IN_SCALBN):
1996 CASE_FLT_FN (BUILT_IN_SCALBLN):
1997 CASE_FLT_FN (BUILT_IN_LDEXP):
1998 op1_type = INTEGER_TYPE;
1999 default:
2000 break;
2001 }
2002
2003 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2004 return NULL_RTX;
2005
2006 arg0 = CALL_EXPR_ARG (exp, 0);
2007 arg1 = CALL_EXPR_ARG (exp, 1);
2008
2009 switch (DECL_FUNCTION_CODE (fndecl))
2010 {
2011 CASE_FLT_FN (BUILT_IN_POW):
2012 builtin_optab = pow_optab; break;
2013 CASE_FLT_FN (BUILT_IN_ATAN2):
2014 builtin_optab = atan2_optab; break;
2015 CASE_FLT_FN (BUILT_IN_SCALB):
2016 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2017 return 0;
2018 builtin_optab = scalb_optab; break;
2019 CASE_FLT_FN (BUILT_IN_SCALBN):
2020 CASE_FLT_FN (BUILT_IN_SCALBLN):
2021 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2022 return 0;
2023 /* Fall through... */
2024 CASE_FLT_FN (BUILT_IN_LDEXP):
2025 builtin_optab = ldexp_optab; break;
2026 CASE_FLT_FN (BUILT_IN_FMOD):
2027 builtin_optab = fmod_optab; break;
2028 CASE_FLT_FN (BUILT_IN_REMAINDER):
2029 CASE_FLT_FN (BUILT_IN_DREM):
2030 builtin_optab = remainder_optab; break;
2031 default:
2032 gcc_unreachable ();
2033 }
2034
2035 /* Make a suitable register to place result in. */
2036 mode = TYPE_MODE (TREE_TYPE (exp));
2037
2038 /* Before working hard, check whether the instruction is available. */
2039 if (builtin_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
2040 return NULL_RTX;
2041
2042 target = gen_reg_rtx (mode);
2043
2044 if (! flag_errno_math || ! HONOR_NANS (mode))
2045 errno_set = false;
2046
2047 /* Always stabilize the argument list. */
2048 narg = builtin_save_expr (arg1);
2049 if (narg != arg1)
2050 {
2051 arg1 = narg;
2052 stable = false;
2053 }
2054 narg = builtin_save_expr (arg0);
2055 if (narg != arg0)
2056 {
2057 arg0 = narg;
2058 stable = false;
2059 }
2060
2061 if (! stable)
2062 exp = build_call_expr (fndecl, 2, arg0, arg1);
2063
2064 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2065 op1 = expand_normal (arg1);
2066
2067 start_sequence ();
2068
2069 /* Compute into TARGET.
2070 Set TARGET to wherever the result comes back. */
2071 target = expand_binop (mode, builtin_optab, op0, op1,
2072 target, 0, OPTAB_DIRECT);
2073
2074 /* If we were unable to expand via the builtin, stop the sequence
2075 (without outputting the insns) and call the library function
2076 with the stabilized argument list. */
2077 if (target == 0)
2078 {
2079 end_sequence ();
2080 return expand_call (exp, target, target == const0_rtx);
2081 }
2082
2083 if (errno_set)
2084 expand_errno_check (exp, target);
2085
2086 /* Output the entire sequence. */
2087 insns = get_insns ();
2088 end_sequence ();
2089 emit_insn (insns);
2090
2091 return target;
2092 }
2093
2094 /* Expand a call to the builtin sin and cos math functions.
2095 Return NULL_RTX if a normal call should be emitted rather than expanding the
2096 function in-line. EXP is the expression that is a call to the builtin
2097 function; if convenient, the result should be placed in TARGET.
2098 SUBTARGET may be used as the target for computing one of EXP's
2099 operands. */
2100
2101 static rtx
2102 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2103 {
2104 optab builtin_optab;
2105 rtx op0, insns;
2106 tree fndecl = get_callee_fndecl (exp);
2107 enum machine_mode mode;
2108 tree arg, narg;
2109
2110 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2111 return NULL_RTX;
2112
2113 arg = CALL_EXPR_ARG (exp, 0);
2114
2115 switch (DECL_FUNCTION_CODE (fndecl))
2116 {
2117 CASE_FLT_FN (BUILT_IN_SIN):
2118 CASE_FLT_FN (BUILT_IN_COS):
2119 builtin_optab = sincos_optab; break;
2120 default:
2121 gcc_unreachable ();
2122 }
2123
2124 /* Make a suitable register to place result in. */
2125 mode = TYPE_MODE (TREE_TYPE (exp));
2126
2127 /* Check if the sincos insn is available; otherwise fall back
2128 to the sin or cos insn. */
2129 if (builtin_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
2130 switch (DECL_FUNCTION_CODE (fndecl))
2131 {
2132 CASE_FLT_FN (BUILT_IN_SIN):
2133 builtin_optab = sin_optab; break;
2134 CASE_FLT_FN (BUILT_IN_COS):
2135 builtin_optab = cos_optab; break;
2136 default:
2137 gcc_unreachable ();
2138 }
2139
2140 /* Before working hard, check whether the instruction is available. */
2141 if (builtin_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2142 {
2143 target = gen_reg_rtx (mode);
2144
2145 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2146 need to expand the argument again. This way, we will not perform
2147 side-effects more than once. */
2148 narg = save_expr (arg);
2149 if (narg != arg)
2150 {
2151 arg = narg;
2152 exp = build_call_expr (fndecl, 1, arg);
2153 }
2154
2155 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2156
2157 start_sequence ();
2158
2159 /* Compute into TARGET.
2160 Set TARGET to wherever the result comes back. */
2161 if (builtin_optab == sincos_optab)
2162 {
2163 int result;
2164
2165 switch (DECL_FUNCTION_CODE (fndecl))
2166 {
2167 CASE_FLT_FN (BUILT_IN_SIN):
2168 result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
2169 break;
2170 CASE_FLT_FN (BUILT_IN_COS):
2171 result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
2172 break;
2173 default:
2174 gcc_unreachable ();
2175 }
2176 gcc_assert (result);
2177 }
2178 else
2179 {
2180 target = expand_unop (mode, builtin_optab, op0, target, 0);
2181 }
2182
2183 if (target != 0)
2184 {
2185 /* Output the entire sequence. */
2186 insns = get_insns ();
2187 end_sequence ();
2188 emit_insn (insns);
2189 return target;
2190 }
2191
2192 /* If we were unable to expand via the builtin, stop the sequence
2193 (without outputting the insns) and call the library function
2194 with the stabilized argument list. */
2195 end_sequence ();
2196 }
2197
2198 target = expand_call (exp, target, target == const0_rtx);
2199
2200 return target;
2201 }
2202
2203 /* Expand a call to one of the builtin math functions that operate on
2204 a floating point argument and output an integer result (ilogb, isinf,
2205 isnan, etc).
2206 Return 0 if a normal call should be emitted rather than expanding the
2207 function in-line. EXP is the expression that is a call to the builtin
2208 function; if convenient, the result should be placed in TARGET.
2209 SUBTARGET may be used as the target for computing one of EXP's operands. */
2210
2211 static rtx
2212 expand_builtin_interclass_mathfn (tree exp, rtx target, rtx subtarget)
2213 {
2214 optab builtin_optab = 0;
2215 enum insn_code icode = CODE_FOR_nothing;
2216 rtx op0;
2217 tree fndecl = get_callee_fndecl (exp);
2218 enum machine_mode mode;
2219 bool errno_set = false;
2220 tree arg, narg;
2221
2222 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2223 return NULL_RTX;
2224
2225 arg = CALL_EXPR_ARG (exp, 0);
2226
2227 switch (DECL_FUNCTION_CODE (fndecl))
2228 {
2229 CASE_FLT_FN (BUILT_IN_ILOGB):
2230 errno_set = true; builtin_optab = ilogb_optab; break;
2231 CASE_FLT_FN (BUILT_IN_ISINF):
2232 builtin_optab = isinf_optab; break;
2233 case BUILT_IN_ISFINITE:
2234 CASE_FLT_FN (BUILT_IN_FINITE):
2235 /* These builtins have no optabs (yet). */
2236 break;
2237 default:
2238 gcc_unreachable ();
2239 }
2240
2241 /* There's no easy way to detect the case where we need to set EDOM. */
2242 if (flag_errno_math && errno_set)
2243 return NULL_RTX;
2244
2245 /* Optab mode depends on the mode of the input argument. */
2246 mode = TYPE_MODE (TREE_TYPE (arg));
2247
2248 if (builtin_optab)
2249 icode = builtin_optab->handlers[(int) mode].insn_code;
2250
2251 /* Before working hard, check whether the instruction is available. */
2252 if (icode != CODE_FOR_nothing)
2253 {
2254 /* Make a suitable register to place result in. */
2255 if (!target
2256 || GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2257 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
2258
2259 gcc_assert (insn_data[icode].operand[0].predicate
2260 (target, GET_MODE (target)));
2261
2262 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2263 need to expand the argument again. This way, we will not perform
2264 side-effects more than once. */
2265 narg = builtin_save_expr (arg);
2266 if (narg != arg)
2267 {
2268 arg = narg;
2269 exp = build_call_expr (fndecl, 1, arg);
2270 }
2271
2272 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2273
2274 if (mode != GET_MODE (op0))
2275 op0 = convert_to_mode (mode, op0, 0);
2276
2277 /* Compute into TARGET.
2278 Set TARGET to wherever the result comes back. */
2279 emit_unop_insn (icode, target, op0, UNKNOWN);
2280 return target;
2281 }
2282
2283 /* If there is no optab, try generic code. */
2284 switch (DECL_FUNCTION_CODE (fndecl))
2285 {
2286 tree result;
2287
2288 CASE_FLT_FN (BUILT_IN_ISINF):
2289 {
2290 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
2291 tree const isgr_fn = built_in_decls[BUILT_IN_ISGREATER];
2292 tree const type = TREE_TYPE (arg);
2293 REAL_VALUE_TYPE r;
2294 char buf[128];
2295
2296 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2297 real_from_string (&r, buf);
2298 result = build_call_expr (isgr_fn, 2,
2299 fold_build1 (ABS_EXPR, type, arg),
2300 build_real (type, r));
2301 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2302 }
2303 CASE_FLT_FN (BUILT_IN_FINITE):
2304 case BUILT_IN_ISFINITE:
2305 {
2306 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
2307 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
2308 tree const type = TREE_TYPE (arg);
2309 REAL_VALUE_TYPE r;
2310 char buf[128];
2311
2312 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2313 real_from_string (&r, buf);
2314 result = build_call_expr (isle_fn, 2,
2315 fold_build1 (ABS_EXPR, type, arg),
2316 build_real (type, r));
2317 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2318 }
2319 default:
2320 break;
2321 }
2322
2323 target = expand_call (exp, target, target == const0_rtx);
2324
2325 return target;
2326 }
2327
2328 /* Expand a call to the builtin sincos math function.
2329 Return NULL_RTX if a normal call should be emitted rather than expanding the
2330 function in-line. EXP is the expression that is a call to the builtin
2331 function. */
2332
2333 static rtx
2334 expand_builtin_sincos (tree exp)
2335 {
2336 rtx op0, op1, op2, target1, target2;
2337 enum machine_mode mode;
2338 tree arg, sinp, cosp;
2339 int result;
2340
2341 if (!validate_arglist (exp, REAL_TYPE,
2342 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2343 return NULL_RTX;
2344
2345 arg = CALL_EXPR_ARG (exp, 0);
2346 sinp = CALL_EXPR_ARG (exp, 1);
2347 cosp = CALL_EXPR_ARG (exp, 2);
2348
2349 /* Make a suitable register to place result in. */
2350 mode = TYPE_MODE (TREE_TYPE (arg));
2351
2352 /* Check if the sincos insn is available; otherwise emit the call. */
2353 if (sincos_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
2354 return NULL_RTX;
2355
2356 target1 = gen_reg_rtx (mode);
2357 target2 = gen_reg_rtx (mode);
2358
2359 op0 = expand_normal (arg);
2360 op1 = expand_normal (build_fold_indirect_ref (sinp));
2361 op2 = expand_normal (build_fold_indirect_ref (cosp));
2362
2363 /* Compute into target1 and target2.
2364 Set TARGET to wherever the result comes back. */
2365 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2366 gcc_assert (result);
2367
2368 /* Move target1 and target2 to the memory locations indicated
2369 by op1 and op2. */
2370 emit_move_insn (op1, target1);
2371 emit_move_insn (op2, target2);
2372
2373 return const0_rtx;
2374 }
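/* Illustrative effect: for

     double s, c;
     sincos (x, &s, &c);

   a target with a sincos insn pattern computes both values in one
   instruction, and the two results are then stored through the sinp
   and cosp pointer arguments.  */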
2375
2376 /* Expand a call to the internal cexpi builtin to the sincos math function.
2377 EXP is the expression that is a call to the builtin function; if convenient,
2378 the result should be placed in TARGET. SUBTARGET may be used as the target
2379 for computing one of EXP's operands. */
2380
2381 static rtx
2382 expand_builtin_cexpi (tree exp, rtx target, rtx subtarget)
2383 {
2384 tree fndecl = get_callee_fndecl (exp);
2385 tree arg, type;
2386 enum machine_mode mode;
2387 rtx op0, op1, op2;
2388
2389 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2390 return NULL_RTX;
2391
2392 arg = CALL_EXPR_ARG (exp, 0);
2393 type = TREE_TYPE (arg);
2394 mode = TYPE_MODE (TREE_TYPE (arg));
2395
2396 /* Try expanding via a sincos optab; fall back to emitting a libcall
2397 to sincos or cexp. One of them is sure to be available, because cexpi
2398 is only generated from sincos or cexp, or when either is known to exist. */
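/* In other words (illustrative): cexpi (x) == cos (x) + i*sin (x), so
   the value can be obtained either from a sincos-style expansion or by
   calling cexp on the pure imaginary argument 0 + i*x, as done below.  */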
2399 if (sincos_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2400 {
2401 op1 = gen_reg_rtx (mode);
2402 op2 = gen_reg_rtx (mode);
2403
2404 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2405
2406 /* Compute into op1 and op2. */
2407 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2408 }
2409 else if (TARGET_HAS_SINCOS)
2410 {
2411 tree call, fn = NULL_TREE;
2412 tree top1, top2;
2413 rtx op1a, op2a;
2414
2415 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2416 fn = built_in_decls[BUILT_IN_SINCOSF];
2417 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2418 fn = built_in_decls[BUILT_IN_SINCOS];
2419 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2420 fn = built_in_decls[BUILT_IN_SINCOSL];
2421 else
2422 gcc_unreachable ();
2423
2424 op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2425 op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2426 op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
2427 op2a = copy_to_mode_reg (Pmode, XEXP (op2, 0));
2428 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2429 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2430
2431 /* Make sure not to fold the sincos call again. */
2432 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2433 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2434 call, 3, arg, top1, top2));
2435 }
2436 else
2437 {
2438 tree call, fn = NULL_TREE, narg;
2439 tree ctype = build_complex_type (type);
2440
2441 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2442 fn = built_in_decls[BUILT_IN_CEXPF];
2443 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2444 fn = built_in_decls[BUILT_IN_CEXP];
2445 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2446 fn = built_in_decls[BUILT_IN_CEXPL];
2447 else
2448 gcc_unreachable ();
2449
2450 /* If we don't have a decl for cexp, create one. This is the
2451 friendliest fallback if the user calls __builtin_cexpi
2452 on a target without full C99 function support. */
2453 if (fn == NULL_TREE)
2454 {
2455 tree fntype;
2456 const char *name = NULL;
2457
2458 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2459 name = "cexpf";
2460 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2461 name = "cexp";
2462 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2463 name = "cexpl";
2464
2465 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2466 fn = build_fn_decl (name, fntype);
2467 }
2468
2469 narg = fold_build2 (COMPLEX_EXPR, ctype,
2470 build_real (type, dconst0), arg);
2471
2472 /* Make sure not to fold the cexp call again. */
2473 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2474 return expand_expr (build_call_nary (ctype, call, 1, narg),
2475 target, VOIDmode, EXPAND_NORMAL);
2476 }
2477
2478 /* Now build the proper return type. */
2479 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2480 make_tree (TREE_TYPE (arg), op2),
2481 make_tree (TREE_TYPE (arg), op1)),
2482 target, VOIDmode, EXPAND_NORMAL);
2483 }
2484
2485 /* Expand a call to one of the builtin rounding functions gcc defines
2486 as an extension (lfloor and lceil). As these are gcc extensions we
2487 do not need to worry about setting errno to EDOM.
2488 If expanding via the optab fails, lower the expression to (int)(floor(x)).
2489 EXP is the expression that is a call to the builtin function;
2490 if convenient, the result should be placed in TARGET. SUBTARGET may
2491 be used as the target for computing one of EXP's operands. */
2492
2493 static rtx
2494 expand_builtin_int_roundingfn (tree exp, rtx target, rtx subtarget)
2495 {
2496 convert_optab builtin_optab;
2497 rtx op0, insns, tmp;
2498 tree fndecl = get_callee_fndecl (exp);
2499 enum built_in_function fallback_fn;
2500 tree fallback_fndecl;
2501 enum machine_mode mode;
2502 tree arg, narg;
2503
2504 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2505 gcc_unreachable ();
2506
2507 arg = CALL_EXPR_ARG (exp, 0);
2508
2509 switch (DECL_FUNCTION_CODE (fndecl))
2510 {
2511 CASE_FLT_FN (BUILT_IN_LCEIL):
2512 CASE_FLT_FN (BUILT_IN_LLCEIL):
2513 builtin_optab = lceil_optab;
2514 fallback_fn = BUILT_IN_CEIL;
2515 break;
2516
2517 CASE_FLT_FN (BUILT_IN_LFLOOR):
2518 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2519 builtin_optab = lfloor_optab;
2520 fallback_fn = BUILT_IN_FLOOR;
2521 break;
2522
2523 default:
2524 gcc_unreachable ();
2525 }
2526
2527 /* Make a suitable register to place result in. */
2528 mode = TYPE_MODE (TREE_TYPE (exp));
2529
2530 target = gen_reg_rtx (mode);
2531
2532 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2533 need to expand the argument again. This way, we will not perform
2534 side-effects more than once. */
2535 narg = builtin_save_expr (arg);
2536 if (narg != arg)
2537 {
2538 arg = narg;
2539 exp = build_call_expr (fndecl, 1, arg);
2540 }
2541
2542 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2543
2544 start_sequence ();
2545
2546 /* Compute into TARGET. */
2547 if (expand_sfix_optab (target, op0, builtin_optab))
2548 {
2549 /* Output the entire sequence. */
2550 insns = get_insns ();
2551 end_sequence ();
2552 emit_insn (insns);
2553 return target;
2554 }
2555
2556 /* If we were unable to expand via the builtin, stop the sequence
2557 (without outputting the insns). */
2558 end_sequence ();
2559
2560 /* Fall back to floating point rounding optab. */
2561 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2562
2563 /* For non-C99 targets we may end up without a fallback fndecl here
2564 if the user called __builtin_lfloor directly. In this case emit
2565 a call to the floor/ceil variants nevertheless. This should give
2566 the best user experience on targets lacking full C99 support. */
2567 if (fallback_fndecl == NULL_TREE)
2568 {
2569 tree fntype;
2570 const char *name = NULL;
2571
2572 switch (DECL_FUNCTION_CODE (fndecl))
2573 {
2574 case BUILT_IN_LCEIL:
2575 case BUILT_IN_LLCEIL:
2576 name = "ceil";
2577 break;
2578 case BUILT_IN_LCEILF:
2579 case BUILT_IN_LLCEILF:
2580 name = "ceilf";
2581 break;
2582 case BUILT_IN_LCEILL:
2583 case BUILT_IN_LLCEILL:
2584 name = "ceill";
2585 break;
2586 case BUILT_IN_LFLOOR:
2587 case BUILT_IN_LLFLOOR:
2588 name = "floor";
2589 break;
2590 case BUILT_IN_LFLOORF:
2591 case BUILT_IN_LLFLOORF:
2592 name = "floorf";
2593 break;
2594 case BUILT_IN_LFLOORL:
2595 case BUILT_IN_LLFLOORL:
2596 name = "floorl";
2597 break;
2598 default:
2599 gcc_unreachable ();
2600 }
2601
2602 fntype = build_function_type_list (TREE_TYPE (arg),
2603 TREE_TYPE (arg), NULL_TREE);
2604 fallback_fndecl = build_fn_decl (name, fntype);
2605 }
2606
2607 exp = build_call_expr (fallback_fndecl, 1, arg);
2608
2609 tmp = expand_normal (exp);
2610
2611 /* Truncate the result of the floating point optab to an integer
2612 via expand_fix (). */
2613 target = gen_reg_rtx (mode);
2614 expand_fix (target, tmp, 0);
2615
2616 return target;
2617 }
2618
2619 /* Expand a call to one of the builtin math functions doing integer
2620 conversion (lrint).
2621 Return 0 if a normal call should be emitted rather than expanding the
2622 function in-line. EXP is the expression that is a call to the builtin
2623 function; if convenient, the result should be placed in TARGET.
2624 SUBTARGET may be used as the target for computing one of EXP's operands. */
2625
2626 static rtx
2627 expand_builtin_int_roundingfn_2 (tree exp, rtx target, rtx subtarget)
2628 {
2629 convert_optab builtin_optab;
2630 rtx op0, insns;
2631 tree fndecl = get_callee_fndecl (exp);
2632 tree arg, narg;
2633 enum machine_mode mode;
2634
2635 /* There's no easy way to detect the case where we need to set EDOM. */
2636 if (flag_errno_math)
2637 return NULL_RTX;
2638
2639 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2640 gcc_unreachable ();
2641
2642 arg = CALL_EXPR_ARG (exp, 0);
2643
2644 switch (DECL_FUNCTION_CODE (fndecl))
2645 {
2646 CASE_FLT_FN (BUILT_IN_LRINT):
2647 CASE_FLT_FN (BUILT_IN_LLRINT):
2648 builtin_optab = lrint_optab; break;
2649 CASE_FLT_FN (BUILT_IN_LROUND):
2650 CASE_FLT_FN (BUILT_IN_LLROUND):
2651 builtin_optab = lround_optab; break;
2652 default:
2653 gcc_unreachable ();
2654 }
2655
2656 /* Make a suitable register to place result in. */
2657 mode = TYPE_MODE (TREE_TYPE (exp));
2658
2659 target = gen_reg_rtx (mode);
2660
2661 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2662 need to expand the argument again. This way, we will not perform
2663 side-effects more than once. */
2664 narg = builtin_save_expr (arg);
2665 if (narg != arg)
2666 {
2667 arg = narg;
2668 exp = build_call_expr (fndecl, 1, arg);
2669 }
2670
2671 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2672
2673 start_sequence ();
2674
2675 if (expand_sfix_optab (target, op0, builtin_optab))
2676 {
2677 /* Output the entire sequence. */
2678 insns = get_insns ();
2679 end_sequence ();
2680 emit_insn (insns);
2681 return target;
2682 }
2683
2684 /* If we were unable to expand via the builtin, stop the sequence
2685 (without outputting the insns) and call the library function
2686 with the stabilized argument list. */
2687 end_sequence ();
2688
2689 target = expand_call (exp, target, target == const0_rtx);
2690
2691 return target;
2692 }
2693
2694 /* To evaluate powi(x,n), the floating point value x raised to the
2695 constant integer exponent n, we use a hybrid algorithm that
2696 combines the "window method" with look-up tables. For an
2697 introduction to exponentiation algorithms and "addition chains",
2698 see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
2699 "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
2700 3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
2701 Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998. */
2702
2703 /* Provide a default value for POWI_MAX_MULTS, the maximum number of
2704 multiplications to inline before calling the system library's pow
2705 function. powi(x,n) requires at worst 2*bits(n)-2 multiplications,
2706 so this default never requires calling pow, powf or powl. */
2707
2708 #ifndef POWI_MAX_MULTS
2709 #define POWI_MAX_MULTS (2*HOST_BITS_PER_WIDE_INT-2)
2710 #endif
2711
2712 /* The size of the "optimal power tree" lookup table. All
2713 exponents less than this value are simply looked up in the
2714 powi_table below. This threshold is also used to size the
2715 cache of pseudo registers that hold intermediate results. */
2716 #define POWI_TABLE_SIZE 256
2717
2718 /* The size, in bits of the window, used in the "window method"
2719 exponentiation algorithm. This is equivalent to a radix of
2720 (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method". */
2721 #define POWI_WINDOW_SIZE 3
2722
2723 /* The following table is an efficient representation of an
2724 "optimal power tree". For each value, i, the corresponding
2725 value, j, in the table states that an optimal evaluation
2726 sequence for calculating pow(x,i) can be found by evaluating
2727 pow(x,j)*pow(x,i-j). An optimal power tree for the first
2728 100 integers is given in Knuth's "Seminumerical algorithms". */
2729
2730 static const unsigned char powi_table[POWI_TABLE_SIZE] =
2731 {
2732 0, 1, 1, 2, 2, 3, 3, 4, /* 0 - 7 */
2733 4, 6, 5, 6, 6, 10, 7, 9, /* 8 - 15 */
2734 8, 16, 9, 16, 10, 12, 11, 13, /* 16 - 23 */
2735 12, 17, 13, 18, 14, 24, 15, 26, /* 24 - 31 */
2736 16, 17, 17, 19, 18, 33, 19, 26, /* 32 - 39 */
2737 20, 25, 21, 40, 22, 27, 23, 44, /* 40 - 47 */
2738 24, 32, 25, 34, 26, 29, 27, 44, /* 48 - 55 */
2739 28, 31, 29, 34, 30, 60, 31, 36, /* 56 - 63 */
2740 32, 64, 33, 34, 34, 46, 35, 37, /* 64 - 71 */
2741 36, 65, 37, 50, 38, 48, 39, 69, /* 72 - 79 */
2742 40, 49, 41, 43, 42, 51, 43, 58, /* 80 - 87 */
2743 44, 64, 45, 47, 46, 59, 47, 76, /* 88 - 95 */
2744 48, 65, 49, 66, 50, 67, 51, 66, /* 96 - 103 */
2745 52, 70, 53, 74, 54, 104, 55, 74, /* 104 - 111 */
2746 56, 64, 57, 69, 58, 78, 59, 68, /* 112 - 119 */
2747 60, 61, 61, 80, 62, 75, 63, 68, /* 120 - 127 */
2748 64, 65, 65, 128, 66, 129, 67, 90, /* 128 - 135 */
2749 68, 73, 69, 131, 70, 94, 71, 88, /* 136 - 143 */
2750 72, 128, 73, 98, 74, 132, 75, 121, /* 144 - 151 */
2751 76, 102, 77, 124, 78, 132, 79, 106, /* 152 - 159 */
2752 80, 97, 81, 160, 82, 99, 83, 134, /* 160 - 167 */
2753 84, 86, 85, 95, 86, 160, 87, 100, /* 168 - 175 */
2754 88, 113, 89, 98, 90, 107, 91, 122, /* 176 - 183 */
2755 92, 111, 93, 102, 94, 126, 95, 150, /* 184 - 191 */
2756 96, 128, 97, 130, 98, 133, 99, 195, /* 192 - 199 */
2757 100, 128, 101, 123, 102, 164, 103, 138, /* 200 - 207 */
2758 104, 145, 105, 146, 106, 109, 107, 149, /* 208 - 215 */
2759 108, 200, 109, 146, 110, 170, 111, 157, /* 216 - 223 */
2760 112, 128, 113, 130, 114, 182, 115, 132, /* 224 - 231 */
2761 116, 200, 117, 132, 118, 158, 119, 206, /* 232 - 239 */
2762 120, 240, 121, 162, 122, 147, 123, 152, /* 240 - 247 */
2763 124, 166, 125, 214, 126, 138, 127, 153, /* 248 - 255 */
2764 };
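/* Worked example (illustrative): powi_table[15] is 9, so x**15 is
   evaluated as x**9 * x**6; recursing in the same way gives the chain

     x**2  = x    * x
     x**3  = x**2 * x
     x**6  = x**3 * x**3
     x**9  = x**6 * x**3
     x**15 = x**9 * x**6

   i.e. five multiplications, one fewer than the plain binary method.  */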
2765
2766
2767 /* Return the number of multiplications required to calculate
2768 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2769 subroutine of powi_cost. CACHE is an array indicating
2770 which exponents have already been calculated. */
2771
2772 static int
2773 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
2774 {
2775 /* If we've already calculated this exponent, then this evaluation
2776 doesn't require any additional multiplications. */
2777 if (cache[n])
2778 return 0;
2779
2780 cache[n] = true;
2781 return powi_lookup_cost (n - powi_table[n], cache)
2782 + powi_lookup_cost (powi_table[n], cache) + 1;
2783 }
2784
2785 /* Return the number of multiplications required to calculate
2786 powi(x,n) for an arbitrary x, given the exponent N. This
2787 function needs to be kept in sync with expand_powi below. */
2788
2789 static int
2790 powi_cost (HOST_WIDE_INT n)
2791 {
2792 bool cache[POWI_TABLE_SIZE];
2793 unsigned HOST_WIDE_INT digit;
2794 unsigned HOST_WIDE_INT val;
2795 int result;
2796
2797 if (n == 0)
2798 return 0;
2799
2800 /* Ignore the reciprocal when calculating the cost. */
2801 val = (n < 0) ? -n : n;
2802
2803 /* Initialize the exponent cache. */
2804 memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
2805 cache[1] = true;
2806
2807 result = 0;
2808
2809 while (val >= POWI_TABLE_SIZE)
2810 {
2811 if (val & 1)
2812 {
2813 digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2814 result += powi_lookup_cost (digit, cache)
2815 + POWI_WINDOW_SIZE + 1;
2816 val >>= POWI_WINDOW_SIZE;
2817 }
2818 else
2819 {
2820 val >>= 1;
2821 result++;
2822 }
2823 }
2824
2825 return result + powi_lookup_cost (val, cache);
2826 }
2827
2828 /* Recursive subroutine of expand_powi. This function takes the array,
2829 CACHE, of already calculated exponents and an exponent N and returns
2830 an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE. */
2831
2832 static rtx
2833 expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
2834 {
2835 unsigned HOST_WIDE_INT digit;
2836 rtx target, result;
2837 rtx op0, op1;
2838
2839 if (n < POWI_TABLE_SIZE)
2840 {
2841 if (cache[n])
2842 return cache[n];
2843
2844 target = gen_reg_rtx (mode);
2845 cache[n] = target;
2846
2847 op0 = expand_powi_1 (mode, n - powi_table[n], cache);
2848 op1 = expand_powi_1 (mode, powi_table[n], cache);
2849 }
2850 else if (n & 1)
2851 {
2852 target = gen_reg_rtx (mode);
2853 digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
2854 op0 = expand_powi_1 (mode, n - digit, cache);
2855 op1 = expand_powi_1 (mode, digit, cache);
2856 }
2857 else
2858 {
2859 target = gen_reg_rtx (mode);
2860 op0 = expand_powi_1 (mode, n >> 1, cache);
2861 op1 = op0;
2862 }
2863
2864 result = expand_mult (mode, op0, op1, target, 0);
2865 if (result != target)
2866 emit_move_insn (target, result);
2867 return target;
2868 }
2869
2870 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
2871 floating point operand in mode MODE, and N is the exponent. This
2872 function needs to be kept in sync with powi_cost above. */
2873
2874 static rtx
2875 expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
2876 {
2877 unsigned HOST_WIDE_INT val;
2878 rtx cache[POWI_TABLE_SIZE];
2879 rtx result;
2880
2881 if (n == 0)
2882 return CONST1_RTX (mode);
2883
2884 val = (n < 0) ? -n : n;
2885
2886 memset (cache, 0, sizeof (cache));
2887 cache[1] = x;
2888
2889 result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
2890
2891 /* If the original exponent was negative, reciprocate the result. */
2892 if (n < 0)
2893 result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2894 result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2895
2896 return result;
2897 }
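/* For example (illustrative), expand_powi (x, mode, -3) builds the
   product for x**3 (two multiplications) and then emits one division
   to form 1 / x**3.  */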
2898
2899 /* Expand a call to the pow built-in mathematical function. Return NULL_RTX if
2900 a normal call should be emitted rather than expanding the function
2901 in-line. EXP is the expression that is a call to the builtin
2902 function; if convenient, the result should be placed in TARGET. */
2903
2904 static rtx
2905 expand_builtin_pow (tree exp, rtx target, rtx subtarget)
2906 {
2907 tree arg0, arg1;
2908 tree fn, narg0;
2909 tree type = TREE_TYPE (exp);
2910 REAL_VALUE_TYPE cint, c, c2;
2911 HOST_WIDE_INT n;
2912 rtx op, op2;
2913 enum machine_mode mode = TYPE_MODE (type);
2914
2915 if (! validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2916 return NULL_RTX;
2917
2918 arg0 = CALL_EXPR_ARG (exp, 0);
2919 arg1 = CALL_EXPR_ARG (exp, 1);
2920
2921 if (TREE_CODE (arg1) != REAL_CST
2922 || TREE_OVERFLOW (arg1))
2923 return expand_builtin_mathfn_2 (exp, target, subtarget);
2924
2925 /* Handle constant exponents. */
2926
2927 /* For integer valued exponents we can expand to an optimal multiplication
2928 sequence using expand_powi. */
2929 c = TREE_REAL_CST (arg1);
2930 n = real_to_integer (&c);
2931 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2932 if (real_identical (&c, &cint)
2933 && ((n >= -1 && n <= 2)
2934 || (flag_unsafe_math_optimizations
2935 && !optimize_size
2936 && powi_cost (n) <= POWI_MAX_MULTS)))
2937 {
2938 op = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2939 if (n != 1)
2940 {
2941 op = force_reg (mode, op);
2942 op = expand_powi (op, mode, n);
2943 }
2944 return op;
2945 }
2946
2947 narg0 = builtin_save_expr (arg0);
2948
2949 /* If the exponent is not integer valued, check if it is half of an integer.
2950 In this case we can expand to sqrt (x) * x**(n/2). */
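/* For example (illustrative), pow (x, 3.5) can become sqrt (x) * x**3,
   since 2*3.5 == 7 and 7/2 == 3; the unsafe-math condition below
   guards all cases except the plain sqrt one (n == 1).  */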
2951 fn = mathfn_built_in (type, BUILT_IN_SQRT);
2952 if (fn != NULL_TREE)
2953 {
2954 real_arithmetic (&c2, MULT_EXPR, &c, &dconst2);
2955 n = real_to_integer (&c2);
2956 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2957 if (real_identical (&c2, &cint)
2958 && ((flag_unsafe_math_optimizations
2959 && !optimize_size
2960 && powi_cost (n/2) <= POWI_MAX_MULTS)
2961 || n == 1))
2962 {
2963 tree call_expr = build_call_expr (fn, 1, narg0);
2964 op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
2965 if (n != 1)
2966 {
2967 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
2968 op2 = force_reg (mode, op2);
2969 op2 = expand_powi (op2, mode, abs (n / 2));
2970 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
2971 0, OPTAB_LIB_WIDEN);
2972 /* If the original exponent was negative, reciprocate the
2973 result. */
2974 if (n < 0)
2975 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2976 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2977 }
2978 return op;
2979 }
2980 }
2981
2982 /* Check whether the exponent is a third of an integer. In this case
2983 we can expand to x**(n/3) * cbrt(x)**(n%3). As cbrt (x) is
2984 different from pow (x, 1./3.) due to rounding and behavior
2985 with negative x we need to constrain this transformation to
2986 unsafe math and positive x or finite math. */
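/* For example (illustrative), pow (x, 5./3.) can become
   x * cbrt (x)**2: 3*c rounds to n == 5, 5/3 == 1 and
   abs (5) % 3 == 2, matching the expansion below.  */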
2987 fn = mathfn_built_in (type, BUILT_IN_CBRT);
2988 if (fn != NULL_TREE
2989 && flag_unsafe_math_optimizations
2990 && (tree_expr_nonnegative_p (arg0)
2991 || !HONOR_NANS (mode)))
2992 {
2993 real_arithmetic (&c2, MULT_EXPR, &c, &dconst3);
2994 real_round (&c2, mode, &c2);
2995 n = real_to_integer (&c2);
2996 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2997 real_arithmetic (&c2, RDIV_EXPR, &cint, &dconst3);
2998 real_convert (&c2, mode, &c2);
2999 if (real_identical (&c2, &c)
3000 && ((!optimize_size
3001 && powi_cost (n/3) <= POWI_MAX_MULTS)
3002 || n == 1))
3003 {
3004 tree call_expr = build_call_expr (fn, 1, narg0);
3005 op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
3006 if (abs (n) % 3 == 2)
3007 op = expand_simple_binop (mode, MULT, op, op, op,
3008 0, OPTAB_LIB_WIDEN);
3009 if (n != 1)
3010 {
3011 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
3012 op2 = force_reg (mode, op2);
3013 op2 = expand_powi (op2, mode, abs (n / 3));
3014 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
3015 0, OPTAB_LIB_WIDEN);
3016 /* If the original exponent was negative, reciprocate the
3017 result. */
3018 if (n < 0)
3019 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
3020 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
3021 }
3022 return op;
3023 }
3024 }
3025
3026 /* Fall back to optab expansion. */
3027 return expand_builtin_mathfn_2 (exp, target, subtarget);
3028 }
3029
3030 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
3031 a normal call should be emitted rather than expanding the function
3032 in-line. EXP is the expression that is a call to the builtin
3033 function; if convenient, the result should be placed in TARGET. */
3034
3035 static rtx
3036 expand_builtin_powi (tree exp, rtx target, rtx subtarget)
3037 {
3038 tree arg0, arg1;
3039 rtx op0, op1;
3040 enum machine_mode mode;
3041 enum machine_mode mode2;
3042
3043 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
3044 return NULL_RTX;
3045
3046 arg0 = CALL_EXPR_ARG (exp, 0);
3047 arg1 = CALL_EXPR_ARG (exp, 1);
3048 mode = TYPE_MODE (TREE_TYPE (exp));
3049
3050 /* Handle constant power. */
3051
3052 if (TREE_CODE (arg1) == INTEGER_CST
3053 && !TREE_OVERFLOW (arg1))
3054 {
3055 HOST_WIDE_INT n = TREE_INT_CST_LOW (arg1);
3056
3057 /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
3058 Otherwise, check the number of multiplications required. */
3059 if ((TREE_INT_CST_HIGH (arg1) == 0
3060 || TREE_INT_CST_HIGH (arg1) == -1)
3061 && ((n >= -1 && n <= 2)
3062 || (! optimize_size
3063 && powi_cost (n) <= POWI_MAX_MULTS)))
3064 {
3065 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
3066 op0 = force_reg (mode, op0);
3067 return expand_powi (op0, mode, n);
3068 }
3069 }
3070
3071 /* Emit a libcall to libgcc. */
3072
3073 /* Mode of the 2nd argument must match that of an int. */
3074 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
3075
3076 if (target == NULL_RTX)
3077 target = gen_reg_rtx (mode);
3078
3079 op0 = expand_expr (arg0, subtarget, mode, EXPAND_NORMAL);
3080 if (GET_MODE (op0) != mode)
3081 op0 = convert_to_mode (mode, op0, 0);
3082 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
3083 if (GET_MODE (op1) != mode2)
3084 op1 = convert_to_mode (mode2, op1, 0);
3085
3086 target = emit_library_call_value (powi_optab->handlers[(int) mode].libfunc,
3087 target, LCT_CONST_MAKE_BLOCK, mode, 2,
3088 op0, mode, op1, mode2);
3089
3090 return target;
3091 }
3092
3093 /* Expand expression EXP which is a call to the strlen builtin. Return
3094 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3095 try to get the result in TARGET, if convenient. */
3096
3097 static rtx
3098 expand_builtin_strlen (tree exp, rtx target,
3099 enum machine_mode target_mode)
3100 {
3101 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3102 return NULL_RTX;
3103 else
3104 {
3105 rtx pat;
3106 tree len;
3107 tree src = CALL_EXPR_ARG (exp, 0);
3108 rtx result, src_reg, char_rtx, before_strlen;
3109 enum machine_mode insn_mode = target_mode, char_mode;
3110 enum insn_code icode = CODE_FOR_nothing;
3111 int align;
3112
3113 /* If the length can be computed at compile-time, return it. */
3114 len = c_strlen (src, 0);
3115 if (len)
3116 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3117
3118 /* If the length can be computed at compile-time and is a constant
3119 integer, but there are side-effects in src, evaluate
3120 src for side-effects, then return len.
3121 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3122 can be optimized into: i++; x = 3; */
3123 len = c_strlen (src, 1);
3124 if (len && TREE_CODE (len) == INTEGER_CST)
3125 {
3126 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3127 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3128 }
3129
3130 align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3131
3132 /* If SRC is not a pointer type, don't do this operation inline. */
3133 if (align == 0)
3134 return NULL_RTX;
3135
3136 /* Bail out if we can't compute strlen in the right mode. */
3137 while (insn_mode != VOIDmode)
3138 {
3139 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
3140 if (icode != CODE_FOR_nothing)
3141 break;
3142
3143 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3144 }
3145 if (insn_mode == VOIDmode)
3146 return NULL_RTX;
3147
3148 /* Make a place to write the result of the instruction. */
3149 result = target;
3150 if (! (result != 0
3151 && REG_P (result)
3152 && GET_MODE (result) == insn_mode
3153 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3154 result = gen_reg_rtx (insn_mode);
3155
3156 /* Make a place to hold the source address. We will not expand
3157 the actual source until we are sure that the expansion will
3158 not fail -- there are trees that cannot be expanded twice. */
3159 src_reg = gen_reg_rtx (Pmode);
3160
3161 /* Mark the beginning of the strlen sequence so we can emit the
3162 source operand later. */
3163 before_strlen = get_last_insn ();
3164
3165 char_rtx = const0_rtx;
3166 char_mode = insn_data[(int) icode].operand[2].mode;
3167 if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
3168 char_mode))
3169 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
3170
3171 pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
3172 char_rtx, GEN_INT (align));
3173 if (! pat)
3174 return NULL_RTX;
3175 emit_insn (pat);
3176
3177 /* Now that we are assured of success, expand the source. */
3178 start_sequence ();
3179 pat = expand_expr (src, src_reg, ptr_mode, EXPAND_NORMAL);
3180 if (pat != src_reg)
3181 emit_move_insn (src_reg, pat);
3182 pat = get_insns ();
3183 end_sequence ();
3184
3185 if (before_strlen)
3186 emit_insn_after (pat, before_strlen);
3187 else
3188 emit_insn_before (pat, get_insns ());
3189
3190 /* Return the value in the proper mode for this function. */
3191 if (GET_MODE (result) == target_mode)
3192 target = result;
3193 else if (target != 0)
3194 convert_move (target, result, 0);
3195 else
3196 target = convert_to_mode (target_mode, result, 0);
3197
3198 return target;
3199 }
3200 }
3201
3202 /* Expand a call to the strstr builtin. Return NULL_RTX if we failed; the
3203 caller should emit a normal call, otherwise try to get the result
3204 in TARGET, if convenient (and in mode MODE if that's convenient). */
3205
3206 static rtx
3207 expand_builtin_strstr (tree exp, rtx target, enum machine_mode mode)
3208 {
3209 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3210 {
3211 tree type = TREE_TYPE (exp);
3212 tree result = fold_builtin_strstr (CALL_EXPR_ARG (exp, 0),
3213 CALL_EXPR_ARG (exp, 1), type);
3214 if (result)
3215 return expand_expr (result, target, mode, EXPAND_NORMAL);
3216 }
3217 return NULL_RTX;
3218 }
3219
3220 /* Expand a call to the strchr builtin. Return NULL_RTX if we failed; the
3221 caller should emit a normal call, otherwise try to get the result
3222 in TARGET, if convenient (and in mode MODE if that's convenient). */
3223
3224 static rtx
3225 expand_builtin_strchr (tree exp, rtx target, enum machine_mode mode)
3226 {
3227 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3228 {
3229 tree type = TREE_TYPE (exp);
3230 tree result = fold_builtin_strchr (CALL_EXPR_ARG (exp, 0),
3231 CALL_EXPR_ARG (exp, 1), type);
3232 if (result)
3233 return expand_expr (result, target, mode, EXPAND_NORMAL);
3234
3235 /* FIXME: Should use strchrM optab so that ports can optimize this. */
3236 }
3237 return NULL_RTX;
3238 }
3239
3240 /* Expand a call to the strrchr builtin. Return NULL_RTX if we failed; the
3241 caller should emit a normal call, otherwise try to get the result
3242 in TARGET, if convenient (and in mode MODE if that's convenient). */
3243
3244 static rtx
3245 expand_builtin_strrchr (tree exp, rtx target, enum machine_mode mode)
3246 {
3247 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3248 {
3249 tree type = TREE_TYPE (exp);
3250 tree result = fold_builtin_strrchr (CALL_EXPR_ARG (exp, 0),
3251 CALL_EXPR_ARG (exp, 1), type);
3252 if (result)
3253 return expand_expr (result, target, mode, EXPAND_NORMAL);
3254 }
3255 return NULL_RTX;
3256 }
3257
3258 /* Expand a call to the strpbrk builtin. Return NULL_RTX if we failed; the
3259 caller should emit a normal call, otherwise try to get the result
3260 in TARGET, if convenient (and in mode MODE if that's convenient). */
3261
3262 static rtx
3263 expand_builtin_strpbrk (tree exp, rtx target, enum machine_mode mode)
3264 {
3265 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3266 {
3267 tree type = TREE_TYPE (exp);
3268 tree result = fold_builtin_strpbrk (CALL_EXPR_ARG (exp, 0),
3269 CALL_EXPR_ARG (exp, 1), type);
3270 if (result)
3271 return expand_expr (result, target, mode, EXPAND_NORMAL);
3272 }
3273 return NULL_RTX;
3274 }
3275
3276 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3277 bytes from constant string DATA + OFFSET and return it as target
3278 constant. */
3279
3280 static rtx
3281 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3282 enum machine_mode mode)
3283 {
3284 const char *str = (const char *) data;
3285
3286 gcc_assert (offset >= 0
3287 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3288 <= strlen (str) + 1));
3289
3290 return c_readstr (str + offset, mode);
3291 }
3292
3293 /* Expand a call EXP to the memcpy builtin.
3294 Return NULL_RTX if we failed; the caller should emit a normal call,
3295 otherwise try to get the result in TARGET, if convenient (and in
3296 mode MODE if that's convenient). */
3297
3298 static rtx
3299 expand_builtin_memcpy (tree exp, rtx target, enum machine_mode mode)
3300 {
3301 tree fndecl = get_callee_fndecl (exp);
3302
3303 if (!validate_arglist (exp,
3304 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3305 return NULL_RTX;
3306 else
3307 {
3308 tree dest = CALL_EXPR_ARG (exp, 0);
3309 tree src = CALL_EXPR_ARG (exp, 1);
3310 tree len = CALL_EXPR_ARG (exp, 2);
3311 const char *src_str;
3312 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3313 unsigned int dest_align
3314 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3315 rtx dest_mem, src_mem, dest_addr, len_rtx;
3316 tree result = fold_builtin_memory_op (dest, src, len,
3317 TREE_TYPE (TREE_TYPE (fndecl)),
3318 false, /*endp=*/0);
3319 HOST_WIDE_INT expected_size = -1;
3320 unsigned int expected_align = 0;
3321
3322 if (result)
3323 {
3324 while (TREE_CODE (result) == COMPOUND_EXPR)
3325 {
3326 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3327 EXPAND_NORMAL);
3328 result = TREE_OPERAND (result, 1);
3329 }
3330 return expand_expr (result, target, mode, EXPAND_NORMAL);
3331 }
3332
3333 /* If DEST is not a pointer type, call the normal function. */
3334 if (dest_align == 0)
3335 return NULL_RTX;
3336
3337 /* If SRC is not a pointer type, don't do this
3338 operation in-line. */
3339 if (src_align == 0)
3340 return NULL_RTX;
3341
3342 stringop_block_profile (exp, &expected_align, &expected_size);
3343 if (expected_align < dest_align)
3344 expected_align = dest_align;
3345 dest_mem = get_memory_rtx (dest, len);
3346 set_mem_align (dest_mem, dest_align);
3347 len_rtx = expand_normal (len);
3348 src_str = c_getstr (src);
3349
3350 /* If SRC is a string constant and block move would be done
3351 by pieces, we can avoid loading the string from memory
3352 and only store the computed constants. */
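/* Illustrative example (not taken from this source): for a hypothetical
   call such as
     memcpy (buf, "abcd", 5)
   SRC_STR is "abcd" and LEN_RTX is the constant 5, so, alignment
   permitting, the bytes are emitted directly by store_by_pieces
   instead of a library call. */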
3353 if (src_str
3354 && GET_CODE (len_rtx) == CONST_INT
3355 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3356 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3357 (void *) src_str, dest_align))
3358 {
3359 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3360 builtin_memcpy_read_str,
3361 (void *) src_str, dest_align, 0);
3362 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3363 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3364 return dest_mem;
3365 }
3366
3367 src_mem = get_memory_rtx (src, len);
3368 set_mem_align (src_mem, src_align);
3369
3370 /* Copy word part most expediently. */
3371 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3372 CALL_EXPR_TAILCALL (exp)
3373 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3374 expected_align, expected_size);
3375
3376 if (dest_addr == 0)
3377 {
3378 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3379 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3380 }
3381 return dest_addr;
3382 }
3383 }
3384
3385 /* Expand a call EXP to the mempcpy builtin.
3386 Return NULL_RTX if we failed; the caller should emit a normal call,
3387 otherwise try to get the result in TARGET, if convenient (and in
3388 mode MODE if that's convenient). If ENDP is 0 return the
3389 destination pointer, if ENDP is 1 return the end pointer ala
3390 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3391 stpcpy. */
3392
3393 static rtx
3394 expand_builtin_mempcpy(tree exp, rtx target, enum machine_mode mode)
3395 {
3396 if (!validate_arglist (exp,
3397 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3398 return NULL_RTX;
3399 else
3400 {
3401 tree dest = CALL_EXPR_ARG (exp, 0);
3402 tree src = CALL_EXPR_ARG (exp, 1);
3403 tree len = CALL_EXPR_ARG (exp, 2);
3404 return expand_builtin_mempcpy_args (dest, src, len,
3405 TREE_TYPE (exp),
3406 target, mode, /*endp=*/ 1);
3407 }
3408 }
3409
3410 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3411 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3412 so that this can also be called without constructing an actual CALL_EXPR.
3413 TYPE is the return type of the call. The other arguments and return value
3414 are the same as for expand_builtin_mempcpy. */
3415
3416 static rtx
3417 expand_builtin_mempcpy_args (tree dest, tree src, tree len, tree type,
3418 rtx target, enum machine_mode mode, int endp)
3419 {
3420 /* If return value is ignored, transform mempcpy into memcpy. */
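/* Illustrative sketch (not from the original source): a call whose
   result is unused, e.g.
     (void) mempcpy (dst, src, n);
   is expanded exactly as
     memcpy (dst, src, n);
   since the end pointer is never needed. */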
3421 if (target == const0_rtx)
3422 {
3423 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
3424
3425 if (!fn)
3426 return NULL_RTX;
3427
3428 return expand_expr (build_call_expr (fn, 3, dest, src, len),
3429 target, mode, EXPAND_NORMAL);
3430 }
3431 else
3432 {
3433 const char *src_str;
3434 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3435 unsigned int dest_align
3436 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3437 rtx dest_mem, src_mem, len_rtx;
3438 tree result = fold_builtin_memory_op (dest, src, len, type, false, endp);
3439
3440 if (result)
3441 {
3442 while (TREE_CODE (result) == COMPOUND_EXPR)
3443 {
3444 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3445 EXPAND_NORMAL);
3446 result = TREE_OPERAND (result, 1);
3447 }
3448 return expand_expr (result, target, mode, EXPAND_NORMAL);
3449 }
3450
3451 /* If either SRC or DEST is not a pointer type, don't do this
3452 operation in-line. */
3453 if (dest_align == 0 || src_align == 0)
3454 return NULL_RTX;
3455
3456 /* If LEN is not constant, call the normal function. */
3457 if (! host_integerp (len, 1))
3458 return NULL_RTX;
3459
3460 len_rtx = expand_normal (len);
3461 src_str = c_getstr (src);
3462
3463 /* If SRC is a string constant and block move would be done
3464 by pieces, we can avoid loading the string from memory
3465 and only store the computed constants. */
3466 if (src_str
3467 && GET_CODE (len_rtx) == CONST_INT
3468 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3469 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3470 (void *) src_str, dest_align))
3471 {
3472 dest_mem = get_memory_rtx (dest, len);
3473 set_mem_align (dest_mem, dest_align);
3474 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3475 builtin_memcpy_read_str,
3476 (void *) src_str, dest_align, endp);
3477 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3478 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3479 return dest_mem;
3480 }
3481
3482 if (GET_CODE (len_rtx) == CONST_INT
3483 && can_move_by_pieces (INTVAL (len_rtx),
3484 MIN (dest_align, src_align)))
3485 {
3486 dest_mem = get_memory_rtx (dest, len);
3487 set_mem_align (dest_mem, dest_align);
3488 src_mem = get_memory_rtx (src, len);
3489 set_mem_align (src_mem, src_align);
3490 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3491 MIN (dest_align, src_align), endp);
3492 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3493 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3494 return dest_mem;
3495 }
3496
3497 return NULL_RTX;
3498 }
3499 }
3500
3501 /* Expand expression EXP, which is a call to the memmove builtin. Return
3502 NULL_RTX if we failed; the caller should emit a normal call. */
3503
3504 static rtx
3505 expand_builtin_memmove (tree exp, rtx target, enum machine_mode mode, int ignore)
3506 {
3507 if (!validate_arglist (exp,
3508 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3509 return NULL_RTX;
3510 else
3511 {
3512 tree dest = CALL_EXPR_ARG (exp, 0);
3513 tree src = CALL_EXPR_ARG (exp, 1);
3514 tree len = CALL_EXPR_ARG (exp, 2);
3515 return expand_builtin_memmove_args (dest, src, len, TREE_TYPE (exp),
3516 target, mode, ignore);
3517 }
3518 }
3519
3520 /* Helper function to do the actual work for expand_builtin_memmove. The
3521 arguments to the builtin_memmove call DEST, SRC, and LEN are broken out
3522 so that this can also be called without constructing an actual CALL_EXPR.
3523 TYPE is the return type of the call. The other arguments and return value
3524 are the same as for expand_builtin_memmove. */
3525
3526 static rtx
3527 expand_builtin_memmove_args (tree dest, tree src, tree len,
3528 tree type, rtx target, enum machine_mode mode,
3529 int ignore)
3530 {
3531 tree result = fold_builtin_memory_op (dest, src, len, type, ignore, /*endp=*/3);
3532
3533 if (result)
3534 {
3535 STRIP_TYPE_NOPS (result);
3536 while (TREE_CODE (result) == COMPOUND_EXPR)
3537 {
3538 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3539 EXPAND_NORMAL);
3540 result = TREE_OPERAND (result, 1);
3541 }
3542 return expand_expr (result, target, mode, EXPAND_NORMAL);
3543 }
3544
3545 /* Otherwise, call the normal function. */
3546 return NULL_RTX;
3547 }
3548
3549 /* Expand expression EXP, which is a call to the bcopy builtin. Return
3550 NULL_RTX if we failed; the caller should emit a normal call. */
3551
3552 static rtx
3553 expand_builtin_bcopy (tree exp, int ignore)
3554 {
3555 tree type = TREE_TYPE (exp);
3556 tree src, dest, size;
3557
3558 if (!validate_arglist (exp,
3559 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3560 return NULL_RTX;
3561
3562 src = CALL_EXPR_ARG (exp, 0);
3563 dest = CALL_EXPR_ARG (exp, 1);
3564 size = CALL_EXPR_ARG (exp, 2);
3565
3566 /* Transform bcopy(ptr x, ptr y, int z) to memmove(ptr y, ptr x, size_t z).
3567 This is done this way so that if it isn't expanded inline, we fall
3568 back to calling bcopy instead of memmove. */
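/* Illustrative sketch (not from the original source): a call
     bcopy (src, dst, n);
   is expanded as if it were
     memmove (dst, src, (size_t) n);
   note the swapped pointer arguments. */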
3569 return expand_builtin_memmove_args (dest, src,
3570 fold_convert (sizetype, size),
3571 type, const0_rtx, VOIDmode,
3572 ignore);
3573 }
3574
3575 #ifndef HAVE_movstr
3576 # define HAVE_movstr 0
3577 # define CODE_FOR_movstr CODE_FOR_nothing
3578 #endif
3579
3580 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3581 we failed; the caller should emit a normal call. Otherwise try to
3582 get the result in TARGET, if convenient. If ENDP is 0 return the
3583 destination pointer, if ENDP is 1 return the end pointer ala
3584 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3585 stpcpy. */
3586
3587 static rtx
3588 expand_movstr (tree dest, tree src, rtx target, int endp)
3589 {
3590 rtx end;
3591 rtx dest_mem;
3592 rtx src_mem;
3593 rtx insn;
3594 const struct insn_data * data;
3595
3596 if (!HAVE_movstr)
3597 return NULL_RTX;
3598
3599 dest_mem = get_memory_rtx (dest, NULL);
3600 src_mem = get_memory_rtx (src, NULL);
3601 if (!endp)
3602 {
3603 target = force_reg (Pmode, XEXP (dest_mem, 0));
3604 dest_mem = replace_equiv_address (dest_mem, target);
3605 end = gen_reg_rtx (Pmode);
3606 }
3607 else
3608 {
3609 if (target == 0 || target == const0_rtx)
3610 {
3611 end = gen_reg_rtx (Pmode);
3612 if (target == 0)
3613 target = end;
3614 }
3615 else
3616 end = target;
3617 }
3618
3619 data = insn_data + CODE_FOR_movstr;
3620
3621 if (data->operand[0].mode != VOIDmode)
3622 end = gen_lowpart (data->operand[0].mode, end);
3623
3624 insn = data->genfun (end, dest_mem, src_mem);
3625
3626 gcc_assert (insn);
3627
3628 emit_insn (insn);
3629
3630 /* movstr is supposed to set end to the address of the NUL
3631 terminator. If the caller requested a mempcpy-like return value,
3632 adjust it. */
3633 if (endp == 1 && target != const0_rtx)
3634 {
3635 rtx tem = plus_constant (gen_lowpart (GET_MODE (target), end), 1);
3636 emit_move_insn (target, force_operand (tem, NULL_RTX));
3637 }
3638
3639 return target;
3640 }
3641
3642 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3643 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
3644 try to get the result in TARGET, if convenient (and in mode MODE if that's
3645 convenient). */
3646
3647 static rtx
3648 expand_builtin_strcpy (tree fndecl, tree exp, rtx target, enum machine_mode mode)
3649 {
3650 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3651 {
3652 tree dest = CALL_EXPR_ARG (exp, 0);
3653 tree src = CALL_EXPR_ARG (exp, 1);
3654 return expand_builtin_strcpy_args (fndecl, dest, src, target, mode);
3655 }
3656 return NULL_RTX;
3657 }
3658
3659 /* Helper function to do the actual work for expand_builtin_strcpy. The
3660 arguments to the builtin_strcpy call DEST and SRC are broken out
3661 so that this can also be called without constructing an actual CALL_EXPR.
3662 The other arguments and return value are the same as for
3663 expand_builtin_strcpy. */
3664
3665 static rtx
3666 expand_builtin_strcpy_args (tree fndecl, tree dest, tree src,
3667 rtx target, enum machine_mode mode)
3668 {
3669 tree result = fold_builtin_strcpy (fndecl, dest, src, 0);
3670 if (result)
3671 return expand_expr (result, target, mode, EXPAND_NORMAL);
3672 return expand_movstr (dest, src, target, /*endp=*/0);
3673
3674 }
3675
3676 /* Expand a call EXP to the stpcpy builtin.
3677 Return NULL_RTX if we failed; the caller should emit a normal call.
3678 Otherwise try to get the result in TARGET, if convenient (and in
3679 mode MODE if that's convenient). */
3680
3681 static rtx
3682 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3683 {
3684 tree dst, src;
3685
3686 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3687 return NULL_RTX;
3688
3689 dst = CALL_EXPR_ARG (exp, 0);
3690 src = CALL_EXPR_ARG (exp, 1);
3691
3692 /* If return value is ignored, transform stpcpy into strcpy. */
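/* Illustrative sketch (not from the original source): when the result
   is unused, e.g.
     (void) stpcpy (d, s);
   this is expanded exactly as
     strcpy (d, s); */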
3693 if (target == const0_rtx)
3694 {
3695 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
3696 if (!fn)
3697 return NULL_RTX;
3698
3699 return expand_expr (build_call_expr (fn, 2, dst, src),
3700 target, mode, EXPAND_NORMAL);
3701 }
3702 else
3703 {
3704 tree len, lenp1;
3705 rtx ret;
3706
3707 /* Ensure we get an actual string whose length can be evaluated at
3708 compile-time, not an expression containing a string. This is
3709 because the latter will potentially produce pessimized code
3710 when used to produce the return value. */
3711 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3712 return expand_movstr (dst, src, target, /*endp=*/2);
3713
3714 lenp1 = size_binop (PLUS_EXPR, len, ssize_int (1));
3715 ret = expand_builtin_mempcpy_args (dst, src, lenp1, TREE_TYPE (exp),
3716 target, mode, /*endp=*/2);
3717
3718 if (ret)
3719 return ret;
3720
3721 if (TREE_CODE (len) == INTEGER_CST)
3722 {
3723 rtx len_rtx = expand_normal (len);
3724
3725 if (GET_CODE (len_rtx) == CONST_INT)
3726 {
3727 ret = expand_builtin_strcpy_args (get_callee_fndecl (exp),
3728 dst, src, target, mode);
3729
3730 if (ret)
3731 {
3732 if (! target)
3733 {
3734 if (mode != VOIDmode)
3735 target = gen_reg_rtx (mode);
3736 else
3737 target = gen_reg_rtx (GET_MODE (ret));
3738 }
3739 if (GET_MODE (target) != GET_MODE (ret))
3740 ret = gen_lowpart (GET_MODE (target), ret);
3741
3742 ret = plus_constant (ret, INTVAL (len_rtx));
3743 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3744 gcc_assert (ret);
3745
3746 return target;
3747 }
3748 }
3749 }
3750
3751 return expand_movstr (dst, src, target, /*endp=*/2);
3752 }
3753 }
3754
3755 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3756 bytes from constant string DATA + OFFSET and return it as target
3757 constant. */
3758
3759 static rtx
3760 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3761 enum machine_mode mode)
3762 {
3763 const char *str = (const char *) data;
3764
3765 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3766 return const0_rtx;
3767
3768 return c_readstr (str + offset, mode);
3769 }
3770
3771 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3772 NULL_RTX if we failed; the caller should emit a normal call. */
3773
3774 static rtx
3775 expand_builtin_strncpy (tree exp, rtx target, enum machine_mode mode)
3776 {
3777 tree fndecl = get_callee_fndecl (exp);
3778
3779 if (validate_arglist (exp,
3780 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3781 {
3782 tree dest = CALL_EXPR_ARG (exp, 0);
3783 tree src = CALL_EXPR_ARG (exp, 1);
3784 tree len = CALL_EXPR_ARG (exp, 2);
3785 tree slen = c_strlen (src, 1);
3786 tree result = fold_builtin_strncpy (fndecl, dest, src, len, slen);
3787
3788 if (result)
3789 {
3790 while (TREE_CODE (result) == COMPOUND_EXPR)
3791 {
3792 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3793 EXPAND_NORMAL);
3794 result = TREE_OPERAND (result, 1);
3795 }
3796 return expand_expr (result, target, mode, EXPAND_NORMAL);
3797 }
3798
3799 /* We must be passed a constant len and src parameter. */
3800 if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
3801 return NULL_RTX;
3802
3803 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
3804
3805 /* We're required to pad with trailing zeros if the requested
3806 len is greater than strlen(s2)+1. In that case try to
3807 use store_by_pieces; if it fails, punt. */
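/* Illustrative sketch (not from the original source): for a
   hypothetical call
     strncpy (buf, "ab", 8)
   SLEN+1 is 3 and LEN is 8, so store_by_pieces writes 'a', 'b' and
   six zero bytes, matching the required zero padding. */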
3808 if (tree_int_cst_lt (slen, len))
3809 {
3810 unsigned int dest_align
3811 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3812 const char *p = c_getstr (src);
3813 rtx dest_mem;
3814
3815 if (!p || dest_align == 0 || !host_integerp (len, 1)
3816 || !can_store_by_pieces (tree_low_cst (len, 1),
3817 builtin_strncpy_read_str,
3818 (void *) p, dest_align))
3819 return NULL_RTX;
3820
3821 dest_mem = get_memory_rtx (dest, len);
3822 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3823 builtin_strncpy_read_str,
3824 (void *) p, dest_align, 0);
3825 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3826 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3827 return dest_mem;
3828 }
3829 }
3830 return NULL_RTX;
3831 }
3832
3833 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3834 bytes from constant string DATA + OFFSET and return it as target
3835 constant. */
3836
3837 rtx
3838 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3839 enum machine_mode mode)
3840 {
3841 const char *c = (const char *) data;
3842 char *p = alloca (GET_MODE_SIZE (mode));
3843
3844 memset (p, *c, GET_MODE_SIZE (mode));
3845
3846 return c_readstr (p, mode);
3847 }
3848
3849 /* Callback routine for store_by_pieces. Return the RTL of a register
3850 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3851 char value given in the RTL register data. For example, if mode is
3852 4 bytes wide, return the RTL for 0x01010101*data. */
3853
3854 static rtx
3855 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3856 enum machine_mode mode)
3857 {
3858 rtx target, coeff;
3859 size_t size;
3860 char *p;
3861
3862 size = GET_MODE_SIZE (mode);
3863 if (size == 1)
3864 return (rtx) data;
3865
3866 p = alloca (size);
3867 memset (p, 1, size);
3868 coeff = c_readstr (p, mode);
3869
3870 target = convert_to_mode (mode, (rtx) data, 1);
3871 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3872 return force_reg (mode, target);
3873 }
3874
3875 /* Expand expression EXP, which is a call to the memset builtin. Return
3876 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
3877 try to get the result in TARGET, if convenient (and in mode MODE if that's
3878 convenient). */
3879
3880 static rtx
3881 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3882 {
3883 if (!validate_arglist (exp,
3884 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3885 return NULL_RTX;
3886 else
3887 {
3888 tree dest = CALL_EXPR_ARG (exp, 0);
3889 tree val = CALL_EXPR_ARG (exp, 1);
3890 tree len = CALL_EXPR_ARG (exp, 2);
3891 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3892 }
3893 }
3894
3895 /* Helper function to do the actual work for expand_builtin_memset. The
3896 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3897 so that this can also be called without constructing an actual CALL_EXPR.
3898 The other arguments and return value are the same as for
3899 expand_builtin_memset. */
3900
3901 static rtx
3902 expand_builtin_memset_args (tree dest, tree val, tree len,
3903 rtx target, enum machine_mode mode, tree orig_exp)
3904 {
3905 tree fndecl, fn;
3906 enum built_in_function fcode;
3907 char c;
3908 unsigned int dest_align;
3909 rtx dest_mem, dest_addr, len_rtx;
3910 HOST_WIDE_INT expected_size = -1;
3911 unsigned int expected_align = 0;
3912
3913 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3914
3915 /* If DEST is not a pointer type, don't do this operation in-line. */
3916 if (dest_align == 0)
3917 return NULL_RTX;
3918
3919 stringop_block_profile (orig_exp, &expected_align, &expected_size);
3920 if (expected_align < dest_align)
3921 expected_align = dest_align;
3922
3923 /* If the LEN parameter is zero, return DEST. */
3924 if (integer_zerop (len))
3925 {
3926 /* Evaluate and ignore VAL in case it has side-effects. */
3927 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3928 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3929 }
3930
3931 /* Stabilize the arguments in case we fail. */
3932 dest = builtin_save_expr (dest);
3933 val = builtin_save_expr (val);
3934 len = builtin_save_expr (len);
3935
3936 len_rtx = expand_normal (len);
3937 dest_mem = get_memory_rtx (dest, len);
3938
3939 if (TREE_CODE (val) != INTEGER_CST)
3940 {
3941 rtx val_rtx;
3942
3943 val_rtx = expand_normal (val);
3944 val_rtx = convert_to_mode (TYPE_MODE (unsigned_char_type_node),
3945 val_rtx, 0);
3946
3947 /* Assume that we can memset by pieces if we can store
3948 the coefficients by pieces (in the required modes).
3949 We can't pass builtin_memset_gen_str as that emits RTL. */
3950 c = 1;
3951 if (host_integerp (len, 1)
3952 && !(optimize_size && tree_low_cst (len, 1) > 1)
3953 && can_store_by_pieces (tree_low_cst (len, 1),
3954 builtin_memset_read_str, &c, dest_align))
3955 {
3956 val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
3957 val_rtx);
3958 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3959 builtin_memset_gen_str, val_rtx, dest_align, 0);
3960 }
3961 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3962 dest_align, expected_align,
3963 expected_size))
3964 goto do_libcall;
3965
3966 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3967 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3968 return dest_mem;
3969 }
3970
3971 if (target_char_cast (val, &c))
3972 goto do_libcall;
3973
3974 if (c)
3975 {
3976 if (host_integerp (len, 1)
3977 && !(optimize_size && tree_low_cst (len, 1) > 1)
3978 && can_store_by_pieces (tree_low_cst (len, 1),
3979 builtin_memset_read_str, &c, dest_align))
3980 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3981 builtin_memset_read_str, &c, dest_align, 0);
3982 else if (!set_storage_via_setmem (dest_mem, len_rtx, GEN_INT (c),
3983 dest_align, expected_align,
3984 expected_size))
3985 goto do_libcall;
3986
3987 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3988 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3989 return dest_mem;
3990 }
3991
3992 set_mem_align (dest_mem, dest_align);
3993 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3994 CALL_EXPR_TAILCALL (orig_exp)
3995 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3996 expected_align, expected_size);
3997
3998 if (dest_addr == 0)
3999 {
4000 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4001 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4002 }
4003
4004 return dest_addr;
4005
4006 do_libcall:
4007 fndecl = get_callee_fndecl (orig_exp);
4008 fcode = DECL_FUNCTION_CODE (fndecl);
4009 if (fcode == BUILT_IN_MEMSET)
4010 fn = build_call_expr (fndecl, 3, dest, val, len);
4011 else if (fcode == BUILT_IN_BZERO)
4012 fn = build_call_expr (fndecl, 2, dest, len);
4013 else
4014 gcc_unreachable ();
4015 if (TREE_CODE (fn) == CALL_EXPR)
4016 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4017 return expand_call (fn, target, target == const0_rtx);
4018 }
4019
4020 /* Expand expression EXP, which is a call to the bzero builtin. Return
4021 NULL_RTX if we failed; the caller should emit a normal call. */
4022
4023 static rtx
4024 expand_builtin_bzero (tree exp)
4025 {
4026 tree dest, size;
4027
4028 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4029 return NULL_RTX;
4030
4031 dest = CALL_EXPR_ARG (exp, 0);
4032 size = CALL_EXPR_ARG (exp, 1);
4033
4034 /* Transform bzero(ptr x, int y) into
4035 memset(ptr x, int 0, size_t y). This is done this way
4036 so that if it isn't expanded inline, we fall back to
4037 calling bzero instead of memset. */
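/* Illustrative sketch (not from the original source): a call
     bzero (p, n);
   is expanded as if it were
     memset (p, 0, (size_t) n); */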
4038
4039 return expand_builtin_memset_args (dest, integer_zero_node,
4040 fold_convert (sizetype, size),
4041 const0_rtx, VOIDmode, exp);
4042 }
4043
4044 /* Expand a call to the memchr builtin. Return NULL_RTX if we failed;
4045 the caller should emit a normal call. Otherwise try to get the result
4046 in TARGET, if convenient (and in mode MODE if that's convenient). */
4047
4048 static rtx
4049 expand_builtin_memchr (tree exp, rtx target, enum machine_mode mode)
4050 {
4051 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE,
4052 INTEGER_TYPE, VOID_TYPE))
4053 {
4054 tree type = TREE_TYPE (exp);
4055 tree result = fold_builtin_memchr (CALL_EXPR_ARG (exp, 0),
4056 CALL_EXPR_ARG (exp, 1),
4057 CALL_EXPR_ARG (exp, 2), type);
4058 if (result)
4059 return expand_expr (result, target, mode, EXPAND_NORMAL);
4060 }
4061 return NULL_RTX;
4062 }
4063
4064 /* Expand expression EXP, which is a call to the memcmp built-in function.
4065 Return NULL_RTX if we failed and the
4066 caller should emit a normal call, otherwise try to get the result in
4067 TARGET, if convenient (and in mode MODE, if that's convenient). */
4068
4069 static rtx
4070 expand_builtin_memcmp (tree exp, rtx target, enum machine_mode mode)
4071 {
4072 if (!validate_arglist (exp,
4073 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4074 return NULL_RTX;
4075 else
4076 {
4077 tree result = fold_builtin_memcmp (CALL_EXPR_ARG (exp, 0),
4078 CALL_EXPR_ARG (exp, 1),
4079 CALL_EXPR_ARG (exp, 2));
4080 if (result)
4081 return expand_expr (result, target, mode, EXPAND_NORMAL);
4082 }
4083
4084 #if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
4085 {
4086 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4087 rtx result;
4088 rtx insn;
4089 tree arg1 = CALL_EXPR_ARG (exp, 0);
4090 tree arg2 = CALL_EXPR_ARG (exp, 1);
4091 tree len = CALL_EXPR_ARG (exp, 2);
4092
4093 int arg1_align
4094 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4095 int arg2_align
4096 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4097 enum machine_mode insn_mode;
4098
4099 #ifdef HAVE_cmpmemsi
4100 if (HAVE_cmpmemsi)
4101 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
4102 else
4103 #endif
4104 #ifdef HAVE_cmpstrnsi
4105 if (HAVE_cmpstrnsi)
4106 insn_mode = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4107 else
4108 #endif
4109 return NULL_RTX;
4110
4111 /* If we don't have POINTER_TYPE, call the function. */
4112 if (arg1_align == 0 || arg2_align == 0)
4113 return NULL_RTX;
4114
4115 /* Make a place to write the result of the instruction. */
4116 result = target;
4117 if (! (result != 0
4118 && REG_P (result) && GET_MODE (result) == insn_mode
4119 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4120 result = gen_reg_rtx (insn_mode);
4121
4122 arg1_rtx = get_memory_rtx (arg1, len);
4123 arg2_rtx = get_memory_rtx (arg2, len);
4124 arg3_rtx = expand_normal (len);
4125
4126 /* Set MEM_SIZE as appropriate. */
4127 if (GET_CODE (arg3_rtx) == CONST_INT)
4128 {
4129 set_mem_size (arg1_rtx, arg3_rtx);
4130 set_mem_size (arg2_rtx, arg3_rtx);
4131 }
4132
4133 #ifdef HAVE_cmpmemsi
4134 if (HAVE_cmpmemsi)
4135 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4136 GEN_INT (MIN (arg1_align, arg2_align)));
4137 else
4138 #endif
4139 #ifdef HAVE_cmpstrnsi
4140 if (HAVE_cmpstrnsi)
4141 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4142 GEN_INT (MIN (arg1_align, arg2_align)));
4143 else
4144 #endif
4145 gcc_unreachable ();
4146
4147 if (insn)
4148 emit_insn (insn);
4149 else
4150 emit_library_call_value (memcmp_libfunc, result, LCT_PURE_MAKE_BLOCK,
4151 TYPE_MODE (integer_type_node), 3,
4152 XEXP (arg1_rtx, 0), Pmode,
4153 XEXP (arg2_rtx, 0), Pmode,
4154 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
4155 TYPE_UNSIGNED (sizetype)),
4156 TYPE_MODE (sizetype));
4157
4158 /* Return the value in the proper mode for this function. */
4159 mode = TYPE_MODE (TREE_TYPE (exp));
4160 if (GET_MODE (result) == mode)
4161 return result;
4162 else if (target != 0)
4163 {
4164 convert_move (target, result, 0);
4165 return target;
4166 }
4167 else
4168 return convert_to_mode (mode, result, 0);
4169 }
4170 #endif
4171
4172 return NULL_RTX;
4173 }
4174
4175 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4176 if we failed; the caller should emit a normal call. Otherwise try to get
4177 the result in TARGET, if convenient. */
4178
4179 static rtx
4180 expand_builtin_strcmp (tree exp, rtx target, enum machine_mode mode)
4181 {
4182 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4183 return NULL_RTX;
4184 else
4185 {
4186 tree result = fold_builtin_strcmp (CALL_EXPR_ARG (exp, 0),
4187 CALL_EXPR_ARG (exp, 1));
4188 if (result)
4189 return expand_expr (result, target, mode, EXPAND_NORMAL);
4190 }
4191
4192 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
4193 if (cmpstr_optab[SImode] != CODE_FOR_nothing
4194 || cmpstrn_optab[SImode] != CODE_FOR_nothing)
4195 {
4196 rtx arg1_rtx, arg2_rtx;
4197 rtx result, insn = NULL_RTX;
4198 tree fndecl, fn;
4199 tree arg1 = CALL_EXPR_ARG (exp, 0);
4200 tree arg2 = CALL_EXPR_ARG (exp, 1);
4201
4202 int arg1_align
4203 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4204 int arg2_align
4205 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4206
4207 /* If we don't have POINTER_TYPE, call the function. */
4208 if (arg1_align == 0 || arg2_align == 0)
4209 return NULL_RTX;
4210
4211 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4212 arg1 = builtin_save_expr (arg1);
4213 arg2 = builtin_save_expr (arg2);
4214
4215 arg1_rtx = get_memory_rtx (arg1, NULL);
4216 arg2_rtx = get_memory_rtx (arg2, NULL);
4217
4218 #ifdef HAVE_cmpstrsi
4219 /* Try to call cmpstrsi. */
4220 if (HAVE_cmpstrsi)
4221 {
4222 enum machine_mode insn_mode
4223 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
4224
4225 /* Make a place to write the result of the instruction. */
4226 result = target;
4227 if (! (result != 0
4228 && REG_P (result) && GET_MODE (result) == insn_mode
4229 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4230 result = gen_reg_rtx (insn_mode);
4231
4232 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
4233 GEN_INT (MIN (arg1_align, arg2_align)));
4234 }
4235 #endif
4236 #ifdef HAVE_cmpstrnsi
4237 /* Try to determine at least one length and call cmpstrnsi. */
4238 if (!insn && HAVE_cmpstrnsi)
4239 {
4240 tree len;
4241 rtx arg3_rtx;
4242
4243 enum machine_mode insn_mode
4244 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4245 tree len1 = c_strlen (arg1, 1);
4246 tree len2 = c_strlen (arg2, 1);
4247
4248 if (len1)
4249 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4250 if (len2)
4251 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4252
4253 /* If we don't have a constant length for the first, use the length
4254 of the second, if we know it. We don't require a constant for
4255 this case; some cost analysis could be done if both are available
4256 but neither is constant. For now, assume they're equally cheap,
4257 unless one has side effects. If both strings have constant lengths,
4258 use the smaller. */
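/* Illustrative sketch (not from the original source): for a
   hypothetical call
     strcmp (s, "abc")
   LEN1 is unknown and LEN2 is 4 (strlen plus 1), so cmpstrnsi is
   asked to compare at most 4 bytes. */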
4259
4260 if (!len1)
4261 len = len2;
4262 else if (!len2)
4263 len = len1;
4264 else if (TREE_SIDE_EFFECTS (len1))
4265 len = len2;
4266 else if (TREE_SIDE_EFFECTS (len2))
4267 len = len1;
4268 else if (TREE_CODE (len1) != INTEGER_CST)
4269 len = len2;
4270 else if (TREE_CODE (len2) != INTEGER_CST)
4271 len = len1;
4272 else if (tree_int_cst_lt (len1, len2))
4273 len = len1;
4274 else
4275 len = len2;
4276
4277 /* If both arguments have side effects, we cannot optimize. */
4278 if (!len || TREE_SIDE_EFFECTS (len))
4279 goto do_libcall;
4280
4281 arg3_rtx = expand_normal (len);
4282
4283 /* Make a place to write the result of the instruction. */
4284 result = target;
4285 if (! (result != 0
4286 && REG_P (result) && GET_MODE (result) == insn_mode
4287 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4288 result = gen_reg_rtx (insn_mode);
4289
4290 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4291 GEN_INT (MIN (arg1_align, arg2_align)));
4292 }
4293 #endif
4294
4295 if (insn)
4296 {
4297 emit_insn (insn);
4298
4299 /* Return the value in the proper mode for this function. */
4300 mode = TYPE_MODE (TREE_TYPE (exp));
4301 if (GET_MODE (result) == mode)
4302 return result;
4303 if (target == 0)
4304 return convert_to_mode (mode, result, 0);
4305 convert_move (target, result, 0);
4306 return target;
4307 }
4308
4309 /* Expand the library call ourselves using a stabilized argument
4310 list to avoid re-evaluating the function's arguments twice. */
4311 #ifdef HAVE_cmpstrnsi
4312 do_libcall:
4313 #endif
4314 fndecl = get_callee_fndecl (exp);
4315 fn = build_call_expr (fndecl, 2, arg1, arg2);
4316 if (TREE_CODE (fn) == CALL_EXPR)
4317 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4318 return expand_call (fn, target, target == const0_rtx);
4319 }
4320 #endif
4321 return NULL_RTX;
4322 }
4323
4324 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4325 NULL_RTX if we failed; the caller should emit a normal call. Otherwise try to get
4326 the result in TARGET, if convenient. */
4327
4328 static rtx
4329 expand_builtin_strncmp (tree exp, rtx target, enum machine_mode mode)
4330 {
4331 if (!validate_arglist (exp,
4332 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4333 return NULL_RTX;
4334 else
4335 {
4336 tree result = fold_builtin_strncmp (CALL_EXPR_ARG (exp, 0),
4337 CALL_EXPR_ARG (exp, 1),
4338 CALL_EXPR_ARG (exp, 2));
4339 if (result)
4340 return expand_expr (result, target, mode, EXPAND_NORMAL);
4341 }
4342
4343 /* If c_strlen can determine an expression for one of the string
4344 lengths, and it doesn't have side effects, then emit cmpstrnsi
4345 using length MIN(strlen(string)+1, arg3). */
4346 #ifdef HAVE_cmpstrnsi
4347 if (HAVE_cmpstrnsi)
4348 {
4349 tree len, len1, len2;
4350 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4351 rtx result, insn;
4352 tree fndecl, fn;
4353 tree arg1 = CALL_EXPR_ARG (exp, 0);
4354 tree arg2 = CALL_EXPR_ARG (exp, 1);
4355 tree arg3 = CALL_EXPR_ARG (exp, 2);
4356
4357 int arg1_align
4358 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4359 int arg2_align
4360 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4361 enum machine_mode insn_mode
4362 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4363
4364 len1 = c_strlen (arg1, 1);
4365 len2 = c_strlen (arg2, 1);
4366
4367 if (len1)
4368 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4369 if (len2)
4370 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4371
4372 /* If we don't have a constant length for the first, use the length
4373 of the second, if we know it. We don't require a constant for
4374 this case; some cost analysis could be done if both are available
4375 but neither is constant. For now, assume they're equally cheap,
4376 unless one has side effects. If both strings have constant lengths,
4377 use the smaller. */
4378
4379 if (!len1)
4380 len = len2;
4381 else if (!len2)
4382 len = len1;
4383 else if (TREE_SIDE_EFFECTS (len1))
4384 len = len2;
4385 else if (TREE_SIDE_EFFECTS (len2))
4386 len = len1;
4387 else if (TREE_CODE (len1) != INTEGER_CST)
4388 len = len2;
4389 else if (TREE_CODE (len2) != INTEGER_CST)
4390 len = len1;
4391 else if (tree_int_cst_lt (len1, len2))
4392 len = len1;
4393 else
4394 len = len2;
4395
4396 /* If both arguments have side effects, we cannot optimize. */
4397 if (!len || TREE_SIDE_EFFECTS (len))
4398 return NULL_RTX;
4399
4400 /* The actual new length parameter is MIN(len,arg3). */
4401 len = fold_build2 (MIN_EXPR, TREE_TYPE (len), len,
4402 fold_convert (TREE_TYPE (len), arg3));
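/* Illustrative sketch (not from the original source): for a
   hypothetical call
     strncmp (s, "abcd", 10)
   LEN2 is 5 (strlen plus 1), so the cmpstrnsi length becomes
   MIN (5, 10) = 5. */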
4403
4404 /* If we don't have POINTER_TYPE, call the function. */
4405 if (arg1_align == 0 || arg2_align == 0)
4406 return NULL_RTX;
4407
4408 /* Make a place to write the result of the instruction. */
4409 result = target;
4410 if (! (result != 0
4411 && REG_P (result) && GET_MODE (result) == insn_mode
4412 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4413 result = gen_reg_rtx (insn_mode);
4414
4415 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4416 arg1 = builtin_save_expr (arg1);
4417 arg2 = builtin_save_expr (arg2);
4418 len = builtin_save_expr (len);
4419
4420 arg1_rtx = get_memory_rtx (arg1, len);
4421 arg2_rtx = get_memory_rtx (arg2, len);
4422 arg3_rtx = expand_normal (len);
4423 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4424 GEN_INT (MIN (arg1_align, arg2_align)));
4425 if (insn)
4426 {
4427 emit_insn (insn);
4428
4429 /* Return the value in the proper mode for this function. */
4430 mode = TYPE_MODE (TREE_TYPE (exp));
4431 if (GET_MODE (result) == mode)
4432 return result;
4433 if (target == 0)
4434 return convert_to_mode (mode, result, 0);
4435 convert_move (target, result, 0);
4436 return target;
4437 }
4438
4439 /* Expand the library call ourselves using a stabilized argument
4440 list to avoid re-evaluating the function's arguments twice. */
4441 fndecl = get_callee_fndecl (exp);
4442 fn = build_call_expr (fndecl, 3, arg1, arg2, len);
4443 if (TREE_CODE (fn) == CALL_EXPR)
4444 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4445 return expand_call (fn, target, target == const0_rtx);
4446 }
4447 #endif
4448 return NULL_RTX;
4449 }
4450
4451 /* Expand expression EXP, which is a call to the strcat builtin.
4452 Return NULL_RTX if we failed; the caller should emit a normal call.
4453 Otherwise try to get the result in TARGET, if convenient. */
4454
4455 static rtx
4456 expand_builtin_strcat (tree fndecl, tree exp, rtx target, enum machine_mode mode)
4457 {
4458 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4459 return NULL_RTX;
4460 else
4461 {
4462 tree dst = CALL_EXPR_ARG (exp, 0);
4463 tree src = CALL_EXPR_ARG (exp, 1);
4464 const char *p = c_getstr (src);
4465
4466 /* If the string length is zero, return the dst parameter. */
4467 if (p && *p == '\0')
4468 return expand_expr (dst, target, mode, EXPAND_NORMAL);
4469
4470 if (!optimize_size)
4471 {
4472 /* See if we can store by pieces into (dst + strlen(dst)). */
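/* Illustrative sketch (not from the original source): a call
     strcat (d, "xy");
   is expanded roughly as
     strcpy (d + strlen (d), "xy");
   with D itself returned as the result. */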
4473 tree newsrc, newdst,
4474 strlen_fn = implicit_built_in_decls[BUILT_IN_STRLEN];
4475 rtx insns;
4476
4477 /* Stabilize the argument list. */
4478 newsrc = builtin_save_expr (src);
4479 dst = builtin_save_expr (dst);
4480
4481 start_sequence ();
4482
4483 /* Create strlen (dst). */
4484 newdst = build_call_expr (strlen_fn, 1, dst);
4485 /* Create (dst p+ strlen (dst)). */
4486
4487 newdst = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dst), dst, newdst);
4488 newdst = builtin_save_expr (newdst);
4489
4490 if (!expand_builtin_strcpy_args (fndecl, newdst, newsrc, target, mode))
4491 {
4492 end_sequence (); /* Stop sequence. */
4493 return NULL_RTX;
4494 }
4495
4496 /* Output the entire sequence. */
4497 insns = get_insns ();
4498 end_sequence ();
4499 emit_insn (insns);
4500
4501 return expand_expr (dst, target, mode, EXPAND_NORMAL);
4502 }
4503
4504 return NULL_RTX;
4505 }
4506 }
4507
4508 /* Expand expression EXP, which is a call to the strncat builtin.
4509 Return NULL_RTX if we failed; the caller should emit a normal call.
4510 Otherwise try to get the result in TARGET, if convenient. */
4511
4512 static rtx
4513 expand_builtin_strncat (tree exp, rtx target, enum machine_mode mode)
4514 {
4515 if (validate_arglist (exp,
4516 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4517 {
4518 tree result = fold_builtin_strncat (CALL_EXPR_ARG (exp, 0),
4519 CALL_EXPR_ARG (exp, 1),
4520 CALL_EXPR_ARG (exp, 2));
4521 if (result)
4522 return expand_expr (result, target, mode, EXPAND_NORMAL);
4523 }
4524 return NULL_RTX;
4525 }
4526
4527 /* Expand expression EXP, which is a call to the strspn builtin.
4528 Return NULL_RTX if we failed; the caller should emit a normal call.
4529 Otherwise try to get the result in TARGET, if convenient. */
4530
4531 static rtx
4532 expand_builtin_strspn (tree exp, rtx target, enum machine_mode mode)
4533 {
4534 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4535 {
4536 tree result = fold_builtin_strspn (CALL_EXPR_ARG (exp, 0),
4537 CALL_EXPR_ARG (exp, 1));
4538 if (result)
4539 return expand_expr (result, target, mode, EXPAND_NORMAL);
4540 }
4541 return NULL_RTX;
4542 }
4543
4544 /* Expand expression EXP, which is a call to the strcspn builtin.
4545 Return NULL_RTX if we failed; the caller should emit a normal call.
4546 Otherwise try to get the result in TARGET, if convenient. */
4547
4548 static rtx
4549 expand_builtin_strcspn (tree exp, rtx target, enum machine_mode mode)
4550 {
4551 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4552 {
4553 tree result = fold_builtin_strcspn (CALL_EXPR_ARG (exp, 0),
4554 CALL_EXPR_ARG (exp, 1));
4555 if (result)
4556 return expand_expr (result, target, mode, EXPAND_NORMAL);
4557 }
4558 return NULL_RTX;
4559 }
4560
4561 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4562 if that's convenient. */
4563
4564 rtx
4565 expand_builtin_saveregs (void)
4566 {
4567 rtx val, seq;
4568
4569 /* Don't do __builtin_saveregs more than once in a function.
4570 Save the result of the first call and reuse it. */
4571 if (saveregs_value != 0)
4572 return saveregs_value;
4573
4574 /* When this function is called, it means that registers must be
4575 saved on entry to this function. So we migrate the call to the
4576 first insn of this function. */
4577
4578 start_sequence ();
4579
4580 /* Do whatever the machine needs done in this case. */
4581 val = targetm.calls.expand_builtin_saveregs ();
4582
4583 seq = get_insns ();
4584 end_sequence ();
4585
4586 saveregs_value = val;
4587
4588 /* Put the insns after the NOTE that starts the function. If this
4589 is inside a start_sequence, make the outer-level insn chain current, so
4590 the code is placed at the start of the function. */
4591 push_topmost_sequence ();
4592 emit_insn_after (seq, entry_of_function ());
4593 pop_topmost_sequence ();
4594
4595 return val;
4596 }
4597
4598 /* __builtin_args_info (N) returns word N of the arg space info
4599 for the current function. The number and meanings of the words
4600 are controlled by the definition of CUMULATIVE_ARGS. */
4601
4602 static rtx
4603 expand_builtin_args_info (tree exp)
4604 {
4605 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
4606 int *word_ptr = (int *) &current_function_args_info;
4607
4608 gcc_assert (sizeof (CUMULATIVE_ARGS) % sizeof (int) == 0);
4609
4610 if (call_expr_nargs (exp) != 0)
4611 {
4612 if (!host_integerp (CALL_EXPR_ARG (exp, 0), 0))
4613 error ("argument of %<__builtin_args_info%> must be constant");
4614 else
4615 {
4616 HOST_WIDE_INT wordnum = tree_low_cst (CALL_EXPR_ARG (exp, 0), 0);
4617
4618 if (wordnum < 0 || wordnum >= nwords)
4619 error ("argument of %<__builtin_args_info%> out of range");
4620 else
4621 return GEN_INT (word_ptr[wordnum]);
4622 }
4623 }
4624 else
4625 error ("missing argument in %<__builtin_args_info%>");
4626
4627 return const0_rtx;
4628 }
4629
4630 /* Expand a call to __builtin_next_arg. */
4631
4632 static rtx
4633 expand_builtin_next_arg (void)
4634 {
4635 /* Checking arguments is already done in fold_builtin_next_arg
4636 that must be called before this function. */
4637 return expand_binop (ptr_mode, add_optab,
4638 current_function_internal_arg_pointer,
4639 current_function_arg_offset_rtx,
4640 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4641 }
4642
4643 /* Make it easier for the backends by protecting the valist argument
4644 from multiple evaluations. */
4645
4646 static tree
4647 stabilize_va_list (tree valist, int needs_lvalue)
4648 {
4649 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
4650 {
4651 if (TREE_SIDE_EFFECTS (valist))
4652 valist = save_expr (valist);
4653
4654 /* For this case, the backends will be expecting a pointer to
4655 TREE_TYPE (va_list_type_node), but it's possible we've
4656 actually been given an array (an actual va_list_type_node).
4657 So fix it. */
4658 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4659 {
4660 tree p1 = build_pointer_type (TREE_TYPE (va_list_type_node));
4661 valist = build_fold_addr_expr_with_type (valist, p1);
4662 }
4663 }
4664 else
4665 {
4666 tree pt;
4667
4668 if (! needs_lvalue)
4669 {
4670 if (! TREE_SIDE_EFFECTS (valist))
4671 return valist;
4672
4673 pt = build_pointer_type (va_list_type_node);
4674 valist = fold_build1 (ADDR_EXPR, pt, valist);
4675 TREE_SIDE_EFFECTS (valist) = 1;
4676 }
4677
4678 if (TREE_SIDE_EFFECTS (valist))
4679 valist = save_expr (valist);
4680 valist = build_fold_indirect_ref (valist);
4681 }
4682
4683 return valist;
4684 }
4685
4686 /* The "standard" definition of va_list is void*. */
4687
4688 tree
4689 std_build_builtin_va_list (void)
4690 {
4691 return ptr_type_node;
4692 }
4693
4694 /* The "standard" implementation of va_start: just assign `nextarg' to
4695 the variable. */
4696
4697 void
4698 std_expand_builtin_va_start (tree valist, rtx nextarg)
4699 {
4700 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4701 convert_move (va_r, nextarg, 0);
4702 }
4703
4704 /* Expand EXP, a call to __builtin_va_start. */
4705
4706 static rtx
4707 expand_builtin_va_start (tree exp)
4708 {
4709 rtx nextarg;
4710 tree valist;
4711
4712 if (call_expr_nargs (exp) < 2)
4713 {
4714 error ("too few arguments to function %<va_start%>");
4715 return const0_rtx;
4716 }
4717
4718 if (fold_builtin_next_arg (exp, true))
4719 return const0_rtx;
4720
4721 nextarg = expand_builtin_next_arg ();
4722 valist = stabilize_va_list (CALL_EXPR_ARG (exp, 0), 1);
4723
4724 #ifdef EXPAND_BUILTIN_VA_START
4725 EXPAND_BUILTIN_VA_START (valist, nextarg);
4726 #else
4727 std_expand_builtin_va_start (valist, nextarg);
4728 #endif
4729
4730 return const0_rtx;
4731 }
4732
4733 /* The "standard" implementation of va_arg: read the value from the
4734 current (padded) address and increment by the (padded) size. */
4735
4736 tree
4737 std_gimplify_va_arg_expr (tree valist, tree type, tree *pre_p, tree *post_p)
4738 {
4739 tree addr, t, type_size, rounded_size, valist_tmp;
4740 unsigned HOST_WIDE_INT align, boundary;
4741 bool indirect;
4742
4743 #ifdef ARGS_GROW_DOWNWARD
4744 /* All of the alignment and movement below is for args-grow-up machines.
4745 As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
4746 implement their own specialized gimplify_va_arg_expr routines. */
4747 gcc_unreachable ();
4748 #endif
4749
4750 indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
4751 if (indirect)
4752 type = build_pointer_type (type);
4753
4754 align = PARM_BOUNDARY / BITS_PER_UNIT;
4755 boundary = FUNCTION_ARG_BOUNDARY (TYPE_MODE (type), type) / BITS_PER_UNIT;
4756
4757 /* Hoist the valist value into a temporary for the moment. */
4758 valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);
4759
4760 /* va_list pointer is aligned to PARM_BOUNDARY. If argument actually
4761 requires greater alignment, we must perform dynamic alignment. */
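/* Illustrative sketch (not from the original source): on a
   hypothetical target with 4-byte PARM_BOUNDARY slots, fetching a
   type that needs 8-byte alignment first rounds the pointer up:
     ap = (ap + 7) & -8; */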
4762 if (boundary > align
4763 && !integer_zerop (TYPE_SIZE (type)))
4764 {
4765 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4766 fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist),
4767 valist_tmp, size_int (boundary - 1)));
4768 gimplify_and_add (t, pre_p);
4769
4770 t = fold_convert (sizetype, valist_tmp);
4771 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4772 fold_convert (TREE_TYPE (valist),
4773 fold_build2 (BIT_AND_EXPR, sizetype, t,
4774 size_int (-boundary))));
4775 gimplify_and_add (t, pre_p);
4776 }
4777 else
4778 boundary = align;
4779
4780 /* If the actual alignment is less than the alignment of the type,
4781 adjust the type accordingly so that we don't assume strict alignment
4782 when dereferencing the pointer. */
4783 boundary *= BITS_PER_UNIT;
4784 if (boundary < TYPE_ALIGN (type))
4785 {
4786 type = build_variant_type_copy (type);
4787 TYPE_ALIGN (type) = boundary;
4788 }
4789
4790 /* Compute the rounded size of the type. */
4791 type_size = size_in_bytes (type);
4792 rounded_size = round_up (type_size, align);
4793
4794 /* Reduce rounded_size so it's sharable with the postqueue. */
4795 gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);
4796
4797 /* Get AP. */
4798 addr = valist_tmp;
4799 if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
4800 {
4801 /* Small args are padded downward. */
4802 t = fold_build2 (GT_EXPR, sizetype, rounded_size, size_int (align));
4803 t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
4804 size_binop (MINUS_EXPR, rounded_size, type_size));
4805 addr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (addr), addr, t);
4806 }
4807
4808 /* Compute new value for AP. */
4809 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist), valist_tmp, rounded_size);
4810 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
4811 gimplify_and_add (t, pre_p);
4812
4813 addr = fold_convert (build_pointer_type (type), addr);
4814
4815 if (indirect)
4816 addr = build_va_arg_indirect_ref (addr);
4817
4818 return build_va_arg_indirect_ref (addr);
4819 }
4820
4821 /* Build an indirect-ref expression over the given TREE, which represents a
4822 piece of a va_arg() expansion. */
4823 tree
4824 build_va_arg_indirect_ref (tree addr)
4825 {
4826 addr = build_fold_indirect_ref (addr);
4827
4828 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
4829 mf_mark (addr);
4830
4831 return addr;
4832 }
4833
4834 /* Return a dummy expression of type TYPE in order to keep going after an
4835 error. */
4836
4837 static tree
4838 dummy_object (tree type)
4839 {
4840 tree t = build_int_cst (build_pointer_type (type), 0);
4841 return build1 (INDIRECT_REF, type, t);
4842 }
4843
4844 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
4845 builtin function, but a very special sort of operator. */
4846
4847 enum gimplify_status
4848 gimplify_va_arg_expr (tree *expr_p, tree *pre_p, tree *post_p)
4849 {
4850 tree promoted_type, want_va_type, have_va_type;
4851 tree valist = TREE_OPERAND (*expr_p, 0);
4852 tree type = TREE_TYPE (*expr_p);
4853 tree t;
4854
4855 /* Verify that valist is of the proper type. */
4856 want_va_type = va_list_type_node;
4857 have_va_type = TREE_TYPE (valist);
4858
4859 if (have_va_type == error_mark_node)
4860 return GS_ERROR;
4861
4862 if (TREE_CODE (want_va_type) == ARRAY_TYPE)
4863 {
4864 /* If va_list is an array type, the argument may have decayed
4865 to a pointer type, e.g. by being passed to another function.
4866 In that case, unwrap both types so that we can compare the
4867 underlying records. */
4868 if (TREE_CODE (have_va_type) == ARRAY_TYPE
4869 || POINTER_TYPE_P (have_va_type))
4870 {
4871 want_va_type = TREE_TYPE (want_va_type);
4872 have_va_type = TREE_TYPE (have_va_type);
4873 }
4874 }
4875
4876 if (TYPE_MAIN_VARIANT (want_va_type) != TYPE_MAIN_VARIANT (have_va_type))
4877 {
4878 error ("first argument to %<va_arg%> not of type %<va_list%>");
4879 return GS_ERROR;
4880 }
4881
4882 /* Generate a diagnostic for requesting data of a type that cannot
4883 be passed through `...' due to type promotion at the call site. */
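/* Illustrative example (not from the original source):
     va_arg (ap, char)
   is diagnosed here, because a char argument is promoted to int when
   passed through `...'; the caller should use
     va_arg (ap, int)
   instead (likewise float, which is promoted to double). */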
4884 else if ((promoted_type = lang_hooks.types.type_promotes_to (type))
4885 != type)
4886 {
4887 static bool gave_help;
4888
4889 /* Unfortunately, this is merely undefined, rather than a constraint
4890 violation, so we cannot make this an error. If this call is never
4891 executed, the program is still strictly conforming. */
4892 warning (0, "%qT is promoted to %qT when passed through %<...%>",
4893 type, promoted_type);
4894 if (! gave_help)
4895 {
4896 gave_help = true;
4897 warning (0, "(so you should pass %qT not %qT to %<va_arg%>)",
4898 promoted_type, type);
4899 }
4900
4901 /* We can, however, treat "undefined" any way we please.
4902 Call abort to encourage the user to fix the program. */
4903 inform ("if this code is reached, the program will abort");
4904 t = build_call_expr (implicit_built_in_decls[BUILT_IN_TRAP], 0);
4905 append_to_statement_list (t, pre_p);
4906
4907 /* This is dead code, but go ahead and finish so that the
4908 mode of the result comes out right. */
4909 *expr_p = dummy_object (type);
4910 return GS_ALL_DONE;
4911 }
4912 else
4913 {
4914 /* Make it easier for the backends by protecting the valist argument
4915 from multiple evaluations. */
4916 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
4917 {
4918 /* For this case, the backends will be expecting a pointer to
4919 TREE_TYPE (va_list_type_node), but it's possible we've
4920 actually been given an array (an actual va_list_type_node).
4921 So fix it. */
4922 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4923 {
4924 tree p1 = build_pointer_type (TREE_TYPE (va_list_type_node));
4925 valist = build_fold_addr_expr_with_type (valist, p1);
4926 }
4927 gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
4928 }
4929 else
4930 gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);
4931
4932 if (!targetm.gimplify_va_arg_expr)
4933 /* FIXME:Once most targets are converted we should merely
4934 assert this is non-null. */
4935 return GS_ALL_DONE;
4936
4937 *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
4938 return GS_OK;
4939 }
4940 }
4941
4942 /* Expand EXP, a call to __builtin_va_end. */
4943
4944 static rtx
4945 expand_builtin_va_end (tree exp)
4946 {
4947 tree valist = CALL_EXPR_ARG (exp, 0);
4948
4949 /* Evaluate for side effects, if needed. I hate macros that don't
4950 do that. */
4951 if (TREE_SIDE_EFFECTS (valist))
4952 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4953
4954 return const0_rtx;
4955 }
4956
4957 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4958 builtin rather than just as an assignment in stdarg.h because of the
4959 nastiness of array-type va_list types. */
4960
4961 static rtx
4962 expand_builtin_va_copy (tree exp)
4963 {
4964 tree dst, src, t;
4965
4966 dst = CALL_EXPR_ARG (exp, 0);
4967 src = CALL_EXPR_ARG (exp, 1);
4968
4969 dst = stabilize_va_list (dst, 1);
4970 src = stabilize_va_list (src, 0);
4971
4972 if (TREE_CODE (va_list_type_node) != ARRAY_TYPE)
4973 {
4974 t = build2 (MODIFY_EXPR, va_list_type_node, dst, src);
4975 TREE_SIDE_EFFECTS (t) = 1;
4976 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4977 }
4978 else
4979 {
4980 rtx dstb, srcb, size;
4981
4982 /* Evaluate to pointers. */
4983 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4984 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4985 size = expand_expr (TYPE_SIZE_UNIT (va_list_type_node), NULL_RTX,
4986 VOIDmode, EXPAND_NORMAL);
4987
4988 dstb = convert_memory_address (Pmode, dstb);
4989 srcb = convert_memory_address (Pmode, srcb);
4990
4991 /* "Dereference" to BLKmode memories. */
4992 dstb = gen_rtx_MEM (BLKmode, dstb);
4993 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4994 set_mem_align (dstb, TYPE_ALIGN (va_list_type_node));
4995 srcb = gen_rtx_MEM (BLKmode, srcb);
4996 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4997 set_mem_align (srcb, TYPE_ALIGN (va_list_type_node));
4998
4999 /* Copy. */
5000 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
5001 }
5002
5003 return const0_rtx;
5004 }
5005
5006 /* Expand a call to one of the builtin functions __builtin_frame_address or
5007 __builtin_return_address. */
5008
5009 static rtx
5010 expand_builtin_frame_address (tree fndecl, tree exp)
5011 {
5012 /* The argument must be a nonnegative integer constant.
5013 It counts the number of frames to scan up the stack.
5014 The value is the return address saved in that frame. */
5015 if (call_expr_nargs (exp) == 0)
5016 /* Warning about missing arg was already issued. */
5017 return const0_rtx;
5018 else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
5019 {
5020 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5021 error ("invalid argument to %<__builtin_frame_address%>");
5022 else
5023 error ("invalid argument to %<__builtin_return_address%>");
5024 return const0_rtx;
5025 }
5026 else
5027 {
5028 rtx tem
5029 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
5030 tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
5031
5032 /* Some ports cannot access arbitrary stack frames. */
5033 if (tem == NULL)
5034 {
5035 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5036 warning (0, "unsupported argument to %<__builtin_frame_address%>");
5037 else
5038 warning (0, "unsupported argument to %<__builtin_return_address%>");
5039 return const0_rtx;
5040 }
5041
5042 /* For __builtin_frame_address, return what we've got. */
5043 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5044 return tem;
5045
5046 if (!REG_P (tem)
5047 && ! CONSTANT_P (tem))
5048 tem = copy_to_mode_reg (Pmode, tem);
5049 return tem;
5050 }
5051 }
5052
5053 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if
5054 we failed and the caller should emit a normal call, otherwise try to get
5055 the result in TARGET, if convenient. */
5056
5057 static rtx
5058 expand_builtin_alloca (tree exp, rtx target)
5059 {
5060 rtx op0;
5061 rtx result;
5062
5063 /* In -fmudflap-instrumented code, alloca() and __builtin_alloca()
5064 should always expand to function calls. These can be intercepted
5065 in libmudflap. */
5066 if (flag_mudflap)
5067 return NULL_RTX;
5068
5069 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5070 return NULL_RTX;
5071
5072 /* Compute the argument. */
5073 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5074
5075 /* Allocate the desired space. */
5076 result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
5077 result = convert_memory_address (ptr_mode, result);
5078
5079 return result;
5080 }
5081
5082 /* Expand EXP, a call to a bswap builtin. Return NULL_RTX if a normal
5083 call should be emitted; otherwise the result is placed in TARGET. */
5084
5085 static rtx
5086 expand_builtin_bswap (tree exp, rtx target, rtx subtarget)
5087 {
5088 enum machine_mode mode;
5089 tree arg;
5090 rtx op0;
5091
5092 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5093 return NULL_RTX;
5094
5095 arg = CALL_EXPR_ARG (exp, 0);
5096 mode = TYPE_MODE (TREE_TYPE (arg));
5097 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5098
5099 target = expand_unop (mode, bswap_optab, op0, target, 1);
5100
5101 gcc_assert (target);
5102
5103 return convert_to_mode (mode, target, 0);
5104 }
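/* Illustrative semantics (editor's note): __builtin_bswap32 reverses the
   byte order of its operand, e.g. __builtin_bswap32 (0x11223344) is
   0x44332211.  A portable C sketch equivalent to what the bswap_optab
   expansion computes (bswap32_fallback is a placeholder name, assuming a
   32-bit unsigned int):

       unsigned int
       bswap32_fallback (unsigned int x)
       {
         return ((x & 0x000000ffU) << 24)
                | ((x & 0x0000ff00U) << 8)
                | ((x & 0x00ff0000U) >> 8)
                | ((x & 0xff000000U) >> 24);
       }
*/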
5105
5106 /* Expand a call to a unary builtin in EXP.
5107 Return NULL_RTX if a normal call should be emitted rather than expanding the
5108 function in-line. If convenient, the result should be placed in TARGET.
5109 SUBTARGET may be used as the target for computing one of EXP's operands. */
5110
5111 static rtx
5112 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
5113 rtx subtarget, optab op_optab)
5114 {
5115 rtx op0;
5116
5117 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5118 return NULL_RTX;
5119
5120 /* Compute the argument. */
5121 op0 = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
5122 VOIDmode, EXPAND_NORMAL);
5123 /* Compute op, into TARGET if possible.
5124 Set TARGET to wherever the result comes back. */
5125 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5126 op_optab, op0, target, 1);
5127 gcc_assert (target);
5128
5129 return convert_to_mode (target_mode, target, 0);
5130 }
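/* Illustrative usage (editor's note): the unary builtins routed through
   this helper include ffs, clz, ctz, popcount and parity, e.g.

       __builtin_popcount (0xF0)  == 4
       __builtin_ctz (0x10)       == 4
       __builtin_ffs (0)          == 0

   __builtin_clz and __builtin_ctz are undefined for a zero argument, while
   ffs of zero is defined to be zero.  */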
5131
5132 /* Expand EXP, a call to fputs or fputs_unlocked, via fold_builtin_fputs;
5133 e.g. a constant one-character string argument becomes __builtin_fputc(). */
5134
5135 static rtx
5136 expand_builtin_fputs (tree exp, rtx target, bool unlocked)
5137 {
5138 /* Verify the arguments in the original call. */
5139 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5140 {
5141 tree result = fold_builtin_fputs (CALL_EXPR_ARG (exp, 0),
5142 CALL_EXPR_ARG (exp, 1),
5143 (target == const0_rtx),
5144 unlocked, NULL_TREE);
5145 if (result)
5146 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
5147 }
5148 return NULL_RTX;
5149 }
5150
5151 /* Expand a call to __builtin_expect. We just return our first argument,
5152 as the __builtin_expect semantics should already have been handled by
5153 the tree branch prediction pass. */
5154
5155 static rtx
5156 expand_builtin_expect (tree exp, rtx target)
5157 {
5158 tree arg, c;
5159
5160 if (call_expr_nargs (exp) < 2)
5161 return const0_rtx;
5162 arg = CALL_EXPR_ARG (exp, 0);
5163 c = CALL_EXPR_ARG (exp, 1);
5164
5165 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5166 /* When branch guessing was done, the hints should already have been stripped away. */
5167 gcc_assert (!flag_guess_branch_prob
5168 || optimize == 0 || errorcount || sorrycount);
5169 return target;
5170 }
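/* Illustrative usage (editor's note): the prediction hint has already been
   consumed by the tree branch predictor, so expansion merely forwards the
   first argument.  Typical source-level use (handle_error is a placeholder):

       #define likely(x)   __builtin_expect (!!(x), 1)
       #define unlikely(x) __builtin_expect (!!(x), 0)

       if (unlikely (err != 0))
         handle_error (err);
*/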
5171
5172 void
5173 expand_builtin_trap (void)
5174 {
5175 #ifdef HAVE_trap
5176 if (HAVE_trap)
5177 emit_insn (gen_trap ());
5178 else
5179 #endif
5180 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
5181 emit_barrier ();
5182 }
5183
5184 /* Expand EXP, a call to fabs, fabsf or fabsl.
5185 Return NULL_RTX if a normal call should be emitted rather than expanding
5186 the function inline. If convenient, the result should be placed
5187 in TARGET. SUBTARGET may be used as the target for computing
5188 the operand. */
5189
5190 static rtx
5191 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5192 {
5193 enum machine_mode mode;
5194 tree arg;
5195 rtx op0;
5196
5197 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5198 return NULL_RTX;
5199
5200 arg = CALL_EXPR_ARG (exp, 0);
5201 mode = TYPE_MODE (TREE_TYPE (arg));
5202 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5203 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5204 }
5205
5206 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5207 Return NULL_RTX if a normal call should be emitted rather than expanding the
5208 function inline. If convenient, the result should be placed in TARGET.
5209 SUBTARGET may be used as the target for computing the operand. */
5210
5211 static rtx
5212 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5213 {
5214 rtx op0, op1;
5215 tree arg;
5216
5217 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5218 return NULL_RTX;
5219
5220 arg = CALL_EXPR_ARG (exp, 0);
5221 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5222
5223 arg = CALL_EXPR_ARG (exp, 1);
5224 op1 = expand_normal (arg);
5225
5226 return expand_copysign (op0, op1, target);
5227 }
5228
5229 /* Create a new constant string literal and return a char* pointer to it.
5230 The STRING_CST value is the LEN characters at STR. */
5231 tree
5232 build_string_literal (int len, const char *str)
5233 {
5234 tree t, elem, index, type;
5235
5236 t = build_string (len, str);
5237 elem = build_type_variant (char_type_node, 1, 0);
5238 index = build_index_type (build_int_cst (NULL_TREE, len - 1));
5239 type = build_array_type (elem, index);
5240 TREE_TYPE (t) = type;
5241 TREE_CONSTANT (t) = 1;
5242 TREE_INVARIANT (t) = 1;
5243 TREE_READONLY (t) = 1;
5244 TREE_STATIC (t) = 1;
5245
5246 type = build_pointer_type (type);
5247 t = build1 (ADDR_EXPR, type, t);
5248
5249 type = build_pointer_type (elem);
5250 t = build1 (NOP_EXPR, type, t);
5251 return t;
5252 }
5253
5254 /* Expand EXP, a call to printf or printf_unlocked.
5255 Return NULL_RTX if a normal call should be emitted rather than transforming
5256 the function inline. If convenient, the result should be placed in
5257 TARGET with mode MODE. UNLOCKED indicates this is a printf_unlocked
5258 call. */
5259 static rtx
5260 expand_builtin_printf (tree exp, rtx target, enum machine_mode mode,
5261 bool unlocked)
5262 {
5263 /* If we're using an unlocked function, assume the other unlocked
5264 functions exist explicitly. */
5265 tree const fn_putchar = unlocked ? built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED]
5266 : implicit_built_in_decls[BUILT_IN_PUTCHAR];
5267 tree const fn_puts = unlocked ? built_in_decls[BUILT_IN_PUTS_UNLOCKED]
5268 : implicit_built_in_decls[BUILT_IN_PUTS];
5269 const char *fmt_str;
5270 tree fn = 0;
5271 tree fmt, arg;
5272 int nargs = call_expr_nargs (exp);
5273
5274 /* If the return value is used, don't do the transformation. */
5275 if (target != const0_rtx)
5276 return NULL_RTX;
5277
5278 /* Verify the required arguments in the original call. */
5279 if (nargs == 0)
5280 return NULL_RTX;
5281 fmt = CALL_EXPR_ARG (exp, 0);
5282 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5283 return NULL_RTX;
5284
5285 /* Check whether the format is a literal string constant. */
5286 fmt_str = c_getstr (fmt);
5287 if (fmt_str == NULL)
5288 return NULL_RTX;
5289
5290 if (!init_target_chars ())
5291 return NULL_RTX;
5292
5293 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
5294 if (strcmp (fmt_str, target_percent_s_newline) == 0)
5295 {
5296 if ((nargs != 2)
5297 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 1))))
5298 return NULL_RTX;
5299 if (fn_puts)
5300 fn = build_call_expr (fn_puts, 1, CALL_EXPR_ARG (exp, 1));
5301 }
5302 /* If the format specifier was "%c", call __builtin_putchar(arg). */
5303 else if (strcmp (fmt_str, target_percent_c) == 0)
5304 {
5305 if ((nargs != 2)
5306 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1))) != INTEGER_TYPE)
5307 return NULL_RTX;
5308 if (fn_putchar)
5309 fn = build_call_expr (fn_putchar, 1, CALL_EXPR_ARG (exp, 1));
5310 }
5311 else
5312 {
5313 /* We can't handle anything else with % args or %% ... yet. */
5314 if (strchr (fmt_str, target_percent))
5315 return NULL_RTX;
5316
5317 if (nargs > 1)
5318 return NULL_RTX;
5319
5320 /* If the format specifier was "", printf does nothing. */
5321 if (fmt_str[0] == '\0')
5322 return const0_rtx;
5323 /* If the format specifier has length of 1, call putchar. */
5324 if (fmt_str[1] == '\0')
5325 {
5326 /* Given printf("c"), where c is any single character,
5327 convert "c"[0] to an int and pass that to the replacement
5328 function. */
5329 arg = build_int_cst (NULL_TREE, fmt_str[0]);
5330 if (fn_putchar)
5331 fn = build_call_expr (fn_putchar, 1, arg);
5332 }
5333 else
5334 {
5335 /* If the format specifier was "string\n", call puts("string"). */
5336 size_t len = strlen (fmt_str);
5337 if ((unsigned char)fmt_str[len - 1] == target_newline)
5338 {
5339 /* Create a NUL-terminated string that's one char shorter
5340 than the original, stripping off the trailing '\n'. */
5341 char *newstr = alloca (len);
5342 memcpy (newstr, fmt_str, len - 1);
5343 newstr[len - 1] = 0;
5344 arg = build_string_literal (len, newstr);
5345 if (fn_puts)
5346 fn = build_call_expr (fn_puts, 1, arg);
5347 }
5348 else
5349 /* We'd like to arrange to call fputs(string,stdout) here,
5350 but we need stdout and don't have a way to get it yet. */
5351 return NULL_RTX;
5352 }
5353 }
5354
5355 if (!fn)
5356 return NULL_RTX;
5357 if (TREE_CODE (fn) == CALL_EXPR)
5358 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5359 return expand_expr (fn, target, mode, EXPAND_NORMAL);
5360 }
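/* Summary of the transformations performed above (editor's note), assuming
   the call's return value is unused:

       printf ("%s\n", s);   becomes   puts (s);
       printf ("%c", c);     becomes   putchar (c);
       printf ("x");         becomes   putchar ('x');
       printf ("hi\n");      becomes   puts ("hi");
       printf ("");          is removed entirely;

   any other format containing % directives is left as a normal call.  */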
5361
5362 /* Expand EXP, a call to fprintf or fprintf_unlocked.
5363 Return NULL_RTX if a normal call should be emitted rather than transforming
5364 the function inline. If convenient, the result should be placed in
5365 TARGET with mode MODE. UNLOCKED indicates this is a fprintf_unlocked
5366 call. */
5367 static rtx
5368 expand_builtin_fprintf (tree exp, rtx target, enum machine_mode mode,
5369 bool unlocked)
5370 {
5371 /* If we're using an unlocked function, assume the other unlocked
5372 functions exist explicitly. */
5373 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
5374 : implicit_built_in_decls[BUILT_IN_FPUTC];
5375 tree const fn_fputs = unlocked ? built_in_decls[BUILT_IN_FPUTS_UNLOCKED]
5376 : implicit_built_in_decls[BUILT_IN_FPUTS];
5377 const char *fmt_str;
5378 tree fn = 0;
5379 tree fmt, fp, arg;
5380 int nargs = call_expr_nargs (exp);
5381
5382 /* If the return value is used, don't do the transformation. */
5383 if (target != const0_rtx)
5384 return NULL_RTX;
5385
5386 /* Verify the required arguments in the original call. */
5387 if (nargs < 2)
5388 return NULL_RTX;
5389 fp = CALL_EXPR_ARG (exp, 0);
5390 if (! POINTER_TYPE_P (TREE_TYPE (fp)))
5391 return NULL_RTX;
5392 fmt = CALL_EXPR_ARG (exp, 1);
5393 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5394 return NULL_RTX;
5395
5396 /* Check whether the format is a literal string constant. */
5397 fmt_str = c_getstr (fmt);
5398 if (fmt_str == NULL)
5399 return NULL_RTX;
5400
5401 if (!init_target_chars ())
5402 return NULL_RTX;
5403
5404 /* If the format specifier was "%s", call __builtin_fputs(arg,fp). */
5405 if (strcmp (fmt_str, target_percent_s) == 0)
5406 {
5407 if ((nargs != 3)
5408 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 2))))
5409 return NULL_RTX;
5410 arg = CALL_EXPR_ARG (exp, 2);
5411 if (fn_fputs)
5412 fn = build_call_expr (fn_fputs, 2, arg, fp);
5413 }
5414 /* If the format specifier was "%c", call __builtin_fputc(arg,fp). */
5415 else if (strcmp (fmt_str, target_percent_c) == 0)
5416 {
5417 if ((nargs != 3)
5418 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2))) != INTEGER_TYPE)
5419 return NULL_RTX;
5420 arg = CALL_EXPR_ARG (exp, 2);
5421 if (fn_fputc)
5422 fn = build_call_expr (fn_fputc, 2, arg, fp);
5423 }
5424 else
5425 {
5426 /* We can't handle anything else with % args or %% ... yet. */
5427 if (strchr (fmt_str, target_percent))
5428 return NULL_RTX;
5429
5430 if (nargs > 2)
5431 return NULL_RTX;
5432
5433 /* If the format specifier was "", fprintf does nothing. */
5434 if (fmt_str[0] == '\0')
5435 {
5436 /* Evaluate and ignore FILE* argument for side-effects. */
5437 expand_expr (fp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5438 return const0_rtx;
5439 }
5440
5441 /* When "string" doesn't contain %, replace all cases of
5442 fprintf(stream,string) with fputs(string,stream). The fputs
5443 builtin will take care of special cases like length == 1. */
5444 if (fn_fputs)
5445 fn = build_call_expr (fn_fputs, 2, fmt, fp);
5446 }
5447
5448 if (!fn)
5449 return NULL_RTX;
5450 if (TREE_CODE (fn) == CALL_EXPR)
5451 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5452 return expand_expr (fn, target, mode, EXPAND_NORMAL);
5453 }
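/* Summary of the transformations performed above (editor's note), assuming
   the call's return value is unused:

       fprintf (fp, "%s", s);    becomes   fputs (s, fp);
       fprintf (fp, "%c", c);    becomes   fputc (c, fp);
       fprintf (fp, "text");     becomes   fputs ("text", fp);
       fprintf (fp, "");         evaluates fp for side effects only;

   any other format containing % directives is left as a normal call.  */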
5454
5455 /* Expand a call EXP to sprintf. Return NULL_RTX if
5456 a normal call should be emitted rather than expanding the function
5457 inline. If convenient, the result should be placed in TARGET with
5458 mode MODE. */
5459
5460 static rtx
5461 expand_builtin_sprintf (tree exp, rtx target, enum machine_mode mode)
5462 {
5463 tree dest, fmt;
5464 const char *fmt_str;
5465 int nargs = call_expr_nargs (exp);
5466
5467 /* Verify the required arguments in the original call. */
5468 if (nargs < 2)
5469 return NULL_RTX;
5470 dest = CALL_EXPR_ARG (exp, 0);
5471 if (! POINTER_TYPE_P (TREE_TYPE (dest)))
5472 return NULL_RTX;
5473 fmt = CALL_EXPR_ARG (exp, 1);
5474 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5475 return NULL_RTX;
5476
5477 /* Check whether the format is a literal string constant. */
5478 fmt_str = c_getstr (fmt);
5479 if (fmt_str == NULL)
5480 return NULL_RTX;
5481
5482 if (!init_target_chars ())
5483 return NULL_RTX;
5484
5485 /* If the format doesn't contain % args or %%, use strcpy. */
5486 if (strchr (fmt_str, target_percent) == 0)
5487 {
5488 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5489 tree len_cst;
5490
5491 if ((nargs > 2) || ! fn)
5492 return NULL_RTX;
5493 expand_expr (build_call_expr (fn, 2, dest, fmt),
5494 const0_rtx, VOIDmode, EXPAND_NORMAL);
5495 if (target == const0_rtx)
5496 return const0_rtx;
5497 len_cst = build_int_cst (NULL_TREE, strlen (fmt_str));
5498 return expand_expr (len_cst, target, mode, EXPAND_NORMAL);
5499 }
5500 /* If the format is "%s", use strcpy; the result, if used, is the argument's length, which must be a known constant. */
5501 else if (strcmp (fmt_str, target_percent_s) == 0)
5502 {
5503 tree fn, arg, len;
5504 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5505
5506 if (! fn)
5507 return NULL_RTX;
5508 if (nargs != 3)
5509 return NULL_RTX;
5510 arg = CALL_EXPR_ARG (exp, 2);
5511 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
5512 return NULL_RTX;
5513
5514 if (target != const0_rtx)
5515 {
5516 len = c_strlen (arg, 1);
5517 if (! len || TREE_CODE (len) != INTEGER_CST)
5518 return NULL_RTX;
5519 }
5520 else
5521 len = NULL_TREE;
5522
5523 expand_expr (build_call_expr (fn, 2, dest, arg),
5524 const0_rtx, VOIDmode, EXPAND_NORMAL);
5525
5526 if (target == const0_rtx)
5527 return const0_rtx;
5528 return expand_expr (len, target, mode, EXPAND_NORMAL);
5529 }
5530
5531 return NULL_RTX;
5532 }
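/* Summary of the transformations performed above (editor's note):

       sprintf (buf, "hello");    becomes   strcpy (buf, "hello"),
                                            with the value 5 if it is used;
       sprintf (buf, "%s", s);    becomes   strcpy (buf, s),
                                            with the value strlen (s) when
                                            that length is a known constant.

   Any other format containing % directives falls back to a library call.  */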
5533
5534 /* Expand a call to either the entry or exit function profiler. */
5535
5536 static rtx
5537 expand_builtin_profile_func (bool exitp)
5538 {
5539 rtx this, which;
5540
5541 this = DECL_RTL (current_function_decl);
5542 gcc_assert (MEM_P (this));
5543 this = XEXP (this, 0);
5544
5545 if (exitp)
5546 which = profile_function_exit_libfunc;
5547 else
5548 which = profile_function_entry_libfunc;
5549
5550 emit_library_call (which, LCT_NORMAL, VOIDmode, 2, this, Pmode,
5551 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
5552 0),
5553 Pmode);
5554
5555 return const0_rtx;
5556 }
5557
5558 /* Expand a call to __builtin___clear_cache. */
5559
5560 static rtx
5561 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
5562 {
5563 #ifndef HAVE_clear_cache
5564 #ifdef CLEAR_INSN_CACHE
5565 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5566 does something. Just do the default expansion to a call to
5567 __clear_cache(). */
5568 return NULL_RTX;
5569 #else
5570 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5571 does nothing. There is no need to call it. Do nothing. */
5572 return const0_rtx;
5573 #endif /* CLEAR_INSN_CACHE */
5574 #else
5575 /* We have a "clear_cache" insn, and it will handle everything. */
5576 tree begin, end;
5577 rtx begin_rtx, end_rtx;
5578 enum insn_code icode;
5579
5580 /* We must not expand to a library call. If we did, any
5581 fallback library function in libgcc that might contain a call to
5582 __builtin___clear_cache() would recurse infinitely. */
5583 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5584 {
5585 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5586 return const0_rtx;
5587 }
5588
5589 if (HAVE_clear_cache)
5590 {
5591 icode = CODE_FOR_clear_cache;
5592
5593 begin = CALL_EXPR_ARG (exp, 0);
5594 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5595 begin_rtx = convert_memory_address (Pmode, begin_rtx);
5596 if (!insn_data[icode].operand[0].predicate (begin_rtx, Pmode))
5597 begin_rtx = copy_to_mode_reg (Pmode, begin_rtx);
5598
5599 end = CALL_EXPR_ARG (exp, 1);
5600 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5601 end_rtx = convert_memory_address (Pmode, end_rtx);
5602 if (!insn_data[icode].operand[1].predicate (end_rtx, Pmode))
5603 end_rtx = copy_to_mode_reg (Pmode, end_rtx);
5604
5605 emit_insn (gen_clear_cache (begin_rtx, end_rtx));
5606 }
5607 return const0_rtx;
5608 #endif /* HAVE_clear_cache */
5609 }
5610
5611 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5612
5613 static rtx
5614 round_trampoline_addr (rtx tramp)
5615 {
5616 rtx temp, addend, mask;
5617
5618 /* If we don't need too much alignment, we'll have been guaranteed
5619 proper alignment by get_trampoline_type. */
5620 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5621 return tramp;
5622
5623 /* Round address up to desired boundary. */
5624 temp = gen_reg_rtx (Pmode);
5625 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5626 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5627
5628 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5629 temp, 0, OPTAB_LIB_WIDEN);
5630 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5631 temp, 0, OPTAB_LIB_WIDEN);
5632
5633 return tramp;
5634 }
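/* Worked example (editor's note): with a TRAMPOLINE_ALIGNMENT of 128 bits
   (16 bytes), ADDEND is 15 and MASK is -16, so an address of 0x1003 rounds
   to (0x1003 + 15) & -16 == 0x1010, the next 16-byte boundary.  */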
5635
5636 static rtx
5637 expand_builtin_init_trampoline (tree exp)
5638 {
5639 tree t_tramp, t_func, t_chain;
5640 rtx r_tramp, r_func, r_chain;
5641 #ifdef TRAMPOLINE_TEMPLATE
5642 rtx blktramp;
5643 #endif
5644
5645 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5646 POINTER_TYPE, VOID_TYPE))
5647 return NULL_RTX;
5648
5649 t_tramp = CALL_EXPR_ARG (exp, 0);
5650 t_func = CALL_EXPR_ARG (exp, 1);
5651 t_chain = CALL_EXPR_ARG (exp, 2);
5652
5653 r_tramp = expand_normal (t_tramp);
5654 r_func = expand_normal (t_func);
5655 r_chain = expand_normal (t_chain);
5656
5657 /* Generate insns to initialize the trampoline. */
5658 r_tramp = round_trampoline_addr (r_tramp);
5659 #ifdef TRAMPOLINE_TEMPLATE
5660 blktramp = gen_rtx_MEM (BLKmode, r_tramp);
5661 set_mem_align (blktramp, TRAMPOLINE_ALIGNMENT);
5662 emit_block_move (blktramp, assemble_trampoline_template (),
5663 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
5664 #endif
5665 trampolines_created = 1;
5666 INITIALIZE_TRAMPOLINE (r_tramp, r_func, r_chain);
5667
5668 return const0_rtx;
5669 }
5670
5671 static rtx
5672 expand_builtin_adjust_trampoline (tree exp)
5673 {
5674 rtx tramp;
5675
5676 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5677 return NULL_RTX;
5678
5679 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5680 tramp = round_trampoline_addr (tramp);
5681 #ifdef TRAMPOLINE_ADJUST_ADDRESS
5682 TRAMPOLINE_ADJUST_ADDRESS (tramp);
5683 #endif
5684
5685 return tramp;
5686 }
5687
5688 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5689 function. The function first checks whether the back end provides
5690 an insn to implement signbit for the respective mode. If not, it
5691 checks whether the floating point format of the value is such that
5692 the sign bit can be extracted. If that is not the case, the
5693 function returns NULL_RTX to indicate that a normal call should be
5694 emitted rather than expanding the function in-line. EXP is the
5695 expression that is a call to the builtin function; if convenient,
5696 the result should be placed in TARGET. */
5697 static rtx
5698 expand_builtin_signbit (tree exp, rtx target)
5699 {
5700 const struct real_format *fmt;
5701 enum machine_mode fmode, imode, rmode;
5702 HOST_WIDE_INT hi, lo;
5703 tree arg;
5704 int word, bitpos;
5705 enum insn_code signbit_insn_code;
5706 rtx temp;
5707
5708 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5709 return NULL_RTX;
5710
5711 arg = CALL_EXPR_ARG (exp, 0);
5712 fmode = TYPE_MODE (TREE_TYPE (arg));
5713 rmode = TYPE_MODE (TREE_TYPE (exp));
5714 fmt = REAL_MODE_FORMAT (fmode);
5715
5716 arg = builtin_save_expr (arg);
5717
5718 /* Expand the argument yielding a RTX expression. */
5719 temp = expand_normal (arg);
5720
5721 /* Check if the back end provides an insn that handles signbit for the
5722 argument's mode. */
5723 signbit_insn_code = signbit_optab [(int) fmode];
5724 if (signbit_insn_code != CODE_FOR_nothing)
5725 {
5726 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5727 emit_unop_insn (signbit_insn_code, target, temp, UNKNOWN);
5728 return target;
5729 }
5730
5731 /* For floating point formats without a sign bit, implement signbit
5732 as "ARG < 0.0". */
5733 bitpos = fmt->signbit_ro;
5734 if (bitpos < 0)
5735 {
5736 /* But we can't do this if the format supports signed zero. */
5737 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
5738 return NULL_RTX;
5739
5740 arg = fold_build2 (LT_EXPR, TREE_TYPE (exp), arg,
5741 build_real (TREE_TYPE (arg), dconst0));
5742 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5743 }
5744
5745 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5746 {
5747 imode = int_mode_for_mode (fmode);
5748 if (imode == BLKmode)
5749 return NULL_RTX;
5750 temp = gen_lowpart (imode, temp);
5751 }
5752 else
5753 {
5754 imode = word_mode;
5755 /* Handle targets with different FP word orders. */
5756 if (FLOAT_WORDS_BIG_ENDIAN)
5757 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5758 else
5759 word = bitpos / BITS_PER_WORD;
5760 temp = operand_subword_force (temp, word, fmode);
5761 bitpos = bitpos % BITS_PER_WORD;
5762 }
5763
5764 /* Force the intermediate word_mode (or narrower) result into a
5765 register. This avoids attempting to create paradoxical SUBREGs
5766 of floating point modes below. */
5767 temp = force_reg (imode, temp);
5768
5769 /* If the bitpos is within the "result mode" lowpart, the operation
5770 can be implemented with a single bitwise AND. Otherwise, we need
5771 a right shift and an AND. */
5772
5773 if (bitpos < GET_MODE_BITSIZE (rmode))
5774 {
5775 if (bitpos < HOST_BITS_PER_WIDE_INT)
5776 {
5777 hi = 0;
5778 lo = (HOST_WIDE_INT) 1 << bitpos;
5779 }
5780 else
5781 {
5782 hi = (HOST_WIDE_INT) 1 << (bitpos - HOST_BITS_PER_WIDE_INT);
5783 lo = 0;
5784 }
5785
5786 if (imode != rmode)
5787 temp = gen_lowpart (rmode, temp);
5788 temp = expand_binop (rmode, and_optab, temp,
5789 immed_double_const (lo, hi, rmode),
5790 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5791 }
5792 else
5793 {
5794 /* Perform a logical right shift to place the signbit in the least
5795 significant bit, then truncate the result to the desired mode
5796 and mask just this bit. */
5797 temp = expand_shift (RSHIFT_EXPR, imode, temp,
5798 build_int_cst (NULL_TREE, bitpos), NULL_RTX, 1);
5799 temp = gen_lowpart (rmode, temp);
5800 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5801 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5802 }
5803
5804 return temp;
5805 }
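/* Illustrative fallback (editor's note): for IEEE double on a 64-bit
   target, signbit_ro is 63, so the code above loads the value's image as
   an integer, shifts right by 63 and masks with 1.  A portable C sketch of
   that bit extraction (my_signbit and the use of <stdint.h> are editorial
   assumptions, not part of GCC):

       #include <string.h>
       #include <stdint.h>

       static int
       my_signbit (double x)
       {
         uint64_t bits;
         memcpy (&bits, &x, sizeof bits);
         return (int) (bits >> 63);
       }
*/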
5806
5807 /* Expand fork or exec calls. TARGET is the desired target of the
5808 call. EXP is the call. FN is the
5809 FUNCTION_DECL of the function being called. IGNORE is nonzero if the
5810 value is to be ignored. */
5811
5812 static rtx
5813 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5814 {
5815 tree id, decl;
5816 tree call;
5817
5818 /* If we are not profiling, just call the function. */
5819 if (!profile_arc_flag)
5820 return NULL_RTX;
5821
5822 /* Otherwise call the wrapper. This should be equivalent for the rest of
5823 compiler, so the code does not diverge, and the wrapper may run the
5824 code necessary for keeping the profiling sane. */
5825
5826 switch (DECL_FUNCTION_CODE (fn))
5827 {
5828 case BUILT_IN_FORK:
5829 id = get_identifier ("__gcov_fork");
5830 break;
5831
5832 case BUILT_IN_EXECL:
5833 id = get_identifier ("__gcov_execl");
5834 break;
5835
5836 case BUILT_IN_EXECV:
5837 id = get_identifier ("__gcov_execv");
5838 break;
5839
5840 case BUILT_IN_EXECLP:
5841 id = get_identifier ("__gcov_execlp");
5842 break;
5843
5844 case BUILT_IN_EXECLE:
5845 id = get_identifier ("__gcov_execle");
5846 break;
5847
5848 case BUILT_IN_EXECVP:
5849 id = get_identifier ("__gcov_execvp");
5850 break;
5851
5852 case BUILT_IN_EXECVE:
5853 id = get_identifier ("__gcov_execve");
5854 break;
5855
5856 default:
5857 gcc_unreachable ();
5858 }
5859
5860 decl = build_decl (FUNCTION_DECL, id, TREE_TYPE (fn));
5861 DECL_EXTERNAL (decl) = 1;
5862 TREE_PUBLIC (decl) = 1;
5863 DECL_ARTIFICIAL (decl) = 1;
5864 TREE_NOTHROW (decl) = 1;
5865 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5866 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5867 call = rewrite_call_expr (exp, 0, decl, 0);
5868 return expand_call (call, target, ignore);
5869 }
5870
5871
5872 \f
5873 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5874 the pointer in these functions is void*, the tree optimizers may remove
5875 casts. The mode computed in expand_builtin isn't reliable either, due
5876 to __sync_bool_compare_and_swap.
5877
5878 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5879 group of builtins. This gives us log2 of the mode size. */
5880
5881 static inline enum machine_mode
5882 get_builtin_sync_mode (int fcode_diff)
5883 {
5884 /* The size is not negotiable, so ask not to get BLKmode in return
5885 if the target indicates that a smaller size would be better. */
5886 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5887 }
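/* Worked example (editor's note): for __sync_fetch_and_add_4 the
   FCODE_DIFF is 2, so the requested size is BITS_PER_UNIT << 2 == 32 bits,
   i.e. SImode on a typical target; the _1, _2, _8 and _16 variants map to
   QImode, HImode, DImode and TImode in the same way.  */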
5888
5889 /* Expand the memory expression LOC and return the appropriate memory operand
5890 for the builtin_sync operations. */
5891
5892 static rtx
5893 get_builtin_sync_mem (tree loc, enum machine_mode mode)
5894 {
5895 rtx addr, mem;
5896
5897 addr = expand_expr (loc, NULL_RTX, Pmode, EXPAND_SUM);
5898
5899 /* Note that we explicitly do not want any alias information for this
5900 memory, so that we kill all other live memories. Otherwise we don't
5901 satisfy the full barrier semantics of the intrinsic. */
5902 mem = validize_mem (gen_rtx_MEM (mode, addr));
5903
5904 set_mem_align (mem, get_pointer_alignment (loc, BIGGEST_ALIGNMENT));
5905 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5906 MEM_VOLATILE_P (mem) = 1;
5907
5908 return mem;
5909 }
5910
5911 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5912 EXP is the CALL_EXPR. CODE is the rtx code
5913 that corresponds to the arithmetic or logical operation from the name;
5914 an exception here is that NOT actually means NAND. TARGET is an optional
5915 place for us to store the results; AFTER is true if this is the
5916 fetch_and_xxx form. IGNORE is true if we don't actually care about
5917 the result of the operation at all. */
5918
5919 static rtx
5920 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
5921 enum rtx_code code, bool after,
5922 rtx target, bool ignore)
5923 {
5924 rtx val, mem;
5925 enum machine_mode old_mode;
5926
5927 /* Expand the operands. */
5928 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5929
5930 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5931 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5932 of CONST_INTs, where we know the old_mode only from the call argument. */
5933 old_mode = GET_MODE (val);
5934 if (old_mode == VOIDmode)
5935 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5936 val = convert_modes (mode, old_mode, val, 1);
5937
5938 if (ignore)
5939 return expand_sync_operation (mem, val, code);
5940 else
5941 return expand_sync_fetch_operation (mem, val, code, after, target);
5942 }
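/* Illustrative usage (editor's note): the two families expanded here
   differ only in which value they return, e.g.

       int before = __sync_fetch_and_add (&counter, 1);   (old value)
       int after  = __sync_add_and_fetch (&counter, 1);   (new value)

   Both act as full memory barriers; when the result is ignored the cheaper
   expand_sync_operation path is used.  */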
5943
5944 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5945 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5946 true if this is the boolean form. TARGET is a place for us to store the
5947 results; this is NOT optional if IS_BOOL is true. */
5948
5949 static rtx
5950 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5951 bool is_bool, rtx target)
5952 {
5953 rtx old_val, new_val, mem;
5954 enum machine_mode old_mode;
5955
5956 /* Expand the operands. */
5957 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5958
5959
5960 old_val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX,
5961 mode, EXPAND_NORMAL);
5962 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5963 of CONST_INTs, where we know the old_mode only from the call argument. */
5964 old_mode = GET_MODE (old_val);
5965 if (old_mode == VOIDmode)
5966 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5967 old_val = convert_modes (mode, old_mode, old_val, 1);
5968
5969 new_val = expand_expr (CALL_EXPR_ARG (exp, 2), NULL_RTX,
5970 mode, EXPAND_NORMAL);
5971 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5972 of CONST_INTs, where we know the old_mode only from the call argument. */
5973 old_mode = GET_MODE (new_val);
5974 if (old_mode == VOIDmode)
5975 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2)));
5976 new_val = convert_modes (mode, old_mode, new_val, 1);
5977
5978 if (is_bool)
5979 return expand_bool_compare_and_swap (mem, old_val, new_val, target);
5980 else
5981 return expand_val_compare_and_swap (mem, old_val, new_val, target);
5982 }
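/* Illustrative usage (editor's note):

       int ok  = __sync_bool_compare_and_swap (&x, expected, desired);
       int old = __sync_val_compare_and_swap (&x, expected, desired);

   The boolean form reports whether the store happened; the value form
   returns the prior contents of *x, which equal EXPECTED exactly when the
   swap succeeded.  */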
5983
5984 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5985 general form is actually an atomic exchange, and some targets only
5986 support a reduced form with the second argument being a constant 1.
5987 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5988 the results. */
5989
5990 static rtx
5991 expand_builtin_lock_test_and_set (enum machine_mode mode, tree exp,
5992 rtx target)
5993 {
5994 rtx val, mem;
5995 enum machine_mode old_mode;
5996
5997 /* Expand the operands. */
5998 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5999 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
6000 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6001 of CONST_INTs, where we know the old_mode only from the call argument. */
6002 old_mode = GET_MODE (val);
6003 if (old_mode == VOIDmode)
6004 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
6005 val = convert_modes (mode, old_mode, val, 1);
6006
6007 return expand_sync_lock_test_and_set (mem, val, target);
6008 }
6009
6010 /* Expand the __sync_synchronize intrinsic. */
6011
6012 static void
6013 expand_builtin_synchronize (void)
6014 {
6015 tree x;
6016
6017 #ifdef HAVE_memory_barrier
6018 if (HAVE_memory_barrier)
6019 {
6020 emit_insn (gen_memory_barrier ());
6021 return;
6022 }
6023 #endif
6024
6025 /* If no explicit memory barrier instruction is available, create an
6026 empty asm stmt with a memory clobber. */
6027 x = build4 (ASM_EXPR, void_type_node, build_string (0, ""), NULL, NULL,
6028 tree_cons (NULL, build_string (6, "memory"), NULL));
6029 ASM_VOLATILE_P (x) = 1;
6030 expand_asm_expr (x);
6031 }
6032
6033 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
6034
6035 static void
6036 expand_builtin_lock_release (enum machine_mode mode, tree exp)
6037 {
6038 enum insn_code icode;
6039 rtx mem, insn;
6040 rtx val = const0_rtx;
6041
6042 /* Expand the operands. */
6043 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6044
6045 /* If there is an explicit operation in the md file, use it. */
6046 icode = sync_lock_release[mode];
6047 if (icode != CODE_FOR_nothing)
6048 {
6049 if (!insn_data[icode].operand[1].predicate (val, mode))
6050 val = force_reg (mode, val);
6051
6052 insn = GEN_FCN (icode) (mem, val);
6053 if (insn)
6054 {
6055 emit_insn (insn);
6056 return;
6057 }
6058 }
6059
6060 /* Otherwise we can implement this operation by emitting a barrier
6061 followed by a store of zero. */
6062 expand_builtin_synchronize ();
6063 emit_move_insn (mem, val);
6064 }
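/* Illustrative usage (editor's note): together with
   __sync_lock_test_and_set, the release operation above yields a minimal
   spin lock.  __sync_lock_release stores 0 with release semantics, either
   through the md pattern or through the barrier-plus-store fallback.  The
   helper names below are placeholders:

       static volatile int lock_word;

       static void
       acquire_lock (void)
       {
         while (__sync_lock_test_and_set (&lock_word, 1))
           while (lock_word)
             ;
       }

       static void
       release_lock (void)
       {
         __sync_lock_release (&lock_word);
       }
*/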
6065 \f
6066 /* Expand an expression EXP that calls a built-in function,
6067 with result going to TARGET if that's convenient
6068 (and in mode MODE if that's convenient).
6069 SUBTARGET may be used as the target for computing one of EXP's operands.
6070 IGNORE is nonzero if the value is to be ignored. */
6071
6072 rtx
6073 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
6074 int ignore)
6075 {
6076 tree fndecl = get_callee_fndecl (exp);
6077 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6078 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
6079
6080 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6081 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6082
6083 /* When not optimizing, generate calls to library functions for a certain
6084 set of builtins. */
6085 if (!optimize
6086 && !called_as_built_in (fndecl)
6087 && DECL_ASSEMBLER_NAME_SET_P (fndecl)
6088 && fcode != BUILT_IN_ALLOCA)
6089 return expand_call (exp, target, ignore);
6090
6091 /* The built-in function expanders test for target == const0_rtx
6092 to determine whether the function's result will be ignored. */
6093 if (ignore)
6094 target = const0_rtx;
6095
6096 /* If the result of a pure or const built-in function is ignored, and
6097 none of its arguments are volatile, we can avoid expanding the
6098 built-in call and just evaluate the arguments for side-effects. */
6099 if (target == const0_rtx
6100 && (DECL_IS_PURE (fndecl) || TREE_READONLY (fndecl)))
6101 {
6102 bool volatilep = false;
6103 tree arg;
6104 call_expr_arg_iterator iter;
6105
6106 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6107 if (TREE_THIS_VOLATILE (arg))
6108 {
6109 volatilep = true;
6110 break;
6111 }
6112
6113 if (! volatilep)
6114 {
6115 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6116 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
6117 return const0_rtx;
6118 }
6119 }
6120
6121 switch (fcode)
6122 {
6123 CASE_FLT_FN (BUILT_IN_FABS):
6124 target = expand_builtin_fabs (exp, target, subtarget);
6125 if (target)
6126 return target;
6127 break;
6128
6129 CASE_FLT_FN (BUILT_IN_COPYSIGN):
6130 target = expand_builtin_copysign (exp, target, subtarget);
6131 if (target)
6132 return target;
6133 break;
6134
6135 /* Just do a normal library call if we were unable to fold
6136 the values. */
6137 CASE_FLT_FN (BUILT_IN_CABS):
6138 break;
6139
6140 CASE_FLT_FN (BUILT_IN_EXP):
6141 CASE_FLT_FN (BUILT_IN_EXP10):
6142 CASE_FLT_FN (BUILT_IN_POW10):
6143 CASE_FLT_FN (BUILT_IN_EXP2):
6144 CASE_FLT_FN (BUILT_IN_EXPM1):
6145 CASE_FLT_FN (BUILT_IN_LOGB):
6146 CASE_FLT_FN (BUILT_IN_LOG):
6147 CASE_FLT_FN (BUILT_IN_LOG10):
6148 CASE_FLT_FN (BUILT_IN_LOG2):
6149 CASE_FLT_FN (BUILT_IN_LOG1P):
6150 CASE_FLT_FN (BUILT_IN_TAN):
6151 CASE_FLT_FN (BUILT_IN_ASIN):
6152 CASE_FLT_FN (BUILT_IN_ACOS):
6153 CASE_FLT_FN (BUILT_IN_ATAN):
6154 /* Treat these like sqrt only if unsafe math optimizations are allowed,
6155 because of possible accuracy problems. */
6156 if (! flag_unsafe_math_optimizations)
6157 break;
6158 CASE_FLT_FN (BUILT_IN_SQRT):
6159 CASE_FLT_FN (BUILT_IN_FLOOR):
6160 CASE_FLT_FN (BUILT_IN_CEIL):
6161 CASE_FLT_FN (BUILT_IN_TRUNC):
6162 CASE_FLT_FN (BUILT_IN_ROUND):
6163 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6164 CASE_FLT_FN (BUILT_IN_RINT):
6165 target = expand_builtin_mathfn (exp, target, subtarget);
6166 if (target)
6167 return target;
6168 break;
6169
6170 CASE_FLT_FN (BUILT_IN_ILOGB):
6171 if (! flag_unsafe_math_optimizations)
6172 break;
6173 CASE_FLT_FN (BUILT_IN_ISINF):
6174 CASE_FLT_FN (BUILT_IN_FINITE):
6175 case BUILT_IN_ISFINITE:
6176 target = expand_builtin_interclass_mathfn (exp, target, subtarget);
6177 if (target)
6178 return target;
6179 break;
6180
6181 CASE_FLT_FN (BUILT_IN_LCEIL):
6182 CASE_FLT_FN (BUILT_IN_LLCEIL):
6183 CASE_FLT_FN (BUILT_IN_LFLOOR):
6184 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6185 target = expand_builtin_int_roundingfn (exp, target, subtarget);
6186 if (target)
6187 return target;
6188 break;
6189
6190 CASE_FLT_FN (BUILT_IN_LRINT):
6191 CASE_FLT_FN (BUILT_IN_LLRINT):
6192 CASE_FLT_FN (BUILT_IN_LROUND):
6193 CASE_FLT_FN (BUILT_IN_LLROUND):
6194 target = expand_builtin_int_roundingfn_2 (exp, target, subtarget);
6195 if (target)
6196 return target;
6197 break;
6198
6199 CASE_FLT_FN (BUILT_IN_POW):
6200 target = expand_builtin_pow (exp, target, subtarget);
6201 if (target)
6202 return target;
6203 break;
6204
6205 CASE_FLT_FN (BUILT_IN_POWI):
6206 target = expand_builtin_powi (exp, target, subtarget);
6207 if (target)
6208 return target;
6209 break;
6210
6211 CASE_FLT_FN (BUILT_IN_ATAN2):
6212 CASE_FLT_FN (BUILT_IN_LDEXP):
6213 CASE_FLT_FN (BUILT_IN_SCALB):
6214 CASE_FLT_FN (BUILT_IN_SCALBN):
6215 CASE_FLT_FN (BUILT_IN_SCALBLN):
6216 if (! flag_unsafe_math_optimizations)
6217 break;
6218
6219 CASE_FLT_FN (BUILT_IN_FMOD):
6220 CASE_FLT_FN (BUILT_IN_REMAINDER):
6221 CASE_FLT_FN (BUILT_IN_DREM):
6222 target = expand_builtin_mathfn_2 (exp, target, subtarget);
6223 if (target)
6224 return target;
6225 break;
6226
6227 CASE_FLT_FN (BUILT_IN_CEXPI):
6228 target = expand_builtin_cexpi (exp, target, subtarget);
6229 gcc_assert (target);
6230 return target;
6231
6232 CASE_FLT_FN (BUILT_IN_SIN):
6233 CASE_FLT_FN (BUILT_IN_COS):
6234 if (! flag_unsafe_math_optimizations)
6235 break;
6236 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6237 if (target)
6238 return target;
6239 break;
6240
6241 CASE_FLT_FN (BUILT_IN_SINCOS):
6242 if (! flag_unsafe_math_optimizations)
6243 break;
6244 target = expand_builtin_sincos (exp);
6245 if (target)
6246 return target;
6247 break;
6248
6249 case BUILT_IN_APPLY_ARGS:
6250 return expand_builtin_apply_args ();
6251
6252 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6253 FUNCTION with a copy of the parameters described by
6254 ARGUMENTS, and ARGSIZE. It returns a block of memory
6255 allocated on the stack into which is stored all the registers
6256 that might possibly be used for returning the result of a
6257 function. ARGUMENTS is the value returned by
6258 __builtin_apply_args. ARGSIZE is the number of bytes of
6259 arguments that must be copied. ??? How should this value be
6260 computed? We'll also need a safe worst case value for varargs
6261 functions. */
6262 case BUILT_IN_APPLY:
6263 if (!validate_arglist (exp, POINTER_TYPE,
6264 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6265 && !validate_arglist (exp, REFERENCE_TYPE,
6266 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6267 return const0_rtx;
6268 else
6269 {
6270 rtx ops[3];
6271
6272 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6273 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6274 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6275
6276 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6277 }
6278
6279 /* __builtin_return (RESULT) causes the function to return the
6280 value described by RESULT. RESULT is address of the block of
6281 memory returned by __builtin_apply. */
6282 case BUILT_IN_RETURN:
6283 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6284 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6285 return const0_rtx;
6286
6287 case BUILT_IN_SAVEREGS:
6288 return expand_builtin_saveregs ();
6289
6290 case BUILT_IN_ARGS_INFO:
6291 return expand_builtin_args_info (exp);
6292
6293 /* Return the address of the first anonymous stack arg. */
6294 case BUILT_IN_NEXT_ARG:
6295 if (fold_builtin_next_arg (exp, false))
6296 return const0_rtx;
6297 return expand_builtin_next_arg ();
6298
6299 case BUILT_IN_CLEAR_CACHE:
6300 target = expand_builtin___clear_cache (exp);
6301 if (target)
6302 return target;
6303 break;
6304
6305 case BUILT_IN_CLASSIFY_TYPE:
6306 return expand_builtin_classify_type (exp);
6307
6308 case BUILT_IN_CONSTANT_P:
6309 return const0_rtx;
6310
6311 case BUILT_IN_FRAME_ADDRESS:
6312 case BUILT_IN_RETURN_ADDRESS:
6313 return expand_builtin_frame_address (fndecl, exp);
6314
6315 /* Returns the address of the area where the structure is returned.
6316 0 otherwise. */
6317 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6318 if (call_expr_nargs (exp) != 0
6319 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6320 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6321 return const0_rtx;
6322 else
6323 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6324
6325 case BUILT_IN_ALLOCA:
6326 target = expand_builtin_alloca (exp, target);
6327 if (target)
6328 return target;
6329 break;
6330
6331 case BUILT_IN_STACK_SAVE:
6332 return expand_stack_save ();
6333
6334 case BUILT_IN_STACK_RESTORE:
6335 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6336 return const0_rtx;
6337
6338 case BUILT_IN_BSWAP32:
6339 case BUILT_IN_BSWAP64:
6340 target = expand_builtin_bswap (exp, target, subtarget);
6341
6342 if (target)
6343 return target;
6344 break;
6345
6346 CASE_INT_FN (BUILT_IN_FFS):
6347 case BUILT_IN_FFSIMAX:
6348 target = expand_builtin_unop (target_mode, exp, target,
6349 subtarget, ffs_optab);
6350 if (target)
6351 return target;
6352 break;
6353
6354 CASE_INT_FN (BUILT_IN_CLZ):
6355 case BUILT_IN_CLZIMAX:
6356 target = expand_builtin_unop (target_mode, exp, target,
6357 subtarget, clz_optab);
6358 if (target)
6359 return target;
6360 break;
6361
6362 CASE_INT_FN (BUILT_IN_CTZ):
6363 case BUILT_IN_CTZIMAX:
6364 target = expand_builtin_unop (target_mode, exp, target,
6365 subtarget, ctz_optab);
6366 if (target)
6367 return target;
6368 break;
6369
6370 CASE_INT_FN (BUILT_IN_POPCOUNT):
6371 case BUILT_IN_POPCOUNTIMAX:
6372 target = expand_builtin_unop (target_mode, exp, target,
6373 subtarget, popcount_optab);
6374 if (target)
6375 return target;
6376 break;
6377
6378 CASE_INT_FN (BUILT_IN_PARITY):
6379 case BUILT_IN_PARITYIMAX:
6380 target = expand_builtin_unop (target_mode, exp, target,
6381 subtarget, parity_optab);
6382 if (target)
6383 return target;
6384 break;
6385
6386 case BUILT_IN_STRLEN:
6387 target = expand_builtin_strlen (exp, target, target_mode);
6388 if (target)
6389 return target;
6390 break;
6391
6392 case BUILT_IN_STRCPY:
6393 target = expand_builtin_strcpy (fndecl, exp, target, mode);
6394 if (target)
6395 return target;
6396 break;
6397
6398 case BUILT_IN_STRNCPY:
6399 target = expand_builtin_strncpy (exp, target, mode);
6400 if (target)
6401 return target;
6402 break;
6403
6404 case BUILT_IN_STPCPY:
6405 target = expand_builtin_stpcpy (exp, target, mode);
6406 if (target)
6407 return target;
6408 break;
6409
6410 case BUILT_IN_STRCAT:
6411 target = expand_builtin_strcat (fndecl, exp, target, mode);
6412 if (target)
6413 return target;
6414 break;
6415
6416 case BUILT_IN_STRNCAT:
6417 target = expand_builtin_strncat (exp, target, mode);
6418 if (target)
6419 return target;
6420 break;
6421
6422 case BUILT_IN_STRSPN:
6423 target = expand_builtin_strspn (exp, target, mode);
6424 if (target)
6425 return target;
6426 break;
6427
6428 case BUILT_IN_STRCSPN:
6429 target = expand_builtin_strcspn (exp, target, mode);
6430 if (target)
6431 return target;
6432 break;
6433
6434 case BUILT_IN_STRSTR:
6435 target = expand_builtin_strstr (exp, target, mode);
6436 if (target)
6437 return target;
6438 break;
6439
6440 case BUILT_IN_STRPBRK:
6441 target = expand_builtin_strpbrk (exp, target, mode);
6442 if (target)
6443 return target;
6444 break;
6445
6446 case BUILT_IN_INDEX:
6447 case BUILT_IN_STRCHR:
6448 target = expand_builtin_strchr (exp, target, mode);
6449 if (target)
6450 return target;
6451 break;
6452
6453 case BUILT_IN_RINDEX:
6454 case BUILT_IN_STRRCHR:
6455 target = expand_builtin_strrchr (exp, target, mode);
6456 if (target)
6457 return target;
6458 break;
6459
6460 case BUILT_IN_MEMCPY:
6461 target = expand_builtin_memcpy (exp, target, mode);
6462 if (target)
6463 return target;
6464 break;
6465
6466 case BUILT_IN_MEMPCPY:
6467 target = expand_builtin_mempcpy (exp, target, mode);
6468 if (target)
6469 return target;
6470 break;
6471
6472 case BUILT_IN_MEMMOVE:
6473 target = expand_builtin_memmove (exp, target, mode, ignore);
6474 if (target)
6475 return target;
6476 break;
6477
6478 case BUILT_IN_BCOPY:
6479 target = expand_builtin_bcopy (exp, ignore);
6480 if (target)
6481 return target;
6482 break;
6483
6484 case BUILT_IN_MEMSET:
6485 target = expand_builtin_memset (exp, target, mode);
6486 if (target)
6487 return target;
6488 break;
6489
6490 case BUILT_IN_BZERO:
6491 target = expand_builtin_bzero (exp);
6492 if (target)
6493 return target;
6494 break;
6495
6496 case BUILT_IN_STRCMP:
6497 target = expand_builtin_strcmp (exp, target, mode);
6498 if (target)
6499 return target;
6500 break;
6501
6502 case BUILT_IN_STRNCMP:
6503 target = expand_builtin_strncmp (exp, target, mode);
6504 if (target)
6505 return target;
6506 break;
6507
6508 case BUILT_IN_MEMCHR:
6509 target = expand_builtin_memchr (exp, target, mode);
6510 if (target)
6511 return target;
6512 break;
6513
6514 case BUILT_IN_BCMP:
6515 case BUILT_IN_MEMCMP:
6516 target = expand_builtin_memcmp (exp, target, mode);
6517 if (target)
6518 return target;
6519 break;
6520
6521 case BUILT_IN_SETJMP:
6522 /* This should have been lowered to the builtins below. */
6523 gcc_unreachable ();
6524
6525 case BUILT_IN_SETJMP_SETUP:
6526 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6527 and the receiver label. */
6528 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6529 {
6530 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6531 VOIDmode, EXPAND_NORMAL);
6532 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6533 rtx label_r = label_rtx (label);
6534
6535 /* This is copied from the handling of non-local gotos. */
6536 expand_builtin_setjmp_setup (buf_addr, label_r);
6537 nonlocal_goto_handler_labels
6538 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6539 nonlocal_goto_handler_labels);
6540 /* ??? Do not let expand_label treat us as such since we would
6541 not want to be both on the list of non-local labels and on
6542 the list of forced labels. */
6543 FORCED_LABEL (label) = 0;
6544 return const0_rtx;
6545 }
6546 break;
6547
6548 case BUILT_IN_SETJMP_DISPATCHER:
6549 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6550 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6551 {
6552 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6553 rtx label_r = label_rtx (label);
6554
6555 /* Remove the dispatcher label from the list of non-local labels
6556 since the receiver labels have been added to it above. */
6557 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6558 return const0_rtx;
6559 }
6560 break;
6561
6562 case BUILT_IN_SETJMP_RECEIVER:
6563 /* __builtin_setjmp_receiver is passed the receiver label. */
6564 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6565 {
6566 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6567 rtx label_r = label_rtx (label);
6568
6569 expand_builtin_setjmp_receiver (label_r);
6570 return const0_rtx;
6571 }
6572 break;
6573
6574 /* __builtin_longjmp is passed a pointer to an array of five words.
6575 It's similar to the C library longjmp function but works with
6576 __builtin_setjmp above. */
6577 case BUILT_IN_LONGJMP:
6578 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6579 {
6580 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6581 VOIDmode, EXPAND_NORMAL);
6582 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6583
6584 if (value != const1_rtx)
6585 {
6586 error ("%<__builtin_longjmp%> second argument must be 1");
6587 return const0_rtx;
6588 }
6589
6590 expand_builtin_longjmp (buf_addr, value);
6591 return const0_rtx;
6592 }
6593 break;
6594
6595 case BUILT_IN_NONLOCAL_GOTO:
6596 target = expand_builtin_nonlocal_goto (exp);
6597 if (target)
6598 return target;
6599 break;
6600
6601 /* This updates the setjmp buffer that is its argument with the value
6602 of the current stack pointer. */
6603 case BUILT_IN_UPDATE_SETJMP_BUF:
6604 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6605 {
6606 rtx buf_addr
6607 = expand_normal (CALL_EXPR_ARG (exp, 0));
6608
6609 expand_builtin_update_setjmp_buf (buf_addr);
6610 return const0_rtx;
6611 }
6612 break;
6613
6614 case BUILT_IN_TRAP:
6615 expand_builtin_trap ();
6616 return const0_rtx;
6617
6618 case BUILT_IN_PRINTF:
6619 target = expand_builtin_printf (exp, target, mode, false);
6620 if (target)
6621 return target;
6622 break;
6623
6624 case BUILT_IN_PRINTF_UNLOCKED:
6625 target = expand_builtin_printf (exp, target, mode, true);
6626 if (target)
6627 return target;
6628 break;
6629
6630 case BUILT_IN_FPUTS:
6631 target = expand_builtin_fputs (exp, target, false);
6632 if (target)
6633 return target;
6634 break;
6635 case BUILT_IN_FPUTS_UNLOCKED:
6636 target = expand_builtin_fputs (exp, target, true);
6637 if (target)
6638 return target;
6639 break;
6640
6641 case BUILT_IN_FPRINTF:
6642 target = expand_builtin_fprintf (exp, target, mode, false);
6643 if (target)
6644 return target;
6645 break;
6646
6647 case BUILT_IN_FPRINTF_UNLOCKED:
6648 target = expand_builtin_fprintf (exp, target, mode, true);
6649 if (target)
6650 return target;
6651 break;
6652
6653 case BUILT_IN_SPRINTF:
6654 target = expand_builtin_sprintf (exp, target, mode);
6655 if (target)
6656 return target;
6657 break;
6658
6659 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6660 case BUILT_IN_SIGNBITD32:
6661 case BUILT_IN_SIGNBITD64:
6662 case BUILT_IN_SIGNBITD128:
6663 target = expand_builtin_signbit (exp, target);
6664 if (target)
6665 return target;
6666 break;
6667
6668 /* Various hooks for the DWARF 2 __throw routine. */
6669 case BUILT_IN_UNWIND_INIT:
6670 expand_builtin_unwind_init ();
6671 return const0_rtx;
6672 case BUILT_IN_DWARF_CFA:
6673 return virtual_cfa_rtx;
6674 #ifdef DWARF2_UNWIND_INFO
6675 case BUILT_IN_DWARF_SP_COLUMN:
6676 return expand_builtin_dwarf_sp_column ();
6677 case BUILT_IN_INIT_DWARF_REG_SIZES:
6678 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6679 return const0_rtx;
6680 #endif
6681 case BUILT_IN_FROB_RETURN_ADDR:
6682 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6683 case BUILT_IN_EXTRACT_RETURN_ADDR:
6684 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6685 case BUILT_IN_EH_RETURN:
6686 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6687 CALL_EXPR_ARG (exp, 1));
6688 return const0_rtx;
6689 #ifdef EH_RETURN_DATA_REGNO
6690 case BUILT_IN_EH_RETURN_DATA_REGNO:
6691 return expand_builtin_eh_return_data_regno (exp);
6692 #endif
6693 case BUILT_IN_EXTEND_POINTER:
6694 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6695
6696 case BUILT_IN_VA_START:
6697 case BUILT_IN_STDARG_START:
6698 return expand_builtin_va_start (exp);
6699 case BUILT_IN_VA_END:
6700 return expand_builtin_va_end (exp);
6701 case BUILT_IN_VA_COPY:
6702 return expand_builtin_va_copy (exp);
6703 case BUILT_IN_EXPECT:
6704 return expand_builtin_expect (exp, target);
6705 case BUILT_IN_PREFETCH:
6706 expand_builtin_prefetch (exp);
6707 return const0_rtx;
6708
6709 case BUILT_IN_PROFILE_FUNC_ENTER:
6710 return expand_builtin_profile_func (false);
6711 case BUILT_IN_PROFILE_FUNC_EXIT:
6712 return expand_builtin_profile_func (true);
6713
6714 case BUILT_IN_INIT_TRAMPOLINE:
6715 return expand_builtin_init_trampoline (exp);
6716 case BUILT_IN_ADJUST_TRAMPOLINE:
6717 return expand_builtin_adjust_trampoline (exp);
6718
6719 case BUILT_IN_FORK:
6720 case BUILT_IN_EXECL:
6721 case BUILT_IN_EXECV:
6722 case BUILT_IN_EXECLP:
6723 case BUILT_IN_EXECLE:
6724 case BUILT_IN_EXECVP:
6725 case BUILT_IN_EXECVE:
6726 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6727 if (target)
6728 return target;
6729 break;
6730
6731 case BUILT_IN_FETCH_AND_ADD_1:
6732 case BUILT_IN_FETCH_AND_ADD_2:
6733 case BUILT_IN_FETCH_AND_ADD_4:
6734 case BUILT_IN_FETCH_AND_ADD_8:
6735 case BUILT_IN_FETCH_AND_ADD_16:
6736 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_ADD_1);
6737 target = expand_builtin_sync_operation (mode, exp, PLUS,
6738 false, target, ignore);
6739 if (target)
6740 return target;
6741 break;
6742
6743 case BUILT_IN_FETCH_AND_SUB_1:
6744 case BUILT_IN_FETCH_AND_SUB_2:
6745 case BUILT_IN_FETCH_AND_SUB_4:
6746 case BUILT_IN_FETCH_AND_SUB_8:
6747 case BUILT_IN_FETCH_AND_SUB_16:
6748 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_SUB_1);
6749 target = expand_builtin_sync_operation (mode, exp, MINUS,
6750 false, target, ignore);
6751 if (target)
6752 return target;
6753 break;
6754
6755 case BUILT_IN_FETCH_AND_OR_1:
6756 case BUILT_IN_FETCH_AND_OR_2:
6757 case BUILT_IN_FETCH_AND_OR_4:
6758 case BUILT_IN_FETCH_AND_OR_8:
6759 case BUILT_IN_FETCH_AND_OR_16:
6760 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_OR_1);
6761 target = expand_builtin_sync_operation (mode, exp, IOR,
6762 false, target, ignore);
6763 if (target)
6764 return target;
6765 break;
6766
6767 case BUILT_IN_FETCH_AND_AND_1:
6768 case BUILT_IN_FETCH_AND_AND_2:
6769 case BUILT_IN_FETCH_AND_AND_4:
6770 case BUILT_IN_FETCH_AND_AND_8:
6771 case BUILT_IN_FETCH_AND_AND_16:
6772 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_AND_1);
6773 target = expand_builtin_sync_operation (mode, exp, AND,
6774 false, target, ignore);
6775 if (target)
6776 return target;
6777 break;
6778
6779 case BUILT_IN_FETCH_AND_XOR_1:
6780 case BUILT_IN_FETCH_AND_XOR_2:
6781 case BUILT_IN_FETCH_AND_XOR_4:
6782 case BUILT_IN_FETCH_AND_XOR_8:
6783 case BUILT_IN_FETCH_AND_XOR_16:
6784 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_XOR_1);
6785 target = expand_builtin_sync_operation (mode, exp, XOR,
6786 false, target, ignore);
6787 if (target)
6788 return target;
6789 break;
6790
6791 case BUILT_IN_FETCH_AND_NAND_1:
6792 case BUILT_IN_FETCH_AND_NAND_2:
6793 case BUILT_IN_FETCH_AND_NAND_4:
6794 case BUILT_IN_FETCH_AND_NAND_8:
6795 case BUILT_IN_FETCH_AND_NAND_16:
6796 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_NAND_1);
6797 target = expand_builtin_sync_operation (mode, exp, NOT,
6798 false, target, ignore);
6799 if (target)
6800 return target;
6801 break;
6802
6803 case BUILT_IN_ADD_AND_FETCH_1:
6804 case BUILT_IN_ADD_AND_FETCH_2:
6805 case BUILT_IN_ADD_AND_FETCH_4:
6806 case BUILT_IN_ADD_AND_FETCH_8:
6807 case BUILT_IN_ADD_AND_FETCH_16:
6808 mode = get_builtin_sync_mode (fcode - BUILT_IN_ADD_AND_FETCH_1);
6809 target = expand_builtin_sync_operation (mode, exp, PLUS,
6810 true, target, ignore);
6811 if (target)
6812 return target;
6813 break;
6814
6815 case BUILT_IN_SUB_AND_FETCH_1:
6816 case BUILT_IN_SUB_AND_FETCH_2:
6817 case BUILT_IN_SUB_AND_FETCH_4:
6818 case BUILT_IN_SUB_AND_FETCH_8:
6819 case BUILT_IN_SUB_AND_FETCH_16:
6820 mode = get_builtin_sync_mode (fcode - BUILT_IN_SUB_AND_FETCH_1);
6821 target = expand_builtin_sync_operation (mode, exp, MINUS,
6822 true, target, ignore);
6823 if (target)
6824 return target;
6825 break;
6826
6827 case BUILT_IN_OR_AND_FETCH_1:
6828 case BUILT_IN_OR_AND_FETCH_2:
6829 case BUILT_IN_OR_AND_FETCH_4:
6830 case BUILT_IN_OR_AND_FETCH_8:
6831 case BUILT_IN_OR_AND_FETCH_16:
6832 mode = get_builtin_sync_mode (fcode - BUILT_IN_OR_AND_FETCH_1);
6833 target = expand_builtin_sync_operation (mode, exp, IOR,
6834 true, target, ignore);
6835 if (target)
6836 return target;
6837 break;
6838
6839 case BUILT_IN_AND_AND_FETCH_1:
6840 case BUILT_IN_AND_AND_FETCH_2:
6841 case BUILT_IN_AND_AND_FETCH_4:
6842 case BUILT_IN_AND_AND_FETCH_8:
6843 case BUILT_IN_AND_AND_FETCH_16:
6844 mode = get_builtin_sync_mode (fcode - BUILT_IN_AND_AND_FETCH_1);
6845 target = expand_builtin_sync_operation (mode, exp, AND,
6846 true, target, ignore);
6847 if (target)
6848 return target;
6849 break;
6850
6851 case BUILT_IN_XOR_AND_FETCH_1:
6852 case BUILT_IN_XOR_AND_FETCH_2:
6853 case BUILT_IN_XOR_AND_FETCH_4:
6854 case BUILT_IN_XOR_AND_FETCH_8:
6855 case BUILT_IN_XOR_AND_FETCH_16:
6856 mode = get_builtin_sync_mode (fcode - BUILT_IN_XOR_AND_FETCH_1);
6857 target = expand_builtin_sync_operation (mode, exp, XOR,
6858 true, target, ignore);
6859 if (target)
6860 return target;
6861 break;
6862
6863 case BUILT_IN_NAND_AND_FETCH_1:
6864 case BUILT_IN_NAND_AND_FETCH_2:
6865 case BUILT_IN_NAND_AND_FETCH_4:
6866 case BUILT_IN_NAND_AND_FETCH_8:
6867 case BUILT_IN_NAND_AND_FETCH_16:
6868 mode = get_builtin_sync_mode (fcode - BUILT_IN_NAND_AND_FETCH_1);
6869 target = expand_builtin_sync_operation (mode, exp, NOT,
6870 true, target, ignore);
6871 if (target)
6872 return target;
6873 break;
6874
6875 case BUILT_IN_BOOL_COMPARE_AND_SWAP_1:
6876 case BUILT_IN_BOOL_COMPARE_AND_SWAP_2:
6877 case BUILT_IN_BOOL_COMPARE_AND_SWAP_4:
6878 case BUILT_IN_BOOL_COMPARE_AND_SWAP_8:
6879 case BUILT_IN_BOOL_COMPARE_AND_SWAP_16:
6880 if (mode == VOIDmode)
6881 mode = TYPE_MODE (boolean_type_node);
6882 if (!target || !register_operand (target, mode))
6883 target = gen_reg_rtx (mode);
6884
6885 mode = get_builtin_sync_mode (fcode - BUILT_IN_BOOL_COMPARE_AND_SWAP_1);
6886 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6887 if (target)
6888 return target;
6889 break;
6890
6891 case BUILT_IN_VAL_COMPARE_AND_SWAP_1:
6892 case BUILT_IN_VAL_COMPARE_AND_SWAP_2:
6893 case BUILT_IN_VAL_COMPARE_AND_SWAP_4:
6894 case BUILT_IN_VAL_COMPARE_AND_SWAP_8:
6895 case BUILT_IN_VAL_COMPARE_AND_SWAP_16:
6896 mode = get_builtin_sync_mode (fcode - BUILT_IN_VAL_COMPARE_AND_SWAP_1);
6897 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6898 if (target)
6899 return target;
6900 break;
6901
6902 case BUILT_IN_LOCK_TEST_AND_SET_1:
6903 case BUILT_IN_LOCK_TEST_AND_SET_2:
6904 case BUILT_IN_LOCK_TEST_AND_SET_4:
6905 case BUILT_IN_LOCK_TEST_AND_SET_8:
6906 case BUILT_IN_LOCK_TEST_AND_SET_16:
6907 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_TEST_AND_SET_1);
6908 target = expand_builtin_lock_test_and_set (mode, exp, target);
6909 if (target)
6910 return target;
6911 break;
6912
6913 case BUILT_IN_LOCK_RELEASE_1:
6914 case BUILT_IN_LOCK_RELEASE_2:
6915 case BUILT_IN_LOCK_RELEASE_4:
6916 case BUILT_IN_LOCK_RELEASE_8:
6917 case BUILT_IN_LOCK_RELEASE_16:
6918 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_RELEASE_1);
6919 expand_builtin_lock_release (mode, exp);
6920 return const0_rtx;
6921
6922 case BUILT_IN_SYNCHRONIZE:
6923 expand_builtin_synchronize ();
6924 return const0_rtx;
6925
6926 case BUILT_IN_OBJECT_SIZE:
6927 return expand_builtin_object_size (exp);
6928
6929 case BUILT_IN_MEMCPY_CHK:
6930 case BUILT_IN_MEMPCPY_CHK:
6931 case BUILT_IN_MEMMOVE_CHK:
6932 case BUILT_IN_MEMSET_CHK:
6933 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6934 if (target)
6935 return target;
6936 break;
6937
6938 case BUILT_IN_STRCPY_CHK:
6939 case BUILT_IN_STPCPY_CHK:
6940 case BUILT_IN_STRNCPY_CHK:
6941 case BUILT_IN_STRCAT_CHK:
6942 case BUILT_IN_STRNCAT_CHK:
6943 case BUILT_IN_SNPRINTF_CHK:
6944 case BUILT_IN_VSNPRINTF_CHK:
6945 maybe_emit_chk_warning (exp, fcode);
6946 break;
6947
6948 case BUILT_IN_SPRINTF_CHK:
6949 case BUILT_IN_VSPRINTF_CHK:
6950 maybe_emit_sprintf_chk_warning (exp, fcode);
6951 break;
6952
6953 default: /* Just do a library call if the builtin is unknown. */
6954 break;
6955 }
6956
6957 /* The switch statement above can drop through to cause the function
6958 to be called normally. */
6959 return expand_call (exp, target, ignore);
6960 }
6961
6962 /* Determine whether a tree node represents a call to a built-in
6963 function. If the tree T is a call to a built-in function with
6964 the right number of arguments of the appropriate types, return
6965 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6966 Otherwise the return value is END_BUILTINS. */
6967
6968 enum built_in_function
6969 builtin_mathfn_code (tree t)
6970 {
6971 tree fndecl, arg, parmlist;
6972 tree argtype, parmtype;
6973 call_expr_arg_iterator iter;
6974
6975 if (TREE_CODE (t) != CALL_EXPR
6976 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6977 return END_BUILTINS;
6978
6979 fndecl = get_callee_fndecl (t);
6980 if (fndecl == NULL_TREE
6981 || TREE_CODE (fndecl) != FUNCTION_DECL
6982 || ! DECL_BUILT_IN (fndecl)
6983 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6984 return END_BUILTINS;
6985
6986 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6987 init_call_expr_arg_iterator (t, &iter);
6988 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
6989 {
6990 /* If a function doesn't take a variable number of arguments,
6991 the last element in the list will have type `void'. */
6992 parmtype = TREE_VALUE (parmlist);
6993 if (VOID_TYPE_P (parmtype))
6994 {
6995 if (more_call_expr_args_p (&iter))
6996 return END_BUILTINS;
6997 return DECL_FUNCTION_CODE (fndecl);
6998 }
6999
7000 if (! more_call_expr_args_p (&iter))
7001 return END_BUILTINS;
7002
7003 arg = next_call_expr_arg (&iter);
7004 argtype = TREE_TYPE (arg);
7005
7006 if (SCALAR_FLOAT_TYPE_P (parmtype))
7007 {
7008 if (! SCALAR_FLOAT_TYPE_P (argtype))
7009 return END_BUILTINS;
7010 }
7011 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7012 {
7013 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7014 return END_BUILTINS;
7015 }
7016 else if (POINTER_TYPE_P (parmtype))
7017 {
7018 if (! POINTER_TYPE_P (argtype))
7019 return END_BUILTINS;
7020 }
7021 else if (INTEGRAL_TYPE_P (parmtype))
7022 {
7023 if (! INTEGRAL_TYPE_P (argtype))
7024 return END_BUILTINS;
7025 }
7026 else
7027 return END_BUILTINS;
7028 }
7029
7030 /* Variable-length argument list. */
7031 return DECL_FUNCTION_CODE (fndecl);
7032 }
7033
7034 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7035 evaluate to a constant. */
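/* For example, __builtin_constant_p (3) folds to 1, a call whose argument
   has side effects folds to 0, and anything still undecided is deferred by
   returning NULL_TREE. */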
7036
7037 static tree
7038 fold_builtin_constant_p (tree arg)
7039 {
7040 /* We return 1 for a numeric type that's known to be a constant
7041 value at compile-time or for an aggregate type that's a
7042 literal constant. */
7043 STRIP_NOPS (arg);
7044
7045 /* If we know this is a constant, return the constant one. */
7046 if (CONSTANT_CLASS_P (arg)
7047 || (TREE_CODE (arg) == CONSTRUCTOR
7048 && TREE_CONSTANT (arg)))
7049 return integer_one_node;
7050 if (TREE_CODE (arg) == ADDR_EXPR)
7051 {
7052 tree op = TREE_OPERAND (arg, 0);
7053 if (TREE_CODE (op) == STRING_CST
7054 || (TREE_CODE (op) == ARRAY_REF
7055 && integer_zerop (TREE_OPERAND (op, 1))
7056 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7057 return integer_one_node;
7058 }
7059
7060 /* If this expression has side effects, show we don't know it to be a
7061 constant. Likewise if it's a pointer or aggregate type, since in
7062 those cases we only want literals, which are only optimized
7063 when generating RTL, not later.
7064 And finally, if we are compiling an initializer, not code, we
7065 need to return a definite result now; there's not going to be any
7066 more optimization done. */
7067 if (TREE_SIDE_EFFECTS (arg)
7068 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7069 || POINTER_TYPE_P (TREE_TYPE (arg))
7070 || cfun == 0
7071 || folding_initializer)
7072 return integer_zero_node;
7073
7074 return NULL_TREE;
7075 }
7076
7077 /* Fold a call to __builtin_expect with argument ARG, if we expect that a
7078 comparison against the argument will fold to a constant. In practice,
7079 this means a true constant or the address of a non-weak symbol. */
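/* For instance, a numeric constant or the address of a non-weak symbol
   lets the call fold to its argument, whereas the address of a weak symbol
   is left alone because it need not be a compile-time constant. */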
7080
7081 static tree
7082 fold_builtin_expect (tree arg)
7083 {
7084 tree inner;
7085
7086 /* If the argument isn't invariant, then there's nothing we can do. */
7087 if (!TREE_INVARIANT (arg))
7088 return NULL_TREE;
7089
7090 /* If we're looking at an address of a weak decl, then do not fold. */
7091 inner = arg;
7092 STRIP_NOPS (inner);
7093 if (TREE_CODE (inner) == ADDR_EXPR)
7094 {
7095 do
7096 {
7097 inner = TREE_OPERAND (inner, 0);
7098 }
7099 while (TREE_CODE (inner) == COMPONENT_REF
7100 || TREE_CODE (inner) == ARRAY_REF);
7101 if (DECL_P (inner) && DECL_WEAK (inner))
7102 return NULL_TREE;
7103 }
7104
7105 /* Otherwise, ARG already has the proper type for the return value. */
7106 return arg;
7107 }
7108
7109 /* Fold a call to __builtin_classify_type with argument ARG. */
7110
7111 static tree
7112 fold_builtin_classify_type (tree arg)
7113 {
7114 if (arg == 0)
7115 return build_int_cst (NULL_TREE, no_type_class);
7116
7117 return build_int_cst (NULL_TREE, type_to_class (TREE_TYPE (arg)));
7118 }
7119
7120 /* Fold a call to __builtin_strlen with argument ARG. */
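/* E.g. __builtin_strlen ("hello") folds to the size_t constant 5. */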
7121
7122 static tree
7123 fold_builtin_strlen (tree arg)
7124 {
7125 if (!validate_arg (arg, POINTER_TYPE))
7126 return NULL_TREE;
7127 else
7128 {
7129 tree len = c_strlen (arg, 0);
7130
7131 if (len)
7132 {
7133 /* Convert from the internal "sizetype" type to "size_t". */
7134 if (size_type_node)
7135 len = fold_convert (size_type_node, len);
7136 return len;
7137 }
7138
7139 return NULL_TREE;
7140 }
7141 }
7142
7143 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7144
7145 static tree
7146 fold_builtin_inf (tree type, int warn)
7147 {
7148 REAL_VALUE_TYPE real;
7149
7150 /* __builtin_inff is intended to be usable to define INFINITY on all
7151 targets. If an infinity is not available, INFINITY expands "to a
7152 positive constant of type float that overflows at translation
7153 time", footnote "In this case, using INFINITY will violate the
7154 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7155 Thus we pedwarn to ensure this constraint violation is
7156 diagnosed. */
7157 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7158 pedwarn ("target format does not support infinity");
7159
7160 real_inf (&real);
7161 return build_real (type, real);
7162 }
7163
7164 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7165
7166 static tree
7167 fold_builtin_nan (tree arg, tree type, int quiet)
7168 {
7169 REAL_VALUE_TYPE real;
7170 const char *str;
7171
7172 if (!validate_arg (arg, POINTER_TYPE))
7173 return NULL_TREE;
7174 str = c_getstr (arg);
7175 if (!str)
7176 return NULL_TREE;
7177
7178 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7179 return NULL_TREE;
7180
7181 return build_real (type, real);
7182 }
7183
7184 /* Return true if the floating point expression T has an integer value.
7185 We also allow +Inf, -Inf and NaN to be considered integer values. */
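/* E.g. (double) i for integral i, floor (x) and the constant 2.0 are all
   integer valued, while x + 0.5 is not known to be. */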
7186
7187 static bool
7188 integer_valued_real_p (tree t)
7189 {
7190 switch (TREE_CODE (t))
7191 {
7192 case FLOAT_EXPR:
7193 return true;
7194
7195 case ABS_EXPR:
7196 case SAVE_EXPR:
7197 case NON_LVALUE_EXPR:
7198 return integer_valued_real_p (TREE_OPERAND (t, 0));
7199
7200 case COMPOUND_EXPR:
7201 case MODIFY_EXPR:
7202 case BIND_EXPR:
7203 return integer_valued_real_p (GENERIC_TREE_OPERAND (t, 1));
7204
7205 case PLUS_EXPR:
7206 case MINUS_EXPR:
7207 case MULT_EXPR:
7208 case MIN_EXPR:
7209 case MAX_EXPR:
7210 return integer_valued_real_p (TREE_OPERAND (t, 0))
7211 && integer_valued_real_p (TREE_OPERAND (t, 1));
7212
7213 case COND_EXPR:
7214 return integer_valued_real_p (TREE_OPERAND (t, 1))
7215 && integer_valued_real_p (TREE_OPERAND (t, 2));
7216
7217 case REAL_CST:
7218 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
7219
7220 case NOP_EXPR:
7221 {
7222 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7223 if (TREE_CODE (type) == INTEGER_TYPE)
7224 return true;
7225 if (TREE_CODE (type) == REAL_TYPE)
7226 return integer_valued_real_p (TREE_OPERAND (t, 0));
7227 break;
7228 }
7229
7230 case CALL_EXPR:
7231 switch (builtin_mathfn_code (t))
7232 {
7233 CASE_FLT_FN (BUILT_IN_CEIL):
7234 CASE_FLT_FN (BUILT_IN_FLOOR):
7235 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7236 CASE_FLT_FN (BUILT_IN_RINT):
7237 CASE_FLT_FN (BUILT_IN_ROUND):
7238 CASE_FLT_FN (BUILT_IN_TRUNC):
7239 return true;
7240
7241 CASE_FLT_FN (BUILT_IN_FMIN):
7242 CASE_FLT_FN (BUILT_IN_FMAX):
7243 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7244 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
7245
7246 default:
7247 break;
7248 }
7249 break;
7250
7251 default:
7252 break;
7253 }
7254 return false;
7255 }
7256
7257 /* FNDECL is assumed to be a builtin where truncation can be propagated
7258 across (for instance floor((double)f) == (double)floorf (f)).
7259 Do the transformation for a call with argument ARG. */
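/* E.g. floor (floor (x)) folds to floor (x), and when optimizing,
   floor ((double) f) with float F narrows to (double) floorf (f) provided a
   floorf built-in is available. */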
7260
7261 static tree
7262 fold_trunc_transparent_mathfn (tree fndecl, tree arg)
7263 {
7264 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7265
7266 if (!validate_arg (arg, REAL_TYPE))
7267 return NULL_TREE;
7268
7269 /* Integer rounding functions are idempotent. */
7270 if (fcode == builtin_mathfn_code (arg))
7271 return arg;
7272
7273 /* If argument is already integer valued, and we don't need to worry
7274 about setting errno, there's no need to perform rounding. */
7275 if (! flag_errno_math && integer_valued_real_p (arg))
7276 return arg;
7277
7278 if (optimize)
7279 {
7280 tree arg0 = strip_float_extensions (arg);
7281 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7282 tree newtype = TREE_TYPE (arg0);
7283 tree decl;
7284
7285 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7286 && (decl = mathfn_built_in (newtype, fcode)))
7287 return fold_convert (ftype,
7288 build_call_expr (decl, 1,
7289 fold_convert (newtype, arg0)));
7290 }
7291 return NULL_TREE;
7292 }
7293
7294 /* FNDECL is assumed to be a builtin which can narrow the FP type of
7295 the argument, for instance lround((double)f) -> lroundf (f).
7296 Do the transformation for a call with argument ARG. */
7297
7298 static tree
7299 fold_fixed_mathfn (tree fndecl, tree arg)
7300 {
7301 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7302
7303 if (!validate_arg (arg, REAL_TYPE))
7304 return NULL_TREE;
7305
7306 /* If argument is already integer valued, and we don't need to worry
7307 about setting errno, there's no need to perform rounding. */
7308 if (! flag_errno_math && integer_valued_real_p (arg))
7309 return fold_build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)), arg);
7310
7311 if (optimize)
7312 {
7313 tree ftype = TREE_TYPE (arg);
7314 tree arg0 = strip_float_extensions (arg);
7315 tree newtype = TREE_TYPE (arg0);
7316 tree decl;
7317
7318 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7319 && (decl = mathfn_built_in (newtype, fcode)))
7320 return build_call_expr (decl, 1, fold_convert (newtype, arg0));
7321 }
7322
7323 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7324 sizeof (long long) == sizeof (long). */
7325 if (TYPE_PRECISION (long_long_integer_type_node)
7326 == TYPE_PRECISION (long_integer_type_node))
7327 {
7328 tree newfn = NULL_TREE;
7329 switch (fcode)
7330 {
7331 CASE_FLT_FN (BUILT_IN_LLCEIL):
7332 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7333 break;
7334
7335 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7336 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7337 break;
7338
7339 CASE_FLT_FN (BUILT_IN_LLROUND):
7340 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7341 break;
7342
7343 CASE_FLT_FN (BUILT_IN_LLRINT):
7344 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7345 break;
7346
7347 default:
7348 break;
7349 }
7350
7351 if (newfn)
7352 {
7353 tree newcall = build_call_expr (newfn, 1, arg);
7354 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7355 }
7356 }
7357
7358 return NULL_TREE;
7359 }
7360
7361 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7362 return type. Return NULL_TREE if no simplification can be made. */
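/* E.g. cabs (x + 0.0i) folds to fabs (x); with -funsafe-math-optimizations
   (and not optimizing for size) cabs (z) can be expanded as
   sqrt (r*r + i*i) on the real and imaginary parts of Z. */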
7363
7364 static tree
7365 fold_builtin_cabs (tree arg, tree type, tree fndecl)
7366 {
7367 tree res;
7368
7369 if (TREE_CODE (TREE_TYPE (arg)) != COMPLEX_TYPE
7370 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7371 return NULL_TREE;
7372
7373 /* Calculate the result when the argument is a constant. */
7374 if (TREE_CODE (arg) == COMPLEX_CST
7375 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7376 type, mpfr_hypot)))
7377 return res;
7378
7379 if (TREE_CODE (arg) == COMPLEX_EXPR)
7380 {
7381 tree real = TREE_OPERAND (arg, 0);
7382 tree imag = TREE_OPERAND (arg, 1);
7383
7384 /* If either part is zero, cabs is fabs of the other. */
7385 if (real_zerop (real))
7386 return fold_build1 (ABS_EXPR, type, imag);
7387 if (real_zerop (imag))
7388 return fold_build1 (ABS_EXPR, type, real);
7389
7390 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7391 if (flag_unsafe_math_optimizations
7392 && operand_equal_p (real, imag, OEP_PURE_SAME))
7393 {
7394 const REAL_VALUE_TYPE sqrt2_trunc
7395 = real_value_truncate (TYPE_MODE (type), dconstsqrt2);
7396 STRIP_NOPS (real);
7397 return fold_build2 (MULT_EXPR, type,
7398 fold_build1 (ABS_EXPR, type, real),
7399 build_real (type, sqrt2_trunc));
7400 }
7401 }
7402
7403 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7404 if (TREE_CODE (arg) == NEGATE_EXPR
7405 || TREE_CODE (arg) == CONJ_EXPR)
7406 return build_call_expr (fndecl, 1, TREE_OPERAND (arg, 0));
7407
7408 /* Don't do this when optimizing for size. */
7409 if (flag_unsafe_math_optimizations
7410 && optimize && !optimize_size)
7411 {
7412 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7413
7414 if (sqrtfn != NULL_TREE)
7415 {
7416 tree rpart, ipart, result;
7417
7418 arg = builtin_save_expr (arg);
7419
7420 rpart = fold_build1 (REALPART_EXPR, type, arg);
7421 ipart = fold_build1 (IMAGPART_EXPR, type, arg);
7422
7423 rpart = builtin_save_expr (rpart);
7424 ipart = builtin_save_expr (ipart);
7425
7426 result = fold_build2 (PLUS_EXPR, type,
7427 fold_build2 (MULT_EXPR, type,
7428 rpart, rpart),
7429 fold_build2 (MULT_EXPR, type,
7430 ipart, ipart));
7431
7432 return build_call_expr (sqrtfn, 1, result);
7433 }
7434 }
7435
7436 return NULL_TREE;
7437 }
7438
7439 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7440 Return NULL_TREE if no simplification can be made. */
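/* E.g. with -funsafe-math-optimizations sqrt (exp (x)) becomes exp (x*0.5)
   and sqrt (pow (x, y)) becomes pow (fabs (x), y*0.5). */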
7441
7442 static tree
7443 fold_builtin_sqrt (tree arg, tree type)
7444 {
7445
7446 enum built_in_function fcode;
7447 tree res;
7448
7449 if (!validate_arg (arg, REAL_TYPE))
7450 return NULL_TREE;
7451
7452 /* Calculate the result when the argument is a constant. */
7453 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7454 return res;
7455
7456 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7457 fcode = builtin_mathfn_code (arg);
7458 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7459 {
7460 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7461 arg = fold_build2 (MULT_EXPR, type,
7462 CALL_EXPR_ARG (arg, 0),
7463 build_real (type, dconsthalf));
7464 return build_call_expr (expfn, 1, arg);
7465 }
7466
7467 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7468 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7469 {
7470 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7471
7472 if (powfn)
7473 {
7474 tree arg0 = CALL_EXPR_ARG (arg, 0);
7475 tree tree_root;
7476 /* The inner root was either sqrt or cbrt. */
7477 REAL_VALUE_TYPE dconstroot =
7478 BUILTIN_SQRT_P (fcode) ? dconsthalf : dconstthird;
7479
7480 /* Adjust for the outer root. */
7481 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7482 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7483 tree_root = build_real (type, dconstroot);
7484 return build_call_expr (powfn, 2, arg0, tree_root);
7485 }
7486 }
7487
7488 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7489 if (flag_unsafe_math_optimizations
7490 && (fcode == BUILT_IN_POW
7491 || fcode == BUILT_IN_POWF
7492 || fcode == BUILT_IN_POWL))
7493 {
7494 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7495 tree arg0 = CALL_EXPR_ARG (arg, 0);
7496 tree arg1 = CALL_EXPR_ARG (arg, 1);
7497 tree narg1;
7498 if (!tree_expr_nonnegative_p (arg0))
7499 arg0 = build1 (ABS_EXPR, type, arg0);
7500 narg1 = fold_build2 (MULT_EXPR, type, arg1,
7501 build_real (type, dconsthalf));
7502 return build_call_expr (powfn, 2, arg0, narg1);
7503 }
7504
7505 return NULL_TREE;
7506 }
7507
7508 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7509 Return NULL_TREE if no simplification can be made. */
7510
7511 static tree
7512 fold_builtin_cbrt (tree arg, tree type)
7513 {
7514 const enum built_in_function fcode = builtin_mathfn_code (arg);
7515 tree res;
7516
7517 if (!validate_arg (arg, REAL_TYPE))
7518 return NULL_TREE;
7519
7520 /* Calculate the result when the argument is a constant. */
7521 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7522 return res;
7523
7524 if (flag_unsafe_math_optimizations)
7525 {
7526 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7527 if (BUILTIN_EXPONENT_P (fcode))
7528 {
7529 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7530 const REAL_VALUE_TYPE third_trunc =
7531 real_value_truncate (TYPE_MODE (type), dconstthird);
7532 arg = fold_build2 (MULT_EXPR, type,
7533 CALL_EXPR_ARG (arg, 0),
7534 build_real (type, third_trunc));
7535 return build_call_expr (expfn, 1, arg);
7536 }
7537
7538 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7539 if (BUILTIN_SQRT_P (fcode))
7540 {
7541 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7542
7543 if (powfn)
7544 {
7545 tree arg0 = CALL_EXPR_ARG (arg, 0);
7546 tree tree_root;
7547 REAL_VALUE_TYPE dconstroot = dconstthird;
7548
7549 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7550 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7551 tree_root = build_real (type, dconstroot);
7552 return build_call_expr (powfn, 2, arg0, tree_root);
7553 }
7554 }
7555
7556 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7557 if (BUILTIN_CBRT_P (fcode))
7558 {
7559 tree arg0 = CALL_EXPR_ARG (arg, 0);
7560 if (tree_expr_nonnegative_p (arg0))
7561 {
7562 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7563
7564 if (powfn)
7565 {
7566 tree tree_root;
7567 REAL_VALUE_TYPE dconstroot;
7568
7569 real_arithmetic (&dconstroot, MULT_EXPR, &dconstthird, &dconstthird);
7570 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7571 tree_root = build_real (type, dconstroot);
7572 return build_call_expr (powfn, 2, arg0, tree_root);
7573 }
7574 }
7575 }
7576
7577 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7578 if (fcode == BUILT_IN_POW
7579 || fcode == BUILT_IN_POWF
7580 || fcode == BUILT_IN_POWL)
7581 {
7582 tree arg00 = CALL_EXPR_ARG (arg, 0);
7583 tree arg01 = CALL_EXPR_ARG (arg, 1);
7584 if (tree_expr_nonnegative_p (arg00))
7585 {
7586 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7587 const REAL_VALUE_TYPE dconstroot
7588 = real_value_truncate (TYPE_MODE (type), dconstthird);
7589 tree narg01 = fold_build2 (MULT_EXPR, type, arg01,
7590 build_real (type, dconstroot));
7591 return build_call_expr (powfn, 2, arg00, narg01);
7592 }
7593 }
7594 }
7595 return NULL_TREE;
7596 }
7597
7598 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7599 TYPE is the type of the return value. Return NULL_TREE if no
7600 simplification can be made. */
7601
7602 static tree
7603 fold_builtin_cos (tree arg, tree type, tree fndecl)
7604 {
7605 tree res, narg;
7606
7607 if (!validate_arg (arg, REAL_TYPE))
7608 return NULL_TREE;
7609
7610 /* Calculate the result when the argument is a constant. */
7611 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7612 return res;
7613
7614 /* Optimize cos(-x) into cos (x). */
7615 if ((narg = fold_strip_sign_ops (arg)))
7616 return build_call_expr (fndecl, 1, narg);
7617
7618 return NULL_TREE;
7619 }
7620
7621 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7622 Return NULL_TREE if no simplification can be made. */
7623
7624 static tree
7625 fold_builtin_cosh (tree arg, tree type, tree fndecl)
7626 {
7627 if (validate_arg (arg, REAL_TYPE))
7628 {
7629 tree res, narg;
7630
7631 /* Calculate the result when the argument is a constant. */
7632 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7633 return res;
7634
7635 /* Optimize cosh(-x) into cosh (x). */
7636 if ((narg = fold_strip_sign_ops (arg)))
7637 return build_call_expr (fndecl, 1, narg);
7638 }
7639
7640 return NULL_TREE;
7641 }
7642
7643 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7644 Return NULL_TREE if no simplification can be made. */
7645
7646 static tree
7647 fold_builtin_tan (tree arg, tree type)
7648 {
7649 enum built_in_function fcode;
7650 tree res;
7651
7652 if (!validate_arg (arg, REAL_TYPE))
7653 return NULL_TREE;
7654
7655 /* Calculate the result when the argument is a constant. */
7656 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7657 return res;
7658
7659 /* Optimize tan(atan(x)) = x. */
7660 fcode = builtin_mathfn_code (arg);
7661 if (flag_unsafe_math_optimizations
7662 && (fcode == BUILT_IN_ATAN
7663 || fcode == BUILT_IN_ATANF
7664 || fcode == BUILT_IN_ATANL))
7665 return CALL_EXPR_ARG (arg, 0);
7666
7667 return NULL_TREE;
7668 }
7669
7670 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7671 NULL_TREE if no simplification can be made. */
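/* When the target has C99 functions, sincos (x, &s, &c) is canonicalized to
   a single cexpi (x) call whose imaginary part is stored through the sine
   pointer and whose real part is stored through the cosine pointer. */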
7672
7673 static tree
7674 fold_builtin_sincos (tree arg0, tree arg1, tree arg2)
7675 {
7676 tree type;
7677 tree res, fn, call;
7678
7679 if (!validate_arg (arg0, REAL_TYPE)
7680 || !validate_arg (arg1, POINTER_TYPE)
7681 || !validate_arg (arg2, POINTER_TYPE))
7682 return NULL_TREE;
7683
7684 type = TREE_TYPE (arg0);
7685
7686 /* Calculate the result when the argument is a constant. */
7687 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7688 return res;
7689
7690 /* Canonicalize sincos to cexpi. */
7691 if (!TARGET_C99_FUNCTIONS)
7692 return NULL_TREE;
7693 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7694 if (!fn)
7695 return NULL_TREE;
7696
7697 call = build_call_expr (fn, 1, arg0);
7698 call = builtin_save_expr (call);
7699
7700 return build2 (COMPOUND_EXPR, type,
7701 build2 (MODIFY_EXPR, void_type_node,
7702 build_fold_indirect_ref (arg1),
7703 build1 (IMAGPART_EXPR, type, call)),
7704 build2 (MODIFY_EXPR, void_type_node,
7705 build_fold_indirect_ref (arg2),
7706 build1 (REALPART_EXPR, type, call)));
7707 }
7708
7709 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7710 NULL_TREE if no simplification can be made. */
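/* E.g. cexp (0.0 + yi) becomes cexpi (y) when cexpi is available, and with
   -funsafe-math-optimizations cexp (x + yi) is split into
   exp (x) * cexpi (y). */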
7711
7712 static tree
7713 fold_builtin_cexp (tree arg0, tree type)
7714 {
7715 tree rtype;
7716 tree realp, imagp, ifn;
7717
7718 if (!validate_arg (arg0, COMPLEX_TYPE))
7719 return NULL_TREE;
7720
7721 rtype = TREE_TYPE (TREE_TYPE (arg0));
7722
7723 /* If we can determine that the real part of arg0 is constant zero,
7724 fold to cexpi. */
7725 if (!TARGET_C99_FUNCTIONS)
7726 return NULL_TREE;
7727 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7728 if (!ifn)
7729 return NULL_TREE;
7730
7731 if ((realp = fold_unary (REALPART_EXPR, rtype, arg0))
7732 && real_zerop (realp))
7733 {
7734 tree narg = fold_build1 (IMAGPART_EXPR, rtype, arg0);
7735 return build_call_expr (ifn, 1, narg);
7736 }
7737
7738 /* If we can easily decompose the real and imaginary parts, split cexp
7739 into exp (r) * cexpi (i). */
7740 if (flag_unsafe_math_optimizations
7741 && realp)
7742 {
7743 tree rfn, rcall, icall;
7744
7745 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7746 if (!rfn)
7747 return NULL_TREE;
7748
7749 imagp = fold_unary (IMAGPART_EXPR, rtype, arg0);
7750 if (!imagp)
7751 return NULL_TREE;
7752
7753 icall = build_call_expr (ifn, 1, imagp);
7754 icall = builtin_save_expr (icall);
7755 rcall = build_call_expr (rfn, 1, realp);
7756 rcall = builtin_save_expr (rcall);
7757 return build2 (COMPLEX_EXPR, type,
7758 build2 (MULT_EXPR, rtype,
7759 rcall,
7760 build1 (REALPART_EXPR, rtype, icall)),
7761 build2 (MULT_EXPR, rtype,
7762 rcall,
7763 build1 (IMAGPART_EXPR, rtype, icall)));
7764 }
7765
7766 return NULL_TREE;
7767 }
7768
7769 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7770 Return NULL_TREE if no simplification can be made. */
7771
7772 static tree
7773 fold_builtin_trunc (tree fndecl, tree arg)
7774 {
7775 if (!validate_arg (arg, REAL_TYPE))
7776 return NULL_TREE;
7777
7778 /* Optimize trunc of constant value. */
7779 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7780 {
7781 REAL_VALUE_TYPE r, x;
7782 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7783
7784 x = TREE_REAL_CST (arg);
7785 real_trunc (&r, TYPE_MODE (type), &x);
7786 return build_real (type, r);
7787 }
7788
7789 return fold_trunc_transparent_mathfn (fndecl, arg);
7790 }
7791
7792 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7793 Return NULL_TREE if no simplification can be made. */
7794
7795 static tree
7796 fold_builtin_floor (tree fndecl, tree arg)
7797 {
7798 if (!validate_arg (arg, REAL_TYPE))
7799 return NULL_TREE;
7800
7801 /* Optimize floor of constant value. */
7802 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7803 {
7804 REAL_VALUE_TYPE x;
7805
7806 x = TREE_REAL_CST (arg);
7807 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7808 {
7809 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7810 REAL_VALUE_TYPE r;
7811
7812 real_floor (&r, TYPE_MODE (type), &x);
7813 return build_real (type, r);
7814 }
7815 }
7816
7817 /* Fold floor (x) where x is nonnegative to trunc (x). */
7818 if (tree_expr_nonnegative_p (arg))
7819 {
7820 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7821 if (truncfn)
7822 return build_call_expr (truncfn, 1, arg);
7823 }
7824
7825 return fold_trunc_transparent_mathfn (fndecl, arg);
7826 }
7827
7828 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7829 Return NULL_TREE if no simplification can be made. */
7830
7831 static tree
7832 fold_builtin_ceil (tree fndecl, tree arg)
7833 {
7834 if (!validate_arg (arg, REAL_TYPE))
7835 return NULL_TREE;
7836
7837 /* Optimize ceil of constant value. */
7838 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7839 {
7840 REAL_VALUE_TYPE x;
7841
7842 x = TREE_REAL_CST (arg);
7843 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7844 {
7845 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7846 REAL_VALUE_TYPE r;
7847
7848 real_ceil (&r, TYPE_MODE (type), &x);
7849 return build_real (type, r);
7850 }
7851 }
7852
7853 return fold_trunc_transparent_mathfn (fndecl, arg);
7854 }
7855
7856 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7857 Return NULL_TREE if no simplification can be made. */
7858
7859 static tree
7860 fold_builtin_round (tree fndecl, tree arg)
7861 {
7862 if (!validate_arg (arg, REAL_TYPE))
7863 return NULL_TREE;
7864
7865 /* Optimize round of constant value. */
7866 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7867 {
7868 REAL_VALUE_TYPE x;
7869
7870 x = TREE_REAL_CST (arg);
7871 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7872 {
7873 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7874 REAL_VALUE_TYPE r;
7875
7876 real_round (&r, TYPE_MODE (type), &x);
7877 return build_real (type, r);
7878 }
7879 }
7880
7881 return fold_trunc_transparent_mathfn (fndecl, arg);
7882 }
7883
7884 /* Fold function call to builtin lround, lroundf or lroundl (or the
7885 corresponding long long versions) and other rounding functions. ARG
7886 is the argument to the call. Return NULL_TREE if no simplification
7887 can be made. */
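/* E.g. lround (2.5) folds to the integer constant 3 when the value fits the
   result type, and lfloor (x) for nonnegative X folds to a plain
   float-to-integer conversion. */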
7888
7889 static tree
7890 fold_builtin_int_roundingfn (tree fndecl, tree arg)
7891 {
7892 if (!validate_arg (arg, REAL_TYPE))
7893 return NULL_TREE;
7894
7895 /* Optimize lround of constant value. */
7896 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7897 {
7898 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
7899
7900 if (real_isfinite (&x))
7901 {
7902 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
7903 tree ftype = TREE_TYPE (arg);
7904 unsigned HOST_WIDE_INT lo2;
7905 HOST_WIDE_INT hi, lo;
7906 REAL_VALUE_TYPE r;
7907
7908 switch (DECL_FUNCTION_CODE (fndecl))
7909 {
7910 CASE_FLT_FN (BUILT_IN_LFLOOR):
7911 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7912 real_floor (&r, TYPE_MODE (ftype), &x);
7913 break;
7914
7915 CASE_FLT_FN (BUILT_IN_LCEIL):
7916 CASE_FLT_FN (BUILT_IN_LLCEIL):
7917 real_ceil (&r, TYPE_MODE (ftype), &x);
7918 break;
7919
7920 CASE_FLT_FN (BUILT_IN_LROUND):
7921 CASE_FLT_FN (BUILT_IN_LLROUND):
7922 real_round (&r, TYPE_MODE (ftype), &x);
7923 break;
7924
7925 default:
7926 gcc_unreachable ();
7927 }
7928
7929 REAL_VALUE_TO_INT (&lo, &hi, r);
7930 if (!fit_double_type (lo, hi, &lo2, &hi, itype))
7931 return build_int_cst_wide (itype, lo2, hi);
7932 }
7933 }
7934
7935 switch (DECL_FUNCTION_CODE (fndecl))
7936 {
7937 CASE_FLT_FN (BUILT_IN_LFLOOR):
7938 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7939 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
7940 if (tree_expr_nonnegative_p (arg))
7941 return fold_build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)),
7942 arg);
7943 break;
7944 default:;
7945 }
7946
7947 return fold_fixed_mathfn (fndecl, arg);
7948 }
7949
7950 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
7951 and their long and long long variants (i.e. ffsl and ffsll). ARG is
7952 the argument to the call. Return NULL_TREE if no simplification can
7953 be made. */
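/* E.g. __builtin_popcount (0xff) folds to 8 and __builtin_ffs (0) folds
   to 0. */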
7954
7955 static tree
7956 fold_builtin_bitop (tree fndecl, tree arg)
7957 {
7958 if (!validate_arg (arg, INTEGER_TYPE))
7959 return NULL_TREE;
7960
7961 /* Optimize for constant argument. */
7962 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
7963 {
7964 HOST_WIDE_INT hi, width, result;
7965 unsigned HOST_WIDE_INT lo;
7966 tree type;
7967
7968 type = TREE_TYPE (arg);
7969 width = TYPE_PRECISION (type);
7970 lo = TREE_INT_CST_LOW (arg);
7971
7972 /* Clear all the bits that are beyond the type's precision. */
7973 if (width > HOST_BITS_PER_WIDE_INT)
7974 {
7975 hi = TREE_INT_CST_HIGH (arg);
7976 if (width < 2 * HOST_BITS_PER_WIDE_INT)
7977 hi &= ~((HOST_WIDE_INT) (-1) >> (width - HOST_BITS_PER_WIDE_INT));
7978 }
7979 else
7980 {
7981 hi = 0;
7982 if (width < HOST_BITS_PER_WIDE_INT)
7983 lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
7984 }
7985
7986 switch (DECL_FUNCTION_CODE (fndecl))
7987 {
7988 CASE_INT_FN (BUILT_IN_FFS):
7989 if (lo != 0)
7990 result = exact_log2 (lo & -lo) + 1;
7991 else if (hi != 0)
7992 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi) + 1;
7993 else
7994 result = 0;
7995 break;
7996
7997 CASE_INT_FN (BUILT_IN_CLZ):
7998 if (hi != 0)
7999 result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
8000 else if (lo != 0)
8001 result = width - floor_log2 (lo) - 1;
8002 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8003 result = width;
8004 break;
8005
8006 CASE_INT_FN (BUILT_IN_CTZ):
8007 if (lo != 0)
8008 result = exact_log2 (lo & -lo);
8009 else if (hi != 0)
8010 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi);
8011 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8012 result = width;
8013 break;
8014
8015 CASE_INT_FN (BUILT_IN_POPCOUNT):
8016 result = 0;
8017 while (lo)
8018 result++, lo &= lo - 1;
8019 while (hi)
8020 result++, hi &= hi - 1;
8021 break;
8022
8023 CASE_INT_FN (BUILT_IN_PARITY):
8024 result = 0;
8025 while (lo)
8026 result++, lo &= lo - 1;
8027 while (hi)
8028 result++, hi &= hi - 1;
8029 result &= 1;
8030 break;
8031
8032 default:
8033 gcc_unreachable ();
8034 }
8035
8036 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
8037 }
8038
8039 return NULL_TREE;
8040 }
8041
8042 /* Fold function call to builtin_bswap and the long and long long
8043 variants. Return NULL_TREE if no simplification can be made. */
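/* E.g. __builtin_bswap32 (0x12345678) folds to 0x78563412. */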
8044 static tree
8045 fold_builtin_bswap (tree fndecl, tree arg)
8046 {
8047 if (! validate_arg (arg, INTEGER_TYPE))
8048 return NULL_TREE;
8049
8050 /* Optimize constant value. */
8051 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8052 {
8053 HOST_WIDE_INT hi, width, r_hi = 0;
8054 unsigned HOST_WIDE_INT lo, r_lo = 0;
8055 tree type;
8056
8057 type = TREE_TYPE (arg);
8058 width = TYPE_PRECISION (type);
8059 lo = TREE_INT_CST_LOW (arg);
8060 hi = TREE_INT_CST_HIGH (arg);
8061
8062 switch (DECL_FUNCTION_CODE (fndecl))
8063 {
8064 case BUILT_IN_BSWAP32:
8065 case BUILT_IN_BSWAP64:
8066 {
8067 int s;
8068
8069 for (s = 0; s < width; s += 8)
8070 {
8071 int d = width - s - 8;
8072 unsigned HOST_WIDE_INT byte;
8073
8074 if (s < HOST_BITS_PER_WIDE_INT)
8075 byte = (lo >> s) & 0xff;
8076 else
8077 byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;
8078
8079 if (d < HOST_BITS_PER_WIDE_INT)
8080 r_lo |= byte << d;
8081 else
8082 r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
8083 }
8084 }
8085
8086 break;
8087
8088 default:
8089 gcc_unreachable ();
8090 }
8091
8092 if (width < HOST_BITS_PER_WIDE_INT)
8093 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), r_lo);
8094 else
8095 return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl)), r_lo, r_hi);
8096 }
8097
8098 return NULL_TREE;
8099 }
8100
8101 /* Return true if EXPR is the real constant contained in VALUE. */
8102
8103 static bool
8104 real_dconstp (tree expr, const REAL_VALUE_TYPE *value)
8105 {
8106 STRIP_NOPS (expr);
8107
8108 return ((TREE_CODE (expr) == REAL_CST
8109 && !TREE_OVERFLOW (expr)
8110 && REAL_VALUES_EQUAL (TREE_REAL_CST (expr), *value))
8111 || (TREE_CODE (expr) == COMPLEX_CST
8112 && real_dconstp (TREE_REALPART (expr), value)
8113 && real_zerop (TREE_IMAGPART (expr))));
8114 }
8115
8116 /* A subroutine of fold_builtin to fold the various logarithmic
8117 functions. Return NULL_TREE if no simplification can be made.
8118 FUNC is the corresponding MPFR logarithm function. */
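/* E.g. with -funsafe-math-optimizations log (exp (x)) folds to x,
   log (sqrt (x)) to 0.5*log (x) and log (pow (x, y)) to y*log (x). */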
8119
8120 static tree
8121 fold_builtin_logarithm (tree fndecl, tree arg,
8122 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8123 {
8124 if (validate_arg (arg, REAL_TYPE))
8125 {
8126 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8127 tree res;
8128 const enum built_in_function fcode = builtin_mathfn_code (arg);
8129
8130 /* Optimize log(e) = 1.0. We're never passed an exact 'e';
8131 instead we'll look for 'e' truncated to MODE. So only do
8132 this if flag_unsafe_math_optimizations is set. */
8133 if (flag_unsafe_math_optimizations && func == mpfr_log)
8134 {
8135 const REAL_VALUE_TYPE e_truncated =
8136 real_value_truncate (TYPE_MODE (type), dconste);
8137 if (real_dconstp (arg, &e_truncated))
8138 return build_real (type, dconst1);
8139 }
8140
8141 /* Calculate the result when the argument is a constant. */
8142 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
8143 return res;
8144
8145 /* Special case, optimize logN(expN(x)) = x. */
8146 if (flag_unsafe_math_optimizations
8147 && ((func == mpfr_log
8148 && (fcode == BUILT_IN_EXP
8149 || fcode == BUILT_IN_EXPF
8150 || fcode == BUILT_IN_EXPL))
8151 || (func == mpfr_log2
8152 && (fcode == BUILT_IN_EXP2
8153 || fcode == BUILT_IN_EXP2F
8154 || fcode == BUILT_IN_EXP2L))
8155 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
8156 return fold_convert (type, CALL_EXPR_ARG (arg, 0));
8157
8158 /* Optimize logN(func()) for various exponential functions. We
8159 want to determine the value "x" and the power "exponent" in
8160 order to transform logN(x**exponent) into exponent*logN(x). */
8161 if (flag_unsafe_math_optimizations)
8162 {
8163 tree exponent = 0, x = 0;
8164
8165 switch (fcode)
8166 {
8167 CASE_FLT_FN (BUILT_IN_EXP):
8168 /* Prepare to do logN(exp(exponent)) -> exponent*logN(e). */
8169 x = build_real (type,
8170 real_value_truncate (TYPE_MODE (type), dconste));
8171 exponent = CALL_EXPR_ARG (arg, 0);
8172 break;
8173 CASE_FLT_FN (BUILT_IN_EXP2):
8174 /* Prepare to do logN(exp2(exponent)) -> exponent*logN(2). */
8175 x = build_real (type, dconst2);
8176 exponent = CALL_EXPR_ARG (arg, 0);
8177 break;
8178 CASE_FLT_FN (BUILT_IN_EXP10):
8179 CASE_FLT_FN (BUILT_IN_POW10):
8180 /* Prepare to do logN(exp10(exponent)) -> exponent*logN(10). */
8181 x = build_real (type, dconst10);
8182 exponent = CALL_EXPR_ARG (arg, 0);
8183 break;
8184 CASE_FLT_FN (BUILT_IN_SQRT):
8185 /* Prepare to do logN(sqrt(x)) -> 0.5*logN(x). */
8186 x = CALL_EXPR_ARG (arg, 0);
8187 exponent = build_real (type, dconsthalf);
8188 break;
8189 CASE_FLT_FN (BUILT_IN_CBRT):
8190 /* Prepare to do logN(cbrt(x)) -> (1/3)*logN(x). */
8191 x = CALL_EXPR_ARG (arg, 0);
8192 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
8193 dconstthird));
8194 break;
8195 CASE_FLT_FN (BUILT_IN_POW):
8196 /* Prepare to do logN(pow(x,exponent)) -> exponent*logN(x). */
8197 x = CALL_EXPR_ARG (arg, 0);
8198 exponent = CALL_EXPR_ARG (arg, 1);
8199 break;
8200 default:
8201 break;
8202 }
8203
8204 /* Now perform the optimization. */
8205 if (x && exponent)
8206 {
8207 tree logfn = build_call_expr (fndecl, 1, x);
8208 return fold_build2 (MULT_EXPR, type, exponent, logfn);
8209 }
8210 }
8211 }
8212
8213 return NULL_TREE;
8214 }
8215
8216 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8217 NULL_TREE if no simplification can be made. */
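/* E.g. hypot (x, 0.0) folds to fabs (x), hypot (-x, y) to hypot (x, y), and
   with -funsafe-math-optimizations hypot (x, x) to fabs (x)*sqrt (2). */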
8218
8219 static tree
8220 fold_builtin_hypot (tree fndecl, tree arg0, tree arg1, tree type)
8221 {
8222 tree res, narg0, narg1;
8223
8224 if (!validate_arg (arg0, REAL_TYPE)
8225 || !validate_arg (arg1, REAL_TYPE))
8226 return NULL_TREE;
8227
8228 /* Calculate the result when the argument is a constant. */
8229 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8230 return res;
8231
8232 /* If either argument to hypot has a negate or abs, strip that off.
8233 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8234 narg0 = fold_strip_sign_ops (arg0);
8235 narg1 = fold_strip_sign_ops (arg1);
8236 if (narg0 || narg1)
8237 {
8238 return build_call_expr (fndecl, 2, narg0 ? narg0 : arg0,
8239 narg1 ? narg1 : arg1);
8240 }
8241
8242 /* If either argument is zero, hypot is fabs of the other. */
8243 if (real_zerop (arg0))
8244 return fold_build1 (ABS_EXPR, type, arg1);
8245 else if (real_zerop (arg1))
8246 return fold_build1 (ABS_EXPR, type, arg0);
8247
8248 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8249 if (flag_unsafe_math_optimizations
8250 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8251 {
8252 const REAL_VALUE_TYPE sqrt2_trunc
8253 = real_value_truncate (TYPE_MODE (type), dconstsqrt2);
8254 return fold_build2 (MULT_EXPR, type,
8255 fold_build1 (ABS_EXPR, type, arg0),
8256 build_real (type, sqrt2_trunc));
8257 }
8258
8259 return NULL_TREE;
8260 }
8261
8262
8263 /* Fold a builtin function call to pow, powf, or powl. Return
8264 NULL_TREE if no simplification can be made. */
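/* E.g. pow (x, 1.0) folds to x, pow (x, -1.0) to 1.0/x, and with
   -funsafe-math-optimizations pow (x, 0.5) folds to sqrt (x). */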
8265 static tree
8266 fold_builtin_pow (tree fndecl, tree arg0, tree arg1, tree type)
8267 {
8268 tree res;
8269
8270 if (!validate_arg (arg0, REAL_TYPE)
8271 || !validate_arg (arg1, REAL_TYPE))
8272 return NULL_TREE;
8273
8274 /* Calculate the result when the argument is a constant. */
8275 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8276 return res;
8277
8278 /* Optimize pow(1.0,y) = 1.0. */
8279 if (real_onep (arg0))
8280 return omit_one_operand (type, build_real (type, dconst1), arg1);
8281
8282 if (TREE_CODE (arg1) == REAL_CST
8283 && !TREE_OVERFLOW (arg1))
8284 {
8285 REAL_VALUE_TYPE cint;
8286 REAL_VALUE_TYPE c;
8287 HOST_WIDE_INT n;
8288
8289 c = TREE_REAL_CST (arg1);
8290
8291 /* Optimize pow(x,0.0) = 1.0. */
8292 if (REAL_VALUES_EQUAL (c, dconst0))
8293 return omit_one_operand (type, build_real (type, dconst1),
8294 arg0);
8295
8296 /* Optimize pow(x,1.0) = x. */
8297 if (REAL_VALUES_EQUAL (c, dconst1))
8298 return arg0;
8299
8300 /* Optimize pow(x,-1.0) = 1.0/x. */
8301 if (REAL_VALUES_EQUAL (c, dconstm1))
8302 return fold_build2 (RDIV_EXPR, type,
8303 build_real (type, dconst1), arg0);
8304
8305 /* Optimize pow(x,0.5) = sqrt(x). */
8306 if (flag_unsafe_math_optimizations
8307 && REAL_VALUES_EQUAL (c, dconsthalf))
8308 {
8309 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8310
8311 if (sqrtfn != NULL_TREE)
8312 return build_call_expr (sqrtfn, 1, arg0);
8313 }
8314
8315 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8316 if (flag_unsafe_math_optimizations)
8317 {
8318 const REAL_VALUE_TYPE dconstroot
8319 = real_value_truncate (TYPE_MODE (type), dconstthird);
8320
8321 if (REAL_VALUES_EQUAL (c, dconstroot))
8322 {
8323 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8324 if (cbrtfn != NULL_TREE)
8325 return build_call_expr (cbrtfn, 1, arg0);
8326 }
8327 }
8328
8329 /* Check for an integer exponent. */
8330 n = real_to_integer (&c);
8331 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
8332 if (real_identical (&c, &cint))
8333 {
8334 /* Attempt to evaluate pow at compile-time. */
8335 if (TREE_CODE (arg0) == REAL_CST
8336 && !TREE_OVERFLOW (arg0))
8337 {
8338 REAL_VALUE_TYPE x;
8339 bool inexact;
8340
8341 x = TREE_REAL_CST (arg0);
8342 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8343 if (flag_unsafe_math_optimizations || !inexact)
8344 return build_real (type, x);
8345 }
8346
8347 /* Strip sign ops from even integer powers. */
8348 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8349 {
8350 tree narg0 = fold_strip_sign_ops (arg0);
8351 if (narg0)
8352 return build_call_expr (fndecl, 2, narg0, arg1);
8353 }
8354 }
8355 }
8356
8357 if (flag_unsafe_math_optimizations)
8358 {
8359 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8360
8361 /* Optimize pow(expN(x),y) = expN(x*y). */
8362 if (BUILTIN_EXPONENT_P (fcode))
8363 {
8364 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8365 tree arg = CALL_EXPR_ARG (arg0, 0);
8366 arg = fold_build2 (MULT_EXPR, type, arg, arg1);
8367 return build_call_expr (expfn, 1, arg);
8368 }
8369
8370 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8371 if (BUILTIN_SQRT_P (fcode))
8372 {
8373 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8374 tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
8375 build_real (type, dconsthalf));
8376 return build_call_expr (fndecl, 2, narg0, narg1);
8377 }
8378
8379 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8380 if (BUILTIN_CBRT_P (fcode))
8381 {
8382 tree arg = CALL_EXPR_ARG (arg0, 0);
8383 if (tree_expr_nonnegative_p (arg))
8384 {
8385 const REAL_VALUE_TYPE dconstroot
8386 = real_value_truncate (TYPE_MODE (type), dconstthird);
8387 tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
8388 build_real (type, dconstroot));
8389 return build_call_expr (fndecl, 2, arg, narg1);
8390 }
8391 }
8392
8393 /* Optimize pow(pow(x,y),z) = pow(x,y*z). */
8394 if (fcode == BUILT_IN_POW
8395 || fcode == BUILT_IN_POWF
8396 || fcode == BUILT_IN_POWL)
8397 {
8398 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8399 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8400 tree narg1 = fold_build2 (MULT_EXPR, type, arg01, arg1);
8401 return build_call_expr (fndecl, 2, arg00, narg1);
8402 }
8403 }
8404
8405 return NULL_TREE;
8406 }
8407
8408 /* Fold a builtin function call to powi, powif, or powil with arguments
8409 ARG0 and ARG1. Return NULL_TREE if no simplification can be made. */
8410 static tree
8411 fold_builtin_powi (tree fndecl ATTRIBUTE_UNUSED,
8412 tree arg0, tree arg1, tree type)
8413 {
8414 if (!validate_arg (arg0, REAL_TYPE)
8415 || !validate_arg (arg1, INTEGER_TYPE))
8416 return NULL_TREE;
8417
8418 /* Optimize pow(1.0,y) = 1.0. */
8419 if (real_onep (arg0))
8420 return omit_one_operand (type, build_real (type, dconst1), arg1);
8421
8422 if (host_integerp (arg1, 0))
8423 {
8424 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
8425
8426 /* Evaluate powi at compile-time. */
8427 if (TREE_CODE (arg0) == REAL_CST
8428 && !TREE_OVERFLOW (arg0))
8429 {
8430 REAL_VALUE_TYPE x;
8431 x = TREE_REAL_CST (arg0);
8432 real_powi (&x, TYPE_MODE (type), &x, c);
8433 return build_real (type, x);
8434 }
8435
8436 /* Optimize pow(x,0) = 1.0. */
8437 if (c == 0)
8438 return omit_one_operand (type, build_real (type, dconst1),
8439 arg0);
8440
8441 /* Optimize pow(x,1) = x. */
8442 if (c == 1)
8443 return arg0;
8444
8445 /* Optimize pow(x,-1) = 1.0/x. */
8446 if (c == -1)
8447 return fold_build2 (RDIV_EXPR, type,
8448 build_real (type, dconst1), arg0);
8449 }
8450
8451 return NULL_TREE;
8452 }
8453
8454 /* A subroutine of fold_builtin to fold the various exponent
8455 functions. Return NULL_TREE if no simplification can be made.
8456 FUNC is the corresponding MPFR exponent function. */
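/* E.g. with -funsafe-math-optimizations exp (log (x)) folds to x and
   exp2 (log2 (x)) folds to x. */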
8457
8458 static tree
8459 fold_builtin_exponent (tree fndecl, tree arg,
8460 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8461 {
8462 if (validate_arg (arg, REAL_TYPE))
8463 {
8464 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8465 tree res;
8466
8467 /* Calculate the result when the argument is a constant. */
8468 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8469 return res;
8470
8471 /* Optimize expN(logN(x)) = x. */
8472 if (flag_unsafe_math_optimizations)
8473 {
8474 const enum built_in_function fcode = builtin_mathfn_code (arg);
8475
8476 if ((func == mpfr_exp
8477 && (fcode == BUILT_IN_LOG
8478 || fcode == BUILT_IN_LOGF
8479 || fcode == BUILT_IN_LOGL))
8480 || (func == mpfr_exp2
8481 && (fcode == BUILT_IN_LOG2
8482 || fcode == BUILT_IN_LOG2F
8483 || fcode == BUILT_IN_LOG2L))
8484 || (func == mpfr_exp10
8485 && (fcode == BUILT_IN_LOG10
8486 || fcode == BUILT_IN_LOG10F
8487 || fcode == BUILT_IN_LOG10L)))
8488 return fold_convert (type, CALL_EXPR_ARG (arg, 0));
8489 }
8490 }
8491
8492 return NULL_TREE;
8493 }
8494
8495 /* Return true if VAR is a VAR_DECL or a component thereof. */
8496
8497 static bool
8498 var_decl_component_p (tree var)
8499 {
8500 tree inner = var;
8501 while (handled_component_p (inner))
8502 inner = TREE_OPERAND (inner, 0);
8503 return SSA_VAR_P (inner);
8504 }
8505
8506 /* Fold function call to builtin memset. Return
8507 NULL_TREE if no simplification can be made. */
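/* E.g. memset (&i, 0, sizeof (i)) for a suitably aligned integer variable I
   can fold to the store i = 0. */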
8508
8509 static tree
8510 fold_builtin_memset (tree dest, tree c, tree len, tree type, bool ignore)
8511 {
8512 tree var, ret;
8513 unsigned HOST_WIDE_INT length, cval;
8514
8515 if (! validate_arg (dest, POINTER_TYPE)
8516 || ! validate_arg (c, INTEGER_TYPE)
8517 || ! validate_arg (len, INTEGER_TYPE))
8518 return NULL_TREE;
8519
8520 if (! host_integerp (len, 1))
8521 return NULL_TREE;
8522
8523 /* If the LEN parameter is zero, return DEST. */
8524 if (integer_zerop (len))
8525 return omit_one_operand (type, dest, c);
8526
8527 if (! host_integerp (c, 1) || TREE_SIDE_EFFECTS (dest))
8528 return NULL_TREE;
8529
8530 var = dest;
8531 STRIP_NOPS (var);
8532 if (TREE_CODE (var) != ADDR_EXPR)
8533 return NULL_TREE;
8534
8535 var = TREE_OPERAND (var, 0);
8536 if (TREE_THIS_VOLATILE (var))
8537 return NULL_TREE;
8538
8539 if (!INTEGRAL_TYPE_P (TREE_TYPE (var))
8540 && !POINTER_TYPE_P (TREE_TYPE (var)))
8541 return NULL_TREE;
8542
8543 if (! var_decl_component_p (var))
8544 return NULL_TREE;
8545
8546 length = tree_low_cst (len, 1);
8547 if (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (var))) != length
8548 || get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT
8549 < (int) length)
8550 return NULL_TREE;
8551
8552 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
8553 return NULL_TREE;
8554
8555 if (integer_zerop (c))
8556 cval = 0;
8557 else
8558 {
8559 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
8560 return NULL_TREE;
8561
8562 cval = tree_low_cst (c, 1);
8563 cval &= 0xff;
8564 cval |= cval << 8;
8565 cval |= cval << 16;
8566 cval |= (cval << 31) << 1;
8567 }
8568
8569 ret = build_int_cst_type (TREE_TYPE (var), cval);
8570 ret = build2 (MODIFY_EXPR, TREE_TYPE (var), var, ret);
8571 if (ignore)
8572 return ret;
8573
8574 return omit_one_operand (type, dest, ret);
8575 }
8576
8577 /* Fold function call to builtin bzero. Return
8578 NULL_TREE if no simplification can be made. */
8579
8580 static tree
8581 fold_builtin_bzero (tree dest, tree size, bool ignore)
8582 {
8583 if (! validate_arg (dest, POINTER_TYPE)
8584 || ! validate_arg (size, INTEGER_TYPE))
8585 return NULL_TREE;
8586
8587 if (!ignore)
8588 return NULL_TREE;
8589
8590 /* New argument list transforming bzero(ptr x, int y) to
8591 memset(ptr x, int 0, size_t y). This is done this way
8592 so that if it isn't expanded inline, we fall back to
8593 calling bzero instead of memset. */
8594
8595 return fold_builtin_memset (dest, integer_zero_node,
8596 fold_convert (sizetype, size),
8597 void_type_node, ignore);
8598 }
8599
8600 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8601 NULL_TREE if no simplification can be made.
8602 If ENDP is 0, return DEST (like memcpy).
8603 If ENDP is 1, return DEST+LEN (like mempcpy).
8604 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8605 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8606 (memmove). */
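/* E.g. memcpy (p, p, n) folds to p, and memcpy (&d, &s, sizeof (d)) for two
   compatible, suitably aligned variables can fold to the assignment d = s. */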
8607
8608 static tree
8609 fold_builtin_memory_op (tree dest, tree src, tree len, tree type, bool ignore, int endp)
8610 {
8611 tree destvar, srcvar, expr;
8612
8613 if (! validate_arg (dest, POINTER_TYPE)
8614 || ! validate_arg (src, POINTER_TYPE)
8615 || ! validate_arg (len, INTEGER_TYPE))
8616 return NULL_TREE;
8617
8618 /* If the LEN parameter is zero, return DEST. */
8619 if (integer_zerop (len))
8620 return omit_one_operand (type, dest, src);
8621
8622 /* If SRC and DEST are the same (and not volatile), return
8623 DEST{,+LEN,+LEN-1}. */
8624 if (operand_equal_p (src, dest, 0))
8625 expr = len;
8626 else
8627 {
8628 tree srctype, desttype;
8629 if (endp == 3)
8630 {
8631 int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8632 int dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8633
8634 /* Both DEST and SRC must be pointer types.
8635 ??? This is what old code did. Is the testing for pointer types
8636 really mandatory?
8637
8638 If either SRC is readonly or length is 1, we can use memcpy. */
8639 if (dest_align && src_align
8640 && (readonly_data_expr (src)
8641 || (host_integerp (len, 1)
8642 && (MIN (src_align, dest_align) / BITS_PER_UNIT >=
8643 tree_low_cst (len, 1)))))
8644 {
8645 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8646 if (!fn)
8647 return NULL_TREE;
8648 return build_call_expr (fn, 3, dest, src, len);
8649 }
8650 return NULL_TREE;
8651 }
8652
8653 if (!host_integerp (len, 0))
8654 return NULL_TREE;
8655 /* FIXME:
8656 This logic loses for arguments like (type *)malloc (sizeof (type)),
8657 since we strip the casts away, down to the VOID return value of malloc.
8658 Perhaps we ought to inherit the type from the non-VOID argument here? */
8659 STRIP_NOPS (src);
8660 STRIP_NOPS (dest);
8661 srctype = TREE_TYPE (TREE_TYPE (src));
8662 desttype = TREE_TYPE (TREE_TYPE (dest));
8663 if (!srctype || !desttype
8664 || !TYPE_SIZE_UNIT (srctype)
8665 || !TYPE_SIZE_UNIT (desttype)
8666 || TREE_CODE (TYPE_SIZE_UNIT (srctype)) != INTEGER_CST
8667 || TREE_CODE (TYPE_SIZE_UNIT (desttype)) != INTEGER_CST
8668 || !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len)
8669 || !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8670 return NULL_TREE;
8671
8672 if (get_pointer_alignment (dest, BIGGEST_ALIGNMENT)
8673 < (int) TYPE_ALIGN (desttype)
8674 || (get_pointer_alignment (src, BIGGEST_ALIGNMENT)
8675 < (int) TYPE_ALIGN (srctype)))
8676 return NULL_TREE;
8677
8678 if (!ignore)
8679 dest = builtin_save_expr (dest);
8680
8681 srcvar = build_fold_indirect_ref (src);
8682 if (TREE_THIS_VOLATILE (srcvar))
8683 return NULL_TREE;
8684 if (!tree_int_cst_equal (lang_hooks.expr_size (srcvar), len))
8685 return NULL_TREE;
8686 /* With memcpy, it is possible to bypass aliasing rules, so without this
8687 check execute/20060930-2.c, for instance, would be misoptimized because
8688 it uses a conflicting alias set to hold the argument for the memcpy call.
8689 This check is probably unnecessary with -fno-strict-aliasing.
8690 Similarly for destvar. See also PR29286. */
8691 if (!var_decl_component_p (srcvar)
8692 /* Accept: memcpy (*char_var, "test", 1); that simplifies
8693 to char_var='t'. */
8694 || is_gimple_min_invariant (srcvar)
8695 || readonly_data_expr (src))
8696 return NULL_TREE;
8697
8698 destvar = build_fold_indirect_ref (dest);
8699 if (TREE_THIS_VOLATILE (destvar))
8700 return NULL_TREE;
8701 if (!tree_int_cst_equal (lang_hooks.expr_size (destvar), len))
8702 return NULL_TREE;
8703 if (!var_decl_component_p (destvar))
8704 return NULL_TREE;
8705
8706 if (srctype == desttype
8707 || (gimple_in_ssa_p (cfun)
8708 && useless_type_conversion_p (desttype, srctype)))
8709 expr = srcvar;
8710 else if ((INTEGRAL_TYPE_P (TREE_TYPE (srcvar))
8711 || POINTER_TYPE_P (TREE_TYPE (srcvar)))
8712 && (INTEGRAL_TYPE_P (TREE_TYPE (destvar))
8713 || POINTER_TYPE_P (TREE_TYPE (destvar))))
8714 expr = fold_convert (TREE_TYPE (destvar), srcvar);
8715 else
8716 expr = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (destvar), srcvar);
8717 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, expr);
8718 }
8719
8720 if (ignore)
8721 return expr;
8722
8723 if (endp == 0 || endp == 3)
8724 return omit_one_operand (type, dest, expr);
8725
8726 if (expr == len)
8727 expr = NULL_TREE;
8728
8729 if (endp == 2)
8730 len = fold_build2 (MINUS_EXPR, TREE_TYPE (len), len,
8731 ssize_int (1));
8732
8733 dest = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
8734 dest = fold_convert (type, dest);
8735 if (expr)
8736 dest = omit_one_operand (type, dest, expr);
8737 return dest;
8738 }
8739
8740 /* Fold function call to builtin strcpy with arguments DEST and SRC.
8741 If LEN is not NULL, it represents the length of the string to be
8742 copied. Return NULL_TREE if no simplification can be made. */
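/* E.g. strcpy (buf, "hi") becomes memcpy (buf, "hi", 3), copying the
   terminating nul as well, unless optimizing for size. */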
8743
8744 tree
8745 fold_builtin_strcpy (tree fndecl, tree dest, tree src, tree len)
8746 {
8747 tree fn;
8748
8749 if (!validate_arg (dest, POINTER_TYPE)
8750 || !validate_arg (src, POINTER_TYPE))
8751 return NULL_TREE;
8752
8753 /* If SRC and DEST are the same (and not volatile), return DEST. */
8754 if (operand_equal_p (src, dest, 0))
8755 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);
8756
8757 if (optimize_size)
8758 return NULL_TREE;
8759
8760 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8761 if (!fn)
8762 return NULL_TREE;
8763
8764 if (!len)
8765 {
8766 len = c_strlen (src, 1);
8767 if (! len || TREE_SIDE_EFFECTS (len))
8768 return NULL_TREE;
8769 }
8770
8771 len = size_binop (PLUS_EXPR, len, ssize_int (1));
8772 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
8773 build_call_expr (fn, 3, dest, src, len));
8774 }
8775
8776 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
8777 If SLEN is not NULL, it represents the length of the source string.
8778 Return NULL_TREE if no simplification can be made. */
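/* For example, with a constant length that does not exceed the source
   length plus one, strncpy (d, "abcdef", 4) is folded into
   memcpy (d, "abcdef", 4), and strncpy (d, s, 0) folds to d.  When LEN
   exceeds the source length plus one the call is left alone, since the
   extra NUL padding would need a memset as well.  */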
8779
8780 tree
8781 fold_builtin_strncpy (tree fndecl, tree dest, tree src, tree len, tree slen)
8782 {
8783 tree fn;
8784
8785 if (!validate_arg (dest, POINTER_TYPE)
8786 || !validate_arg (src, POINTER_TYPE)
8787 || !validate_arg (len, INTEGER_TYPE))
8788 return NULL_TREE;
8789
8790 /* If the LEN parameter is zero, return DEST. */
8791 if (integer_zerop (len))
8792 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
8793
8794 /* We can't compare slen with len as constants below if len is not a
8795 constant. */
8796 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
8797 return NULL_TREE;
8798
8799 if (!slen)
8800 slen = c_strlen (src, 1);
8801
8802 /* Now the length of the source string must be a compile-time constant. */
8803 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
8804 return NULL_TREE;
8805
8806 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
8807
8808 /* We do not support simplification of this case, though we do
8809 support it when expanding trees into RTL. */
8810 /* FIXME: generate a call to __builtin_memset. */
8811 if (tree_int_cst_lt (slen, len))
8812 return NULL_TREE;
8813
8814 /* OK transform into builtin memcpy. */
8815 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8816 if (!fn)
8817 return NULL_TREE;
8818 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
8819 build_call_expr (fn, 3, dest, src, len));
8820 }
8821
8822 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8823 arguments to the call, and TYPE is its return type.
8824 Return NULL_TREE if no simplification can be made. */
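/* For example, with a constant string and an in-range constant length,
   memchr ("abc", 'b', 3) folds to ARG1 + 1 (a POINTER_PLUS_EXPR), while
   memchr ("abc", 'z', 3) folds to a null pointer constant.  */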
8825
8826 static tree
8827 fold_builtin_memchr (tree arg1, tree arg2, tree len, tree type)
8828 {
8829 if (!validate_arg (arg1, POINTER_TYPE)
8830 || !validate_arg (arg2, INTEGER_TYPE)
8831 || !validate_arg (len, INTEGER_TYPE))
8832 return NULL_TREE;
8833 else
8834 {
8835 const char *p1;
8836
8837 if (TREE_CODE (arg2) != INTEGER_CST
8838 || !host_integerp (len, 1))
8839 return NULL_TREE;
8840
8841 p1 = c_getstr (arg1);
8842 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
8843 {
8844 char c;
8845 const char *r;
8846 tree tem;
8847
8848 if (target_char_cast (arg2, &c))
8849 return NULL_TREE;
8850
8851 r = memchr (p1, c, tree_low_cst (len, 1));
8852
8853 if (r == NULL)
8854 return build_int_cst (TREE_TYPE (arg1), 0);
8855
8856 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (arg1), arg1,
8857 size_int (r - p1));
8858 return fold_convert (type, tem);
8859 }
8860 return NULL_TREE;
8861 }
8862 }
8863
8864 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8865 Return NULL_TREE if no simplification can be made. */
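/* For example, memcmp (p, q, 0) folds to 0, memcmp ("abc", "abd", 3)
   folds to -1 at compile time, and memcmp (p, q, 1) becomes the byte
   difference *(const unsigned char *) p - *(const unsigned char *) q.  */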
8866
8867 static tree
8868 fold_builtin_memcmp (tree arg1, tree arg2, tree len)
8869 {
8870 const char *p1, *p2;
8871
8872 if (!validate_arg (arg1, POINTER_TYPE)
8873 || !validate_arg (arg2, POINTER_TYPE)
8874 || !validate_arg (len, INTEGER_TYPE))
8875 return NULL_TREE;
8876
8877 /* If the LEN parameter is zero, return zero. */
8878 if (integer_zerop (len))
8879 return omit_two_operands (integer_type_node, integer_zero_node,
8880 arg1, arg2);
8881
8882 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8883 if (operand_equal_p (arg1, arg2, 0))
8884 return omit_one_operand (integer_type_node, integer_zero_node, len);
8885
8886 p1 = c_getstr (arg1);
8887 p2 = c_getstr (arg2);
8888
8889 /* If all arguments are constant, and the value of len is not greater
8890 than the lengths of arg1 and arg2, evaluate at compile-time. */
8891 if (host_integerp (len, 1) && p1 && p2
8892 && compare_tree_int (len, strlen (p1) + 1) <= 0
8893 && compare_tree_int (len, strlen (p2) + 1) <= 0)
8894 {
8895 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
8896
8897 if (r > 0)
8898 return integer_one_node;
8899 else if (r < 0)
8900 return integer_minus_one_node;
8901 else
8902 return integer_zero_node;
8903 }
8904
8905 /* If the len parameter is one, return an expression corresponding to
8906 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8907 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
8908 {
8909 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8910 tree cst_uchar_ptr_node
8911 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8912
8913 tree ind1 = fold_convert (integer_type_node,
8914 build1 (INDIRECT_REF, cst_uchar_node,
8915 fold_convert (cst_uchar_ptr_node,
8916 arg1)));
8917 tree ind2 = fold_convert (integer_type_node,
8918 build1 (INDIRECT_REF, cst_uchar_node,
8919 fold_convert (cst_uchar_ptr_node,
8920 arg2)));
8921 return fold_build2 (MINUS_EXPR, integer_type_node, ind1, ind2);
8922 }
8923
8924 return NULL_TREE;
8925 }
8926
8927 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
8928 Return NULL_TREE if no simplification can be made. */
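/* For example, strcmp (s, s) folds to 0, strcmp ("abc", "abd") folds to
   -1 at compile time, and strcmp (s, "") becomes
   *(const unsigned char *) s.  */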
8929
8930 static tree
8931 fold_builtin_strcmp (tree arg1, tree arg2)
8932 {
8933 const char *p1, *p2;
8934
8935 if (!validate_arg (arg1, POINTER_TYPE)
8936 || !validate_arg (arg2, POINTER_TYPE))
8937 return NULL_TREE;
8938
8939 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8940 if (operand_equal_p (arg1, arg2, 0))
8941 return integer_zero_node;
8942
8943 p1 = c_getstr (arg1);
8944 p2 = c_getstr (arg2);
8945
8946 if (p1 && p2)
8947 {
8948 const int i = strcmp (p1, p2);
8949 if (i < 0)
8950 return integer_minus_one_node;
8951 else if (i > 0)
8952 return integer_one_node;
8953 else
8954 return integer_zero_node;
8955 }
8956
8957 /* If the second arg is "", return *(const unsigned char*)arg1. */
8958 if (p2 && *p2 == '\0')
8959 {
8960 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8961 tree cst_uchar_ptr_node
8962 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8963
8964 return fold_convert (integer_type_node,
8965 build1 (INDIRECT_REF, cst_uchar_node,
8966 fold_convert (cst_uchar_ptr_node,
8967 arg1)));
8968 }
8969
8970 /* If the first arg is "", return -*(const unsigned char*)arg2. */
8971 if (p1 && *p1 == '\0')
8972 {
8973 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8974 tree cst_uchar_ptr_node
8975 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8976
8977 tree temp = fold_convert (integer_type_node,
8978 build1 (INDIRECT_REF, cst_uchar_node,
8979 fold_convert (cst_uchar_ptr_node,
8980 arg2)));
8981 return fold_build1 (NEGATE_EXPR, integer_type_node, temp);
8982 }
8983
8984 return NULL_TREE;
8985 }
8986
8987 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
8988 Return NULL_TREE if no simplification can be made. */
8989
8990 static tree
8991 fold_builtin_strncmp (tree arg1, tree arg2, tree len)
8992 {
8993 const char *p1, *p2;
8994
8995 if (!validate_arg (arg1, POINTER_TYPE)
8996 || !validate_arg (arg2, POINTER_TYPE)
8997 || !validate_arg (len, INTEGER_TYPE))
8998 return NULL_TREE;
8999
9000 /* If the LEN parameter is zero, return zero. */
9001 if (integer_zerop (len))
9002 return omit_two_operands (integer_type_node, integer_zero_node,
9003 arg1, arg2);
9004
9005 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9006 if (operand_equal_p (arg1, arg2, 0))
9007 return omit_one_operand (integer_type_node, integer_zero_node, len);
9008
9009 p1 = c_getstr (arg1);
9010 p2 = c_getstr (arg2);
9011
9012 if (host_integerp (len, 1) && p1 && p2)
9013 {
9014 const int i = strncmp (p1, p2, tree_low_cst (len, 1));
9015 if (i > 0)
9016 return integer_one_node;
9017 else if (i < 0)
9018 return integer_minus_one_node;
9019 else
9020 return integer_zero_node;
9021 }
9022
9023 /* If the second arg is "", and the length is greater than zero,
9024 return *(const unsigned char*)arg1. */
9025 if (p2 && *p2 == '\0'
9026 && TREE_CODE (len) == INTEGER_CST
9027 && tree_int_cst_sgn (len) == 1)
9028 {
9029 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9030 tree cst_uchar_ptr_node
9031 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9032
9033 return fold_convert (integer_type_node,
9034 build1 (INDIRECT_REF, cst_uchar_node,
9035 fold_convert (cst_uchar_ptr_node,
9036 arg1)));
9037 }
9038
9039 /* If the first arg is "", and the length is greater than zero,
9040 return -*(const unsigned char*)arg2. */
9041 if (p1 && *p1 == '\0'
9042 && TREE_CODE (len) == INTEGER_CST
9043 && tree_int_cst_sgn (len) == 1)
9044 {
9045 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9046 tree cst_uchar_ptr_node
9047 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9048
9049 tree temp = fold_convert (integer_type_node,
9050 build1 (INDIRECT_REF, cst_uchar_node,
9051 fold_convert (cst_uchar_ptr_node,
9052 arg2)));
9053 return fold_build1 (NEGATE_EXPR, integer_type_node, temp);
9054 }
9055
9056 /* If the len parameter is one, return an expression corresponding to
9057 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
9058 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
9059 {
9060 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9061 tree cst_uchar_ptr_node
9062 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9063
9064 tree ind1 = fold_convert (integer_type_node,
9065 build1 (INDIRECT_REF, cst_uchar_node,
9066 fold_convert (cst_uchar_ptr_node,
9067 arg1)));
9068 tree ind2 = fold_convert (integer_type_node,
9069 build1 (INDIRECT_REF, cst_uchar_node,
9070 fold_convert (cst_uchar_ptr_node,
9071 arg2)));
9072 return fold_build2 (MINUS_EXPR, integer_type_node, ind1, ind2);
9073 }
9074
9075 return NULL_TREE;
9076 }
9077
9078 /* Fold function call to builtin signbit, signbitf or signbitl with argument
9079 ARG. Return NULL_TREE if no simplification can be made. */
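/* For example, signbit (-3.0) folds to 1, signbit of any expression known
   to be non-negative folds to 0, and when the format has no signed zeros
   signbit (x) is rewritten as x < 0.0.  */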
9080
9081 static tree
9082 fold_builtin_signbit (tree arg, tree type)
9083 {
9084 tree temp;
9085
9086 if (!validate_arg (arg, REAL_TYPE))
9087 return NULL_TREE;
9088
9089 /* If ARG is a compile-time constant, determine the result. */
9090 if (TREE_CODE (arg) == REAL_CST
9091 && !TREE_OVERFLOW (arg))
9092 {
9093 REAL_VALUE_TYPE c;
9094
9095 c = TREE_REAL_CST (arg);
9096 temp = REAL_VALUE_NEGATIVE (c) ? integer_one_node : integer_zero_node;
9097 return fold_convert (type, temp);
9098 }
9099
9100 /* If ARG is non-negative, the result is always zero. */
9101 if (tree_expr_nonnegative_p (arg))
9102 return omit_one_operand (type, integer_zero_node, arg);
9103
9104 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9105 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
9106 return fold_build2 (LT_EXPR, type, arg,
9107 build_real (TREE_TYPE (arg), dconst0));
9108
9109 return NULL_TREE;
9110 }
9111
9112 /* Fold function call to builtin copysign, copysignf or copysignl with
9113 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9114 be made. */
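/* For example, copysign (x, x) folds to x, copysign (-2.0, 3.0) folds to
   2.0, and copysign (x, y) with Y known to be non-negative becomes
   fabs (x), with Y still evaluated for its side effects.  */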
9115
9116 static tree
9117 fold_builtin_copysign (tree fndecl, tree arg1, tree arg2, tree type)
9118 {
9119 tree tem;
9120
9121 if (!validate_arg (arg1, REAL_TYPE)
9122 || !validate_arg (arg2, REAL_TYPE))
9123 return NULL_TREE;
9124
9125 /* copysign(X,X) is X. */
9126 if (operand_equal_p (arg1, arg2, 0))
9127 return fold_convert (type, arg1);
9128
9129 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9130 if (TREE_CODE (arg1) == REAL_CST
9131 && TREE_CODE (arg2) == REAL_CST
9132 && !TREE_OVERFLOW (arg1)
9133 && !TREE_OVERFLOW (arg2))
9134 {
9135 REAL_VALUE_TYPE c1, c2;
9136
9137 c1 = TREE_REAL_CST (arg1);
9138 c2 = TREE_REAL_CST (arg2);
9139 /* c1.sign := c2.sign. */
9140 real_copysign (&c1, &c2);
9141 return build_real (type, c1);
9142 }
9143
9144 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9145 Remember to evaluate Y for side-effects. */
9146 if (tree_expr_nonnegative_p (arg2))
9147 return omit_one_operand (type,
9148 fold_build1 (ABS_EXPR, type, arg1),
9149 arg2);
9150
9151 /* Strip sign changing operations for the first argument. */
9152 tem = fold_strip_sign_ops (arg1);
9153 if (tem)
9154 return build_call_expr (fndecl, 2, tem, arg2);
9155
9156 return NULL_TREE;
9157 }
9158
9159 /* Fold a call to builtin isascii with argument ARG. */
9160
9161 static tree
9162 fold_builtin_isascii (tree arg)
9163 {
9164 if (!validate_arg (arg, INTEGER_TYPE))
9165 return NULL_TREE;
9166 else
9167 {
9168 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9169 arg = build2 (BIT_AND_EXPR, integer_type_node, arg,
9170 build_int_cst (NULL_TREE,
9171 ~ (unsigned HOST_WIDE_INT) 0x7f));
9172 return fold_build2 (EQ_EXPR, integer_type_node,
9173 arg, integer_zero_node);
9174 }
9175 }
9176
9177 /* Fold a call to builtin toascii with argument ARG. */
9178
9179 static tree
9180 fold_builtin_toascii (tree arg)
9181 {
9182 if (!validate_arg (arg, INTEGER_TYPE))
9183 return NULL_TREE;
9184
9185 /* Transform toascii(c) -> (c & 0x7f). */
9186 return fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9187 build_int_cst (NULL_TREE, 0x7f));
9188 }
9189
9190 /* Fold a call to builtin isdigit with argument ARG. */
9191
9192 static tree
9193 fold_builtin_isdigit (tree arg)
9194 {
9195 if (!validate_arg (arg, INTEGER_TYPE))
9196 return NULL_TREE;
9197 else
9198 {
9199 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9200 /* According to the C standard, isdigit is unaffected by locale.
9201 However, it definitely is affected by the target character set. */
9202 unsigned HOST_WIDE_INT target_digit0
9203 = lang_hooks.to_target_charset ('0');
9204
9205 if (target_digit0 == 0)
9206 return NULL_TREE;
9207
9208 arg = fold_convert (unsigned_type_node, arg);
9209 arg = build2 (MINUS_EXPR, unsigned_type_node, arg,
9210 build_int_cst (unsigned_type_node, target_digit0));
9211 return fold_build2 (LE_EXPR, integer_type_node, arg,
9212 build_int_cst (unsigned_type_node, 9));
9213 }
9214 }
9215
9216 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9217
9218 static tree
9219 fold_builtin_fabs (tree arg, tree type)
9220 {
9221 if (!validate_arg (arg, REAL_TYPE))
9222 return NULL_TREE;
9223
9224 arg = fold_convert (type, arg);
9225 if (TREE_CODE (arg) == REAL_CST)
9226 return fold_abs_const (arg, type);
9227 return fold_build1 (ABS_EXPR, type, arg);
9228 }
9229
9230 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9231
9232 static tree
9233 fold_builtin_abs (tree arg, tree type)
9234 {
9235 if (!validate_arg (arg, INTEGER_TYPE))
9236 return NULL_TREE;
9237
9238 arg = fold_convert (type, arg);
9239 if (TREE_CODE (arg) == INTEGER_CST)
9240 return fold_abs_const (arg, type);
9241 return fold_build1 (ABS_EXPR, type, arg);
9242 }
9243
9244 /* Fold a call to builtin fmin or fmax. */
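/* For example, constant arguments are evaluated via MPFR, fmax (x, nan)
   with a constant quiet NaN folds to x, fmin (x, x) folds to x, and under
   -ffinite-math-only fmax (a, b) becomes MAX_EXPR <a, b>.  */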
9245
9246 static tree
9247 fold_builtin_fmin_fmax (tree arg0, tree arg1, tree type, bool max)
9248 {
9249 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9250 {
9251 /* Calculate the result when the argument is a constant. */
9252 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9253
9254 if (res)
9255 return res;
9256
9257 /* If either argument is NaN, return the other one. Avoid the
9258 transformation if we get (and honor) a signalling NaN. Using
9259 omit_one_operand() ensures we create a non-lvalue. */
9260 if (TREE_CODE (arg0) == REAL_CST
9261 && real_isnan (&TREE_REAL_CST (arg0))
9262 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9263 || ! TREE_REAL_CST (arg0).signalling))
9264 return omit_one_operand (type, arg1, arg0);
9265 if (TREE_CODE (arg1) == REAL_CST
9266 && real_isnan (&TREE_REAL_CST (arg1))
9267 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9268 || ! TREE_REAL_CST (arg1).signalling))
9269 return omit_one_operand (type, arg0, arg1);
9270
9271 /* Transform fmin/fmax(x,x) -> x. */
9272 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9273 return omit_one_operand (type, arg0, arg1);
9274
9275 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9276 functions to return the numeric arg if the other one is NaN.
9277 These tree codes don't honor that, so only transform if
9278 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9279 handled, so we don't have to worry about it either. */
9280 if (flag_finite_math_only)
9281 return fold_build2 ((max ? MAX_EXPR : MIN_EXPR), type,
9282 fold_convert (type, arg0),
9283 fold_convert (type, arg1));
9284 }
9285 return NULL_TREE;
9286 }
9287
9288 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9289
9290 static tree
9291 fold_builtin_carg (tree arg, tree type)
9292 {
9293 if (validate_arg (arg, COMPLEX_TYPE))
9294 {
9295 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9296
9297 if (atan2_fn)
9298 {
9299 tree new_arg = builtin_save_expr (arg);
9300 tree r_arg = fold_build1 (REALPART_EXPR, type, new_arg);
9301 tree i_arg = fold_build1 (IMAGPART_EXPR, type, new_arg);
9302 return build_call_expr (atan2_fn, 2, i_arg, r_arg);
9303 }
9304 }
9305
9306 return NULL_TREE;
9307 }
9308
9309 /* Fold a call to builtin logb/ilogb. */
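/* For example, when the radix is 2, logb (8.0) folds to 3.0 and
   ilogb (8.0) folds to 3: GCC keeps significands in [0.5, 1.0), so the
   stored exponent of 8.0 is 4 and we subtract 1.  */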
9310
9311 static tree
9312 fold_builtin_logb (tree arg, tree rettype)
9313 {
9314 if (! validate_arg (arg, REAL_TYPE))
9315 return NULL_TREE;
9316
9317 STRIP_NOPS (arg);
9318
9319 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9320 {
9321 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9322
9323 switch (value->cl)
9324 {
9325 case rvc_nan:
9326 case rvc_inf:
9327 /* If arg is Inf or NaN and we're logb, return it. */
9328 if (TREE_CODE (rettype) == REAL_TYPE)
9329 return fold_convert (rettype, arg);
9330 /* Fall through... */
9331 case rvc_zero:
9332 /* Zero may set errno and/or raise an exception for logb, also
9333 for ilogb we don't know FP_ILOGB0. */
9334 return NULL_TREE;
9335 case rvc_normal:
9336 /* For normal numbers, proceed iff radix == 2. In GCC,
9337 normalized significands are in the range [0.5, 1.0). We
9338 want the exponent as if they were [1.0, 2.0) so get the
9339 exponent and subtract 1. */
9340 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9341 return fold_convert (rettype, build_int_cst (NULL_TREE,
9342 REAL_EXP (value)-1));
9343 break;
9344 }
9345 }
9346
9347 return NULL_TREE;
9348 }
9349
9350 /* Fold a call to builtin significand, if radix == 2. */
9351
9352 static tree
9353 fold_builtin_significand (tree arg, tree rettype)
9354 {
9355 if (! validate_arg (arg, REAL_TYPE))
9356 return NULL_TREE;
9357
9358 STRIP_NOPS (arg);
9359
9360 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9361 {
9362 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9363
9364 switch (value->cl)
9365 {
9366 case rvc_zero:
9367 case rvc_nan:
9368 case rvc_inf:
9369 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9370 return fold_convert (rettype, arg);
9371 case rvc_normal:
9372 /* For normal numbers, proceed iff radix == 2. */
9373 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9374 {
9375 REAL_VALUE_TYPE result = *value;
9376 /* In GCC, normalized significands are in the range [0.5,
9377 1.0). We want them to be [1.0, 2.0) so set the
9378 exponent to 1. */
9379 SET_REAL_EXP (&result, 1);
9380 return build_real (rettype, result);
9381 }
9382 break;
9383 }
9384 }
9385
9386 return NULL_TREE;
9387 }
9388
9389 /* Fold a call to builtin frexp; we can assume the base is 2. */
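/* For example, frexp (8.0, &e) folds to (*e = 4, 0.5), since 8.0 is
   represented as 0.5 * 2**4; for +-0 the result is (*e = 0, +-0), and for
   +-Inf or NaN the argument is returned with *e left unspecified.  */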
9390
9391 static tree
9392 fold_builtin_frexp (tree arg0, tree arg1, tree rettype)
9393 {
9394 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9395 return NULL_TREE;
9396
9397 STRIP_NOPS (arg0);
9398
9399 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9400 return NULL_TREE;
9401
9402 arg1 = build_fold_indirect_ref (arg1);
9403
9404 /* Proceed if a valid pointer type was passed in. */
9405 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9406 {
9407 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9408 tree frac, exp;
9409
9410 switch (value->cl)
9411 {
9412 case rvc_zero:
9413 /* For +-0, return (*exp = 0, +-0). */
9414 exp = integer_zero_node;
9415 frac = arg0;
9416 break;
9417 case rvc_nan:
9418 case rvc_inf:
9419 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9420 return omit_one_operand (rettype, arg0, arg1);
9421 case rvc_normal:
9422 {
9423 /* Since the frexp function always expects base 2, and in
9424 GCC normalized significands are already in the range
9425 [0.5, 1.0), we have exactly what frexp wants. */
9426 REAL_VALUE_TYPE frac_rvt = *value;
9427 SET_REAL_EXP (&frac_rvt, 0);
9428 frac = build_real (rettype, frac_rvt);
9429 exp = build_int_cst (NULL_TREE, REAL_EXP (value));
9430 }
9431 break;
9432 default:
9433 gcc_unreachable ();
9434 }
9435
9436 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
9437 arg1 = fold_build2 (MODIFY_EXPR, rettype, arg1, exp);
9438 TREE_SIDE_EFFECTS (arg1) = 1;
9439 return fold_build2 (COMPOUND_EXPR, rettype, arg1, frac);
9440 }
9441
9442 return NULL_TREE;
9443 }
9444
9445 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9446 then we can assume the base is two. If it's false, then we have to
9447 check the mode of the TYPE parameter in certain cases. */
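/* For example, ldexp (x, 0) and ldexp (0.0, n) fold to the first argument,
   and ldexp (1.5, 2) folds to 6.0 provided the result is representable in
   the target mode; scalbn and scalbln get the constant folding only when
   the type's radix is known to be 2.  */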
9448
9449 static tree
9450 fold_builtin_load_exponent (tree arg0, tree arg1, tree type, bool ldexp)
9451 {
9452 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9453 {
9454 STRIP_NOPS (arg0);
9455 STRIP_NOPS (arg1);
9456
9457 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9458 if (real_zerop (arg0) || integer_zerop (arg1)
9459 || (TREE_CODE (arg0) == REAL_CST
9460 && !real_isfinite (&TREE_REAL_CST (arg0))))
9461 return omit_one_operand (type, arg0, arg1);
9462
9463 /* If both arguments are constant, then try to evaluate it. */
9464 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9465 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9466 && host_integerp (arg1, 0))
9467 {
9468 /* Bound the maximum adjustment to twice the range of the
9469 mode's valid exponents. Use abs to ensure the range is
9470 positive as a sanity check. */
9471 const long max_exp_adj = 2 *
9472 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9473 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9474
9475 /* Get the user-requested adjustment. */
9476 const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);
9477
9478 /* The requested adjustment must be inside this range. This
9479 is a preliminary cap to avoid things like overflow; we
9480 may still fail to compute the result for other reasons. */
9481 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9482 {
9483 REAL_VALUE_TYPE initial_result;
9484
9485 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9486
9487 /* Ensure we didn't overflow. */
9488 if (! real_isinf (&initial_result))
9489 {
9490 const REAL_VALUE_TYPE trunc_result
9491 = real_value_truncate (TYPE_MODE (type), initial_result);
9492
9493 /* Only proceed if the target mode can hold the
9494 resulting value. */
9495 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9496 return build_real (type, trunc_result);
9497 }
9498 }
9499 }
9500 }
9501
9502 return NULL_TREE;
9503 }
9504
9505 /* Fold a call to builtin modf. */
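/* For example, modf (2.5, &iptr) folds to (*iptr = 2.0, 0.5), and
   modf (-2.0, &iptr) folds to (*iptr = -2.0, -0.0).  */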
9506
9507 static tree
9508 fold_builtin_modf (tree arg0, tree arg1, tree rettype)
9509 {
9510 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9511 return NULL_TREE;
9512
9513 STRIP_NOPS (arg0);
9514
9515 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9516 return NULL_TREE;
9517
9518 arg1 = build_fold_indirect_ref (arg1);
9519
9520 /* Proceed if a valid pointer type was passed in. */
9521 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9522 {
9523 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9524 REAL_VALUE_TYPE trunc, frac;
9525
9526 switch (value->cl)
9527 {
9528 case rvc_nan:
9529 case rvc_zero:
9530 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9531 trunc = frac = *value;
9532 break;
9533 case rvc_inf:
9534 /* For +-Inf, return (*arg1 = arg0, +-0). */
9535 frac = dconst0;
9536 frac.sign = value->sign;
9537 trunc = *value;
9538 break;
9539 case rvc_normal:
9540 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9541 real_trunc (&trunc, VOIDmode, value);
9542 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9543 /* If the original number was negative and already
9544 integral, then the fractional part is -0.0. */
9545 if (value->sign && frac.cl == rvc_zero)
9546 frac.sign = value->sign;
9547 break;
9548 }
9549
9550 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9551 arg1 = fold_build2 (MODIFY_EXPR, rettype, arg1,
9552 build_real (rettype, trunc));
9553 TREE_SIDE_EFFECTS (arg1) = 1;
9554 return fold_build2 (COMPOUND_EXPR, rettype, arg1,
9555 build_real (rettype, frac));
9556 }
9557
9558 return NULL_TREE;
9559 }
9560
9561 /* Fold a call to __builtin_isnan, __builtin_isinf or __builtin_isfinite.
9562 ARG is the argument for the call; BUILTIN_INDEX selects the predicate. */
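/* For example, isnan (x) on a mode that honors NaNs becomes the
   self-comparison UNORDERED_EXPR <x, x>, isnan (x) folds to 0 when NaNs
   cannot occur, and isinf of a REAL_CST evaluates to -1, 0 or 1 at
   compile time.  */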
9563
9564 static tree
9565 fold_builtin_classify (tree fndecl, tree arg, int builtin_index)
9566 {
9567 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9568 REAL_VALUE_TYPE r;
9569
9570 if (!validate_arg (arg, REAL_TYPE))
9571 {
9572 error ("non-floating-point argument to function %qs",
9573 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
9574 return error_mark_node;
9575 }
9576
9577 switch (builtin_index)
9578 {
9579 case BUILT_IN_ISINF:
9580 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9581 return omit_one_operand (type, integer_zero_node, arg);
9582
9583 if (TREE_CODE (arg) == REAL_CST)
9584 {
9585 r = TREE_REAL_CST (arg);
9586 if (real_isinf (&r))
9587 return real_compare (GT_EXPR, &r, &dconst0)
9588 ? integer_one_node : integer_minus_one_node;
9589 else
9590 return integer_zero_node;
9591 }
9592
9593 return NULL_TREE;
9594
9595 case BUILT_IN_ISFINITE:
9596 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
9597 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9598 return omit_one_operand (type, integer_one_node, arg);
9599
9600 if (TREE_CODE (arg) == REAL_CST)
9601 {
9602 r = TREE_REAL_CST (arg);
9603 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
9604 }
9605
9606 return NULL_TREE;
9607
9608 case BUILT_IN_ISNAN:
9609 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
9610 return omit_one_operand (type, integer_zero_node, arg);
9611
9612 if (TREE_CODE (arg) == REAL_CST)
9613 {
9614 r = TREE_REAL_CST (arg);
9615 return real_isnan (&r) ? integer_one_node : integer_zero_node;
9616 }
9617
9618 arg = builtin_save_expr (arg);
9619 return fold_build2 (UNORDERED_EXPR, type, arg, arg);
9620
9621 default:
9622 gcc_unreachable ();
9623 }
9624 }
9625
9626 /* Fold a call to an unordered comparison function such as
9627 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9628 being called and ARG0 and ARG1 are the arguments for the call.
9629 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9630 the opposite of the desired result. UNORDERED_CODE is used
9631 for modes that can hold NaNs and ORDERED_CODE is used for
9632 the rest. */
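/* For example, isgreater (x, y) is folded to !(x unle y) using UNLE_EXPR
   when the operands' mode honors NaNs, and to !(x <= y) with a plain
   LE_EXPR otherwise; isunordered (x, y) folds to 0 (still evaluating both
   operands) when NaNs cannot occur.  */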
9633
9634 static tree
9635 fold_builtin_unordered_cmp (tree fndecl, tree arg0, tree arg1,
9636 enum tree_code unordered_code,
9637 enum tree_code ordered_code)
9638 {
9639 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9640 enum tree_code code;
9641 tree type0, type1;
9642 enum tree_code code0, code1;
9643 tree cmp_type = NULL_TREE;
9644
9645 type0 = TREE_TYPE (arg0);
9646 type1 = TREE_TYPE (arg1);
9647
9648 code0 = TREE_CODE (type0);
9649 code1 = TREE_CODE (type1);
9650
9651 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9652 /* Choose the wider of two real types. */
9653 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9654 ? type0 : type1;
9655 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9656 cmp_type = type0;
9657 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9658 cmp_type = type1;
9659 else
9660 {
9661 error ("non-floating-point argument to function %qs",
9662 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
9663 return error_mark_node;
9664 }
9665
9666 arg0 = fold_convert (cmp_type, arg0);
9667 arg1 = fold_convert (cmp_type, arg1);
9668
9669 if (unordered_code == UNORDERED_EXPR)
9670 {
9671 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9672 return omit_two_operands (type, integer_zero_node, arg0, arg1);
9673 return fold_build2 (UNORDERED_EXPR, type, arg0, arg1);
9674 }
9675
9676 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
9677 : ordered_code;
9678 return fold_build1 (TRUTH_NOT_EXPR, type,
9679 fold_build2 (code, type, arg0, arg1));
9680 }
9681
9682 /* Fold a call to built-in function FNDECL with 0 arguments.
9683 IGNORE is true if the result of the function call is ignored. This
9684 function returns NULL_TREE if no simplification was possible. */
9685
9686 static tree
9687 fold_builtin_0 (tree fndecl, bool ignore ATTRIBUTE_UNUSED)
9688 {
9689 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9690 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9691 switch (fcode)
9692 {
9693 CASE_FLT_FN (BUILT_IN_INF):
9694 case BUILT_IN_INFD32:
9695 case BUILT_IN_INFD64:
9696 case BUILT_IN_INFD128:
9697 return fold_builtin_inf (type, true);
9698
9699 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9700 return fold_builtin_inf (type, false);
9701
9702 case BUILT_IN_CLASSIFY_TYPE:
9703 return fold_builtin_classify_type (NULL_TREE);
9704
9705 default:
9706 break;
9707 }
9708 return NULL_TREE;
9709 }
9710
9711 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9712 IGNORE is true if the result of the function call is ignored. This
9713 function returns NULL_TREE if no simplification was possible. */
9714
9715 static tree
9716 fold_builtin_1 (tree fndecl, tree arg0, bool ignore)
9717 {
9718 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9719 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9720 switch (fcode)
9721 {
9722
9723 case BUILT_IN_CONSTANT_P:
9724 {
9725 tree val = fold_builtin_constant_p (arg0);
9726
9727 /* Gimplification will pull the CALL_EXPR for the builtin out of
9728 an if condition. When not optimizing, we'll not CSE it back.
9729 To avoid regressions such as link errors, return false now. */
9730 if (!val && !optimize)
9731 val = integer_zero_node;
9732
9733 return val;
9734 }
9735
9736 case BUILT_IN_CLASSIFY_TYPE:
9737 return fold_builtin_classify_type (arg0);
9738
9739 case BUILT_IN_STRLEN:
9740 return fold_builtin_strlen (arg0);
9741
9742 CASE_FLT_FN (BUILT_IN_FABS):
9743 return fold_builtin_fabs (arg0, type);
9744
9745 case BUILT_IN_ABS:
9746 case BUILT_IN_LABS:
9747 case BUILT_IN_LLABS:
9748 case BUILT_IN_IMAXABS:
9749 return fold_builtin_abs (arg0, type);
9750
9751 CASE_FLT_FN (BUILT_IN_CONJ):
9752 if (validate_arg (arg0, COMPLEX_TYPE))
9753 return fold_build1 (CONJ_EXPR, type, arg0);
9754 break;
9755
9756 CASE_FLT_FN (BUILT_IN_CREAL):
9757 if (validate_arg (arg0, COMPLEX_TYPE))
9758 return non_lvalue (fold_build1 (REALPART_EXPR, type, arg0));
9759 break;
9760
9761 CASE_FLT_FN (BUILT_IN_CIMAG):
9762 if (validate_arg (arg0, COMPLEX_TYPE))
9763 return non_lvalue (fold_build1 (IMAGPART_EXPR, type, arg0));
9764 break;
9765
9766 CASE_FLT_FN (BUILT_IN_CCOS):
9767 CASE_FLT_FN (BUILT_IN_CCOSH):
9768 /* These functions are "even", i.e. f(x) == f(-x). */
9769 if (validate_arg (arg0, COMPLEX_TYPE))
9770 {
9771 tree narg = fold_strip_sign_ops (arg0);
9772 if (narg)
9773 return build_call_expr (fndecl, 1, narg);
9774 }
9775 break;
9776
9777 CASE_FLT_FN (BUILT_IN_CABS):
9778 return fold_builtin_cabs (arg0, type, fndecl);
9779
9780 CASE_FLT_FN (BUILT_IN_CARG):
9781 return fold_builtin_carg (arg0, type);
9782
9783 CASE_FLT_FN (BUILT_IN_SQRT):
9784 return fold_builtin_sqrt (arg0, type);
9785
9786 CASE_FLT_FN (BUILT_IN_CBRT):
9787 return fold_builtin_cbrt (arg0, type);
9788
9789 CASE_FLT_FN (BUILT_IN_ASIN):
9790 if (validate_arg (arg0, REAL_TYPE))
9791 return do_mpfr_arg1 (arg0, type, mpfr_asin,
9792 &dconstm1, &dconst1, true);
9793 break;
9794
9795 CASE_FLT_FN (BUILT_IN_ACOS):
9796 if (validate_arg (arg0, REAL_TYPE))
9797 return do_mpfr_arg1 (arg0, type, mpfr_acos,
9798 &dconstm1, &dconst1, true);
9799 break;
9800
9801 CASE_FLT_FN (BUILT_IN_ATAN):
9802 if (validate_arg (arg0, REAL_TYPE))
9803 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
9804 break;
9805
9806 CASE_FLT_FN (BUILT_IN_ASINH):
9807 if (validate_arg (arg0, REAL_TYPE))
9808 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
9809 break;
9810
9811 CASE_FLT_FN (BUILT_IN_ACOSH):
9812 if (validate_arg (arg0, REAL_TYPE))
9813 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
9814 &dconst1, NULL, true);
9815 break;
9816
9817 CASE_FLT_FN (BUILT_IN_ATANH):
9818 if (validate_arg (arg0, REAL_TYPE))
9819 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
9820 &dconstm1, &dconst1, false);
9821 break;
9822
9823 CASE_FLT_FN (BUILT_IN_SIN):
9824 if (validate_arg (arg0, REAL_TYPE))
9825 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
9826 break;
9827
9828 CASE_FLT_FN (BUILT_IN_COS):
9829 return fold_builtin_cos (arg0, type, fndecl);
9830 break;
9831
9832 CASE_FLT_FN (BUILT_IN_TAN):
9833 return fold_builtin_tan (arg0, type);
9834
9835 CASE_FLT_FN (BUILT_IN_CEXP):
9836 return fold_builtin_cexp (arg0, type);
9837
9838 CASE_FLT_FN (BUILT_IN_CEXPI):
9839 if (validate_arg (arg0, REAL_TYPE))
9840 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
9841 break;
9842
9843 CASE_FLT_FN (BUILT_IN_SINH):
9844 if (validate_arg (arg0, REAL_TYPE))
9845 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
9846 break;
9847
9848 CASE_FLT_FN (BUILT_IN_COSH):
9849 return fold_builtin_cosh (arg0, type, fndecl);
9850
9851 CASE_FLT_FN (BUILT_IN_TANH):
9852 if (validate_arg (arg0, REAL_TYPE))
9853 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
9854 break;
9855
9856 CASE_FLT_FN (BUILT_IN_ERF):
9857 if (validate_arg (arg0, REAL_TYPE))
9858 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
9859 break;
9860
9861 CASE_FLT_FN (BUILT_IN_ERFC):
9862 if (validate_arg (arg0, REAL_TYPE))
9863 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
9864 break;
9865
9866 CASE_FLT_FN (BUILT_IN_TGAMMA):
9867 if (validate_arg (arg0, REAL_TYPE))
9868 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
9869 break;
9870
9871 CASE_FLT_FN (BUILT_IN_EXP):
9872 return fold_builtin_exponent (fndecl, arg0, mpfr_exp);
9873
9874 CASE_FLT_FN (BUILT_IN_EXP2):
9875 return fold_builtin_exponent (fndecl, arg0, mpfr_exp2);
9876
9877 CASE_FLT_FN (BUILT_IN_EXP10):
9878 CASE_FLT_FN (BUILT_IN_POW10):
9879 return fold_builtin_exponent (fndecl, arg0, mpfr_exp10);
9880
9881 CASE_FLT_FN (BUILT_IN_EXPM1):
9882 if (validate_arg (arg0, REAL_TYPE))
9883 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
9884 break;
9885
9886 CASE_FLT_FN (BUILT_IN_LOG):
9887 return fold_builtin_logarithm (fndecl, arg0, mpfr_log);
9888
9889 CASE_FLT_FN (BUILT_IN_LOG2):
9890 return fold_builtin_logarithm (fndecl, arg0, mpfr_log2);
9891
9892 CASE_FLT_FN (BUILT_IN_LOG10):
9893 return fold_builtin_logarithm (fndecl, arg0, mpfr_log10);
9894
9895 CASE_FLT_FN (BUILT_IN_LOG1P):
9896 if (validate_arg (arg0, REAL_TYPE))
9897 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
9898 &dconstm1, NULL, false);
9899 break;
9900
9901 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
9902 CASE_FLT_FN (BUILT_IN_J0):
9903 if (validate_arg (arg0, REAL_TYPE))
9904 return do_mpfr_arg1 (arg0, type, mpfr_j0,
9905 NULL, NULL, 0);
9906 break;
9907
9908 CASE_FLT_FN (BUILT_IN_J1):
9909 if (validate_arg (arg0, REAL_TYPE))
9910 return do_mpfr_arg1 (arg0, type, mpfr_j1,
9911 NULL, NULL, 0);
9912 break;
9913
9914 CASE_FLT_FN (BUILT_IN_Y0):
9915 if (validate_arg (arg0, REAL_TYPE))
9916 return do_mpfr_arg1 (arg0, type, mpfr_y0,
9917 &dconst0, NULL, false);
9918 break;
9919
9920 CASE_FLT_FN (BUILT_IN_Y1):
9921 if (validate_arg (arg0, REAL_TYPE))
9922 return do_mpfr_arg1 (arg0, type, mpfr_y1,
9923 &dconst0, NULL, false);
9924 break;
9925 #endif
9926
9927 CASE_FLT_FN (BUILT_IN_NAN):
9928 case BUILT_IN_NAND32:
9929 case BUILT_IN_NAND64:
9930 case BUILT_IN_NAND128:
9931 return fold_builtin_nan (arg0, type, true);
9932
9933 CASE_FLT_FN (BUILT_IN_NANS):
9934 return fold_builtin_nan (arg0, type, false);
9935
9936 CASE_FLT_FN (BUILT_IN_FLOOR):
9937 return fold_builtin_floor (fndecl, arg0);
9938
9939 CASE_FLT_FN (BUILT_IN_CEIL):
9940 return fold_builtin_ceil (fndecl, arg0);
9941
9942 CASE_FLT_FN (BUILT_IN_TRUNC):
9943 return fold_builtin_trunc (fndecl, arg0);
9944
9945 CASE_FLT_FN (BUILT_IN_ROUND):
9946 return fold_builtin_round (fndecl, arg0);
9947
9948 CASE_FLT_FN (BUILT_IN_NEARBYINT):
9949 CASE_FLT_FN (BUILT_IN_RINT):
9950 return fold_trunc_transparent_mathfn (fndecl, arg0);
9951
9952 CASE_FLT_FN (BUILT_IN_LCEIL):
9953 CASE_FLT_FN (BUILT_IN_LLCEIL):
9954 CASE_FLT_FN (BUILT_IN_LFLOOR):
9955 CASE_FLT_FN (BUILT_IN_LLFLOOR):
9956 CASE_FLT_FN (BUILT_IN_LROUND):
9957 CASE_FLT_FN (BUILT_IN_LLROUND):
9958 return fold_builtin_int_roundingfn (fndecl, arg0);
9959
9960 CASE_FLT_FN (BUILT_IN_LRINT):
9961 CASE_FLT_FN (BUILT_IN_LLRINT):
9962 return fold_fixed_mathfn (fndecl, arg0);
9963
9964 case BUILT_IN_BSWAP32:
9965 case BUILT_IN_BSWAP64:
9966 return fold_builtin_bswap (fndecl, arg0);
9967
9968 CASE_INT_FN (BUILT_IN_FFS):
9969 CASE_INT_FN (BUILT_IN_CLZ):
9970 CASE_INT_FN (BUILT_IN_CTZ):
9971 CASE_INT_FN (BUILT_IN_POPCOUNT):
9972 CASE_INT_FN (BUILT_IN_PARITY):
9973 return fold_builtin_bitop (fndecl, arg0);
9974
9975 CASE_FLT_FN (BUILT_IN_SIGNBIT):
9976 return fold_builtin_signbit (arg0, type);
9977
9978 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
9979 return fold_builtin_significand (arg0, type);
9980
9981 CASE_FLT_FN (BUILT_IN_ILOGB):
9982 CASE_FLT_FN (BUILT_IN_LOGB):
9983 return fold_builtin_logb (arg0, type);
9984
9985 case BUILT_IN_ISASCII:
9986 return fold_builtin_isascii (arg0);
9987
9988 case BUILT_IN_TOASCII:
9989 return fold_builtin_toascii (arg0);
9990
9991 case BUILT_IN_ISDIGIT:
9992 return fold_builtin_isdigit (arg0);
9993
9994 CASE_FLT_FN (BUILT_IN_FINITE):
9995 case BUILT_IN_FINITED32:
9996 case BUILT_IN_FINITED64:
9997 case BUILT_IN_FINITED128:
9998 case BUILT_IN_ISFINITE:
9999 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISFINITE);
10000
10001 CASE_FLT_FN (BUILT_IN_ISINF):
10002 case BUILT_IN_ISINFD32:
10003 case BUILT_IN_ISINFD64:
10004 case BUILT_IN_ISINFD128:
10005 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISINF);
10006
10007 CASE_FLT_FN (BUILT_IN_ISNAN):
10008 case BUILT_IN_ISNAND32:
10009 case BUILT_IN_ISNAND64:
10010 case BUILT_IN_ISNAND128:
10011 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISNAN);
10012
10013 case BUILT_IN_PRINTF:
10014 case BUILT_IN_PRINTF_UNLOCKED:
10015 case BUILT_IN_VPRINTF:
10016 return fold_builtin_printf (fndecl, arg0, NULL_TREE, ignore, fcode);
10017
10018 default:
10019 break;
10020 }
10021
10022 return NULL_TREE;
10023
10024 }
10025
10026 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10027 IGNORE is true if the result of the function call is ignored. This
10028 function returns NULL_TREE if no simplification was possible. */
10029
10030 static tree
10031 fold_builtin_2 (tree fndecl, tree arg0, tree arg1, bool ignore)
10032 {
10033 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10034 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10035
10036 switch (fcode)
10037 {
10038 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
10039 CASE_FLT_FN (BUILT_IN_JN):
10040 if (validate_arg (arg0, INTEGER_TYPE)
10041 && validate_arg (arg1, REAL_TYPE))
10042 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10043 break;
10044
10045 CASE_FLT_FN (BUILT_IN_YN):
10046 if (validate_arg (arg0, INTEGER_TYPE)
10047 && validate_arg (arg1, REAL_TYPE))
10048 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10049 &dconst0, false);
10050 break;
10051
10052 CASE_FLT_FN (BUILT_IN_DREM):
10053 CASE_FLT_FN (BUILT_IN_REMAINDER):
10054 if (validate_arg (arg0, REAL_TYPE)
10055 && validate_arg (arg1, REAL_TYPE))
10056 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10057 break;
10058
10059 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10060 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10061 if (validate_arg (arg0, REAL_TYPE)
10062 && validate_arg (arg1, POINTER_TYPE))
10063 return do_mpfr_lgamma_r (arg0, arg1, type);
10064 break;
10065 #endif
10066
10067 CASE_FLT_FN (BUILT_IN_ATAN2):
10068 if (validate_arg (arg0, REAL_TYPE)
10069 && validate_arg (arg1, REAL_TYPE))
10070 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10071 break;
10072
10073 CASE_FLT_FN (BUILT_IN_FDIM):
10074 if (validate_arg (arg0, REAL_TYPE)
10075 && validate_arg (arg1, REAL_TYPE))
10076 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10077 break;
10078
10079 CASE_FLT_FN (BUILT_IN_HYPOT):
10080 return fold_builtin_hypot (fndecl, arg0, arg1, type);
10081
10082 CASE_FLT_FN (BUILT_IN_LDEXP):
10083 return fold_builtin_load_exponent (arg0, arg1, type, /*ldexp=*/true);
10084 CASE_FLT_FN (BUILT_IN_SCALBN):
10085 CASE_FLT_FN (BUILT_IN_SCALBLN):
10086 return fold_builtin_load_exponent (arg0, arg1, type, /*ldexp=*/false);
10087
10088 CASE_FLT_FN (BUILT_IN_FREXP):
10089 return fold_builtin_frexp (arg0, arg1, type);
10090
10091 CASE_FLT_FN (BUILT_IN_MODF):
10092 return fold_builtin_modf (arg0, arg1, type);
10093
10094 case BUILT_IN_BZERO:
10095 return fold_builtin_bzero (arg0, arg1, ignore);
10096
10097 case BUILT_IN_FPUTS:
10098 return fold_builtin_fputs (arg0, arg1, ignore, false, NULL_TREE);
10099
10100 case BUILT_IN_FPUTS_UNLOCKED:
10101 return fold_builtin_fputs (arg0, arg1, ignore, true, NULL_TREE);
10102
10103 case BUILT_IN_STRSTR:
10104 return fold_builtin_strstr (arg0, arg1, type);
10105
10106 case BUILT_IN_STRCAT:
10107 return fold_builtin_strcat (arg0, arg1);
10108
10109 case BUILT_IN_STRSPN:
10110 return fold_builtin_strspn (arg0, arg1);
10111
10112 case BUILT_IN_STRCSPN:
10113 return fold_builtin_strcspn (arg0, arg1);
10114
10115 case BUILT_IN_STRCHR:
10116 case BUILT_IN_INDEX:
10117 return fold_builtin_strchr (arg0, arg1, type);
10118
10119 case BUILT_IN_STRRCHR:
10120 case BUILT_IN_RINDEX:
10121 return fold_builtin_strrchr (arg0, arg1, type);
10122
10123 case BUILT_IN_STRCPY:
10124 return fold_builtin_strcpy (fndecl, arg0, arg1, NULL_TREE);
10125
10126 case BUILT_IN_STRCMP:
10127 return fold_builtin_strcmp (arg0, arg1);
10128
10129 case BUILT_IN_STRPBRK:
10130 return fold_builtin_strpbrk (arg0, arg1, type);
10131
10132 case BUILT_IN_EXPECT:
10133 return fold_builtin_expect (arg0);
10134
10135 CASE_FLT_FN (BUILT_IN_POW):
10136 return fold_builtin_pow (fndecl, arg0, arg1, type);
10137
10138 CASE_FLT_FN (BUILT_IN_POWI):
10139 return fold_builtin_powi (fndecl, arg0, arg1, type);
10140
10141 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10142 return fold_builtin_copysign (fndecl, arg0, arg1, type);
10143
10144 CASE_FLT_FN (BUILT_IN_FMIN):
10145 return fold_builtin_fmin_fmax (arg0, arg1, type, /*max=*/false);
10146
10147 CASE_FLT_FN (BUILT_IN_FMAX):
10148 return fold_builtin_fmin_fmax (arg0, arg1, type, /*max=*/true);
10149
10150 case BUILT_IN_ISGREATER:
10151 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNLE_EXPR, LE_EXPR);
10152 case BUILT_IN_ISGREATEREQUAL:
10153 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNLT_EXPR, LT_EXPR);
10154 case BUILT_IN_ISLESS:
10155 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNGE_EXPR, GE_EXPR);
10156 case BUILT_IN_ISLESSEQUAL:
10157 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNGT_EXPR, GT_EXPR);
10158 case BUILT_IN_ISLESSGREATER:
10159 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10160 case BUILT_IN_ISUNORDERED:
10161 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNORDERED_EXPR,
10162 NOP_EXPR);
10163
10164 /* We do the folding for va_start in the expander. */
10165 case BUILT_IN_VA_START:
10166 break;
10167
10168 case BUILT_IN_SPRINTF:
10169 return fold_builtin_sprintf (arg0, arg1, NULL_TREE, ignore);
10170
10171 case BUILT_IN_OBJECT_SIZE:
10172 return fold_builtin_object_size (arg0, arg1);
10173
10174 case BUILT_IN_PRINTF:
10175 case BUILT_IN_PRINTF_UNLOCKED:
10176 case BUILT_IN_VPRINTF:
10177 return fold_builtin_printf (fndecl, arg0, arg1, ignore, fcode);
10178
10179 case BUILT_IN_PRINTF_CHK:
10180 case BUILT_IN_VPRINTF_CHK:
10181 if (!validate_arg (arg0, INTEGER_TYPE)
10182 || TREE_SIDE_EFFECTS (arg0))
10183 return NULL_TREE;
10184 else
10185 return fold_builtin_printf (fndecl, arg1, NULL_TREE, ignore, fcode);
10186 break;
10187
10188 case BUILT_IN_FPRINTF:
10189 case BUILT_IN_FPRINTF_UNLOCKED:
10190 case BUILT_IN_VFPRINTF:
10191 return fold_builtin_fprintf (fndecl, arg0, arg1, NULL_TREE,
10192 ignore, fcode);
10193
10194 default:
10195 break;
10196 }
10197 return NULL_TREE;
10198 }
10199
10200 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10201 and ARG2. IGNORE is true if the result of the function call is ignored.
10202 This function returns NULL_TREE if no simplification was possible. */
10203
10204 static tree
10205 fold_builtin_3 (tree fndecl, tree arg0, tree arg1, tree arg2, bool ignore)
10206 {
10207 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10208 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10209 switch (fcode)
10210 {
10211
10212 CASE_FLT_FN (BUILT_IN_SINCOS):
10213 return fold_builtin_sincos (arg0, arg1, arg2);
10214
10215 CASE_FLT_FN (BUILT_IN_FMA):
10216 if (validate_arg (arg0, REAL_TYPE)
10217 && validate_arg (arg1, REAL_TYPE)
10218 && validate_arg (arg2, REAL_TYPE))
10219 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
10220 break;
10221
10222 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
10223 CASE_FLT_FN (BUILT_IN_REMQUO):
10224 if (validate_arg (arg0, REAL_TYPE)
10225 && validate_arg (arg1, REAL_TYPE)
10226 && validate_arg (arg2, POINTER_TYPE))
10227 return do_mpfr_remquo (arg0, arg1, arg2);
10228 break;
10229 #endif
10230
10231 case BUILT_IN_MEMSET:
10232 return fold_builtin_memset (arg0, arg1, arg2, type, ignore);
10233
10234 case BUILT_IN_BCOPY:
10235 return fold_builtin_memory_op (arg1, arg0, arg2, void_type_node, true, /*endp=*/3);
10236
10237 case BUILT_IN_MEMCPY:
10238 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/0);
10239
10240 case BUILT_IN_MEMPCPY:
10241 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/1);
10242
10243 case BUILT_IN_MEMMOVE:
10244 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/3);
10245
10246 case BUILT_IN_STRNCAT:
10247 return fold_builtin_strncat (arg0, arg1, arg2);
10248
10249 case BUILT_IN_STRNCPY:
10250 return fold_builtin_strncpy (fndecl, arg0, arg1, arg2, NULL_TREE);
10251
10252 case BUILT_IN_STRNCMP:
10253 return fold_builtin_strncmp (arg0, arg1, arg2);
10254
10255 case BUILT_IN_MEMCHR:
10256 return fold_builtin_memchr (arg0, arg1, arg2, type);
10257
10258 case BUILT_IN_BCMP:
10259 case BUILT_IN_MEMCMP:
10260 return fold_builtin_memcmp (arg0, arg1, arg2);
10261
10262 case BUILT_IN_SPRINTF:
10263 return fold_builtin_sprintf (arg0, arg1, arg2, ignore);
10264
10265 case BUILT_IN_STRCPY_CHK:
10266 case BUILT_IN_STPCPY_CHK:
10267 return fold_builtin_stxcpy_chk (fndecl, arg0, arg1, arg2, NULL_TREE,
10268 ignore, fcode);
10269
10270 case BUILT_IN_STRCAT_CHK:
10271 return fold_builtin_strcat_chk (fndecl, arg0, arg1, arg2);
10272
10273 case BUILT_IN_PRINTF_CHK:
10274 case BUILT_IN_VPRINTF_CHK:
10275 if (!validate_arg (arg0, INTEGER_TYPE)
10276 || TREE_SIDE_EFFECTS (arg0))
10277 return NULL_TREE;
10278 else
10279 return fold_builtin_printf (fndecl, arg1, arg2, ignore, fcode);
10280 break;
10281
10282 case BUILT_IN_FPRINTF:
10283 case BUILT_IN_FPRINTF_UNLOCKED:
10284 case BUILT_IN_VFPRINTF:
10285 return fold_builtin_fprintf (fndecl, arg0, arg1, arg2, ignore, fcode);
10286
10287 case BUILT_IN_FPRINTF_CHK:
10288 case BUILT_IN_VFPRINTF_CHK:
10289 if (!validate_arg (arg1, INTEGER_TYPE)
10290 || TREE_SIDE_EFFECTS (arg1))
10291 return NULL_TREE;
10292 else
10293 return fold_builtin_fprintf (fndecl, arg0, arg2, NULL_TREE,
10294 ignore, fcode);
10295
10296 default:
10297 break;
10298 }
10299 return NULL_TREE;
10300 }
10301
10302 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10303 ARG2, and ARG3. IGNORE is true if the result of the function call is
10304 ignored. This function returns NULL_TREE if no simplification was
10305 possible. */
10306
10307 static tree
10308 fold_builtin_4 (tree fndecl, tree arg0, tree arg1, tree arg2, tree arg3,
10309 bool ignore)
10310 {
10311 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10312
10313 switch (fcode)
10314 {
10315 case BUILT_IN_MEMCPY_CHK:
10316 case BUILT_IN_MEMPCPY_CHK:
10317 case BUILT_IN_MEMMOVE_CHK:
10318 case BUILT_IN_MEMSET_CHK:
10319 return fold_builtin_memory_chk (fndecl, arg0, arg1, arg2, arg3,
10320 NULL_TREE, ignore,
10321 DECL_FUNCTION_CODE (fndecl));
10322
10323 case BUILT_IN_STRNCPY_CHK:
10324 return fold_builtin_strncpy_chk (arg0, arg1, arg2, arg3, NULL_TREE);
10325
10326 case BUILT_IN_STRNCAT_CHK:
10327 return fold_builtin_strncat_chk (fndecl, arg0, arg1, arg2, arg3);
10328
10329 case BUILT_IN_FPRINTF_CHK:
10330 case BUILT_IN_VFPRINTF_CHK:
10331 if (!validate_arg (arg1, INTEGER_TYPE)
10332 || TREE_SIDE_EFFECTS (arg1))
10333 return NULL_TREE;
10334 else
10335 return fold_builtin_fprintf (fndecl, arg0, arg2, arg3,
10336 ignore, fcode);
10337 break;
10338
10339 default:
10340 break;
10341 }
10342 return NULL_TREE;
10343 }
10344
10345 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10346 arguments, where NARGS <= 4. IGNORE is true if the result of the
10347 function call is ignored. This function returns NULL_TREE if no
10348 simplification was possible. Note that this only folds builtins with
10349 fixed argument patterns. Foldings that do varargs-to-varargs
10350 transformations, or that match calls with more than 4 arguments,
10351 need to be handled with fold_builtin_varargs instead. */
10352
10353 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10354
10355 static tree
10356 fold_builtin_n (tree fndecl, tree *args, int nargs, bool ignore)
10357 {
10358 tree ret = NULL_TREE;
10359 switch (nargs)
10360 {
10361 case 0:
10362 ret = fold_builtin_0 (fndecl, ignore);
10363 break;
10364 case 1:
10365 ret = fold_builtin_1 (fndecl, args[0], ignore);
10366 break;
10367 case 2:
10368 ret = fold_builtin_2 (fndecl, args[0], args[1], ignore);
10369 break;
10370 case 3:
10371 ret = fold_builtin_3 (fndecl, args[0], args[1], args[2], ignore);
10372 break;
10373 case 4:
10374 ret = fold_builtin_4 (fndecl, args[0], args[1], args[2], args[3],
10375 ignore);
10376 break;
10377 default:
10378 break;
10379 }
10380 if (ret)
10381 {
10382 ret = build1 (NOP_EXPR, GENERIC_TREE_TYPE (ret), ret);
10383 TREE_NO_WARNING (ret) = 1;
10384 return ret;
10385 }
10386 return NULL_TREE;
10387 }
10388
10389 /* Builtins with folding operations that operate on "..." arguments
10390 need special handling; we need to store the arguments in a convenient
10391 data structure before attempting any folding. Fortunately there are
10392 only a few builtins that fall into this category. FNDECL is the
10393 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
10394 result of the function call is ignored. */
10395
10396 static tree
10397 fold_builtin_varargs (tree fndecl, tree exp, bool ignore ATTRIBUTE_UNUSED)
10398 {
10399 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10400 tree ret = NULL_TREE;
10401
10402 switch (fcode)
10403 {
10404 case BUILT_IN_SPRINTF_CHK:
10405 case BUILT_IN_VSPRINTF_CHK:
10406 ret = fold_builtin_sprintf_chk (exp, fcode);
10407 break;
10408
10409 case BUILT_IN_SNPRINTF_CHK:
10410 case BUILT_IN_VSNPRINTF_CHK:
10411 ret = fold_builtin_snprintf_chk (exp, NULL_TREE, fcode);
10412 break;
10413 default:
10414 break;
10415 }
10416 if (ret)
10417 {
10418 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10419 TREE_NO_WARNING (ret) = 1;
10420 return ret;
10421 }
10422 return NULL_TREE;
10423 }
10424
10425 /* A wrapper function for builtin folding that prevents warnings for
10426 "statement without effect" and the like, caused by removing the
10427 call node earlier than the warning is generated. */
10428
10429 tree
10430 fold_call_expr (tree exp, bool ignore)
10431 {
10432 tree ret = NULL_TREE;
10433 tree fndecl = get_callee_fndecl (exp);
10434 if (fndecl
10435 && TREE_CODE (fndecl) == FUNCTION_DECL
10436 && DECL_BUILT_IN (fndecl))
10437 {
10438 /* FIXME: Don't use a list in this interface. */
10439 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10440 return targetm.fold_builtin (fndecl, CALL_EXPR_ARGS (exp), ignore);
10441 else
10442 {
10443 int nargs = call_expr_nargs (exp);
10444 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
10445 {
10446 tree *args = CALL_EXPR_ARGP (exp);
10447 ret = fold_builtin_n (fndecl, args, nargs, ignore);
10448 }
10449 if (!ret)
10450 ret = fold_builtin_varargs (fndecl, exp, ignore);
10451 if (ret)
10452 {
10453 /* Propagate location information from original call to
10454 expansion of builtin. Otherwise things like
10455 maybe_emit_chk_warning, which operate on the expansion
10456 of a builtin, will use the wrong location information. */
10457 if (CAN_HAVE_LOCATION_P (exp) && EXPR_HAS_LOCATION (exp))
10458 {
10459 tree realret = ret;
10460 if (TREE_CODE (ret) == NOP_EXPR)
10461 realret = TREE_OPERAND (ret, 0);
10462 if (CAN_HAVE_LOCATION_P (realret)
10463 && !EXPR_HAS_LOCATION (realret))
10464 SET_EXPR_LOCATION (realret, EXPR_LOCATION (exp));
10465 }
10466 return ret;
10467 }
10468 }
10469 }
10470 return NULL_TREE;
10471 }
10472
10473 /* Conveniently construct a function call expression. FNDECL names the
10474 function to be called and ARGLIST is a TREE_LIST of arguments. */
10475
10476 tree
10477 build_function_call_expr (tree fndecl, tree arglist)
10478 {
10479 tree fntype = TREE_TYPE (fndecl);
10480 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10481 int n = list_length (arglist);
10482 tree *argarray = (tree *) alloca (n * sizeof (tree));
10483 int i;
10484
10485 for (i = 0; i < n; i++, arglist = TREE_CHAIN (arglist))
10486 argarray[i] = TREE_VALUE (arglist);
10487 return fold_builtin_call_array (TREE_TYPE (fntype), fn, n, argarray);
10488 }
10489
10490 /* Conveniently construct a function call expression. FNDECL names the
10491 function to be called, N is the number of arguments, and the "..."
10492 parameters are the argument expressions. */
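/* For example, the strcpy folding above uses
   build_call_expr (fn, 3, dest, src, len) to construct the replacement
   memcpy call.  */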
10493
10494 tree
10495 build_call_expr (tree fndecl, int n, ...)
10496 {
10497 va_list ap;
10498 tree fntype = TREE_TYPE (fndecl);
10499 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10500 tree *argarray = (tree *) alloca (n * sizeof (tree));
10501 int i;
10502
10503 va_start (ap, n);
10504 for (i = 0; i < n; i++)
10505 argarray[i] = va_arg (ap, tree);
10506 va_end (ap);
10507 return fold_builtin_call_array (TREE_TYPE (fntype), fn, n, argarray);
10508 }
10509
10510 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
10511 N arguments are passed in the array ARGARRAY. */
10512
10513 tree
10514 fold_builtin_call_array (tree type,
10515 tree fn,
10516 int n,
10517 tree *argarray)
10518 {
10519 tree ret = NULL_TREE;
10520 int i;
10521 tree exp;
10522
10523 if (TREE_CODE (fn) == ADDR_EXPR)
10524 {
10525 tree fndecl = TREE_OPERAND (fn, 0);
10526 if (TREE_CODE (fndecl) == FUNCTION_DECL
10527 && DECL_BUILT_IN (fndecl))
10528 {
10529 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10530 {
10531 tree arglist = NULL_TREE;
10532 for (i = n - 1; i >= 0; i--)
10533 arglist = tree_cons (NULL_TREE, argarray[i], arglist);
10534 ret = targetm.fold_builtin (fndecl, arglist, false);
10535 if (ret)
10536 return ret;
10537 }
10538 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
10539 {
10540 /* First try the transformations that don't require consing up
10541 an exp. */
10542 ret = fold_builtin_n (fndecl, argarray, n, false);
10543 if (ret)
10544 return ret;
10545 }
10546
10547 /* If we got this far, we need to build an exp. */
10548 exp = build_call_array (type, fn, n, argarray);
10549 ret = fold_builtin_varargs (fndecl, exp, false);
10550 return ret ? ret : exp;
10551 }
10552 }
10553
10554 return build_call_array (type, fn, n, argarray);
10555 }
10556
10557 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10558 along with N new arguments specified as the "..." parameters. SKIP
10559 is the number of arguments in EXP to be omitted. This function is used
10560 to do varargs-to-varargs transformations. */
10561
10562 static tree
10563 rewrite_call_expr (tree exp, int skip, tree fndecl, int n, ...)
10564 {
10565 int oldnargs = call_expr_nargs (exp);
10566 int nargs = oldnargs - skip + n;
10567 tree fntype = TREE_TYPE (fndecl);
10568 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10569 tree *buffer;
10570
10571 if (n > 0)
10572 {
10573 int i, j;
10574 va_list ap;
10575
10576 buffer = alloca (nargs * sizeof (tree));
10577 va_start (ap, n);
10578 for (i = 0; i < n; i++)
10579 buffer[i] = va_arg (ap, tree);
10580 va_end (ap);
10581 for (j = skip; j < oldnargs; j++, i++)
10582 buffer[i] = CALL_EXPR_ARG (exp, j);
10583 }
10584 else
10585 buffer = CALL_EXPR_ARGP (exp) + skip;
10586
10587 return fold (build_call_array (TREE_TYPE (exp), fn, nargs, buffer));
10588 }
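/* Editor's illustration (not part of the original sources): the _chk
   folders later in this file use this helper to strip the checking
   arguments, e.g.

     rewrite_call_expr (exp, 4, fn, 2, dest, fmt);

   drops the first four arguments of EXP, keeps any remaining ones and
   prepends DEST and FMT, so __sprintf_chk (dest, flag, size, fmt, ...)
   becomes sprintf (dest, fmt, ...).  */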
10589
10590 /* Validate a single argument ARG against a tree code CODE representing
10591 a type. */
10592
10593 static bool
10594 validate_arg (tree arg, enum tree_code code)
10595 {
10596 if (!arg)
10597 return false;
10598 else if (code == POINTER_TYPE)
10599 return POINTER_TYPE_P (TREE_TYPE (arg));
10600 return code == TREE_CODE (TREE_TYPE (arg));
10601 }
10602
10603 /* This function validates the types of a function call argument list
10604 against a specified list of tree_codes. If the last specifier is a 0,
10605 that represents an ellipsis; otherwise the last specifier must be a
10606 VOID_TYPE. */
10607
10608 bool
10609 validate_arglist (tree callexpr, ...)
10610 {
10611 enum tree_code code;
10612 bool res = 0;
10613 va_list ap;
10614 call_expr_arg_iterator iter;
10615 tree arg;
10616
10617 va_start (ap, callexpr);
10618 init_call_expr_arg_iterator (callexpr, &iter);
10619
10620 do
10621 {
10622 code = va_arg (ap, enum tree_code);
10623 switch (code)
10624 {
10625 case 0:
10626 /* This signifies an ellipsis; any further arguments are all OK. */
10627 res = true;
10628 goto end;
10629 case VOID_TYPE:
10630 /* This signifies an endlink; if no arguments remain, return
10631 true, otherwise return false. */
10632 res = !more_call_expr_args_p (&iter);
10633 goto end;
10634 default:
10635 /* If no parameters remain or the parameter's code does not
10636 match the specified code, return false. Otherwise continue
10637 checking any remaining arguments. */
10638 arg = next_call_expr_arg (&iter);
10639 if (!validate_arg (arg, code))
10640 goto end;
10641 break;
10642 }
10643 }
10644 while (1);
10645
10646 /* We need gotos here so that every exit path reaches the single
10647 va_end call below. */
10648 end: ;
10649 va_end (ap);
10650
10651 return res;
10652 }
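/* Editor's note (illustrative, not part of the original sources): the
   specifier list ends with VOID_TYPE for a fixed argument list and with
   0 when trailing arguments are allowed, e.g.

     validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE,
                       VOID_TYPE)

   checks a memset-style call, while

     validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, 0)

   accepts any extra arguments after the first two pointers.  */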
10653
10654 /* Default target-specific builtin expander that does nothing. */
10655
10656 rtx
10657 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10658 rtx target ATTRIBUTE_UNUSED,
10659 rtx subtarget ATTRIBUTE_UNUSED,
10660 enum machine_mode mode ATTRIBUTE_UNUSED,
10661 int ignore ATTRIBUTE_UNUSED)
10662 {
10663 return NULL_RTX;
10664 }
10665
10666 /* Returns true if EXP represents data that would potentially reside
10667 in a readonly section. */
10668
10669 static bool
10670 readonly_data_expr (tree exp)
10671 {
10672 STRIP_NOPS (exp);
10673
10674 if (TREE_CODE (exp) != ADDR_EXPR)
10675 return false;
10676
10677 exp = get_base_address (TREE_OPERAND (exp, 0));
10678 if (!exp)
10679 return false;
10680
10681 /* Make sure we call decl_readonly_section only for trees it
10682 can handle (since it returns true for everything it doesn't
10683 understand). */
10684 if (TREE_CODE (exp) == STRING_CST
10685 || TREE_CODE (exp) == CONSTRUCTOR
10686 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
10687 return decl_readonly_section (exp, 0);
10688 else
10689 return false;
10690 }
10691
10692 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
10693 to the call, and TYPE is its return type.
10694
10695 Return NULL_TREE if no simplification was possible, otherwise return the
10696 simplified form of the call as a tree.
10697
10698 The simplified form may be a constant or other expression which
10699 computes the same value, but in a more efficient manner (including
10700 calls to other builtin functions).
10701
10702 The call may contain arguments which need to be evaluated, but
10703 which are not useful to determine the result of the call. In
10704 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10705 COMPOUND_EXPR will be an argument which must be evaluated.
10706 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10707 COMPOUND_EXPR in the chain will contain the tree for the simplified
10708 form of the builtin function call. */
10709
10710 static tree
10711 fold_builtin_strstr (tree s1, tree s2, tree type)
10712 {
10713 if (!validate_arg (s1, POINTER_TYPE)
10714 || !validate_arg (s2, POINTER_TYPE))
10715 return NULL_TREE;
10716 else
10717 {
10718 tree fn;
10719 const char *p1, *p2;
10720
10721 p2 = c_getstr (s2);
10722 if (p2 == NULL)
10723 return NULL_TREE;
10724
10725 p1 = c_getstr (s1);
10726 if (p1 != NULL)
10727 {
10728 const char *r = strstr (p1, p2);
10729 tree tem;
10730
10731 if (r == NULL)
10732 return build_int_cst (TREE_TYPE (s1), 0);
10733
10734 /* Return an offset into the constant string argument. */
10735 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
10736 s1, size_int (r - p1));
10737 return fold_convert (type, tem);
10738 }
10739
10740 /* The argument is const char *, and the result is char *, so we need
10741 a type conversion here to avoid a warning. */
10742 if (p2[0] == '\0')
10743 return fold_convert (type, s1);
10744
10745 if (p2[1] != '\0')
10746 return NULL_TREE;
10747
10748 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
10749 if (!fn)
10750 return NULL_TREE;
10751
10752 /* New argument list transforming strstr(s1, s2) to
10753 strchr(s1, s2[0]). */
10754 return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
10755 }
10756 }
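/* Editor's illustration (not part of the original sources): with a
   constant needle the folder above performs, at the source level,

     strstr (s, "")       ->  (char *) s
     strstr (s, "q")      ->  strchr (s, 'q')
     strstr ("abc", "b")  ->  "abc" + 1

   and leaves any other form alone.  */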
10757
10758 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
10759 the call, and TYPE is its return type.
10760
10761 Return NULL_TREE if no simplification was possible, otherwise return the
10762 simplified form of the call as a tree.
10763
10764 The simplified form may be a constant or other expression which
10765 computes the same value, but in a more efficient manner (including
10766 calls to other builtin functions).
10767
10768 The call may contain arguments which need to be evaluated, but
10769 which are not useful to determine the result of the call. In
10770 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10771 COMPOUND_EXPR will be an argument which must be evaluated.
10772 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10773 COMPOUND_EXPR in the chain will contain the tree for the simplified
10774 form of the builtin function call. */
10775
10776 static tree
10777 fold_builtin_strchr (tree s1, tree s2, tree type)
10778 {
10779 if (!validate_arg (s1, POINTER_TYPE)
10780 || !validate_arg (s2, INTEGER_TYPE))
10781 return NULL_TREE;
10782 else
10783 {
10784 const char *p1;
10785
10786 if (TREE_CODE (s2) != INTEGER_CST)
10787 return NULL_TREE;
10788
10789 p1 = c_getstr (s1);
10790 if (p1 != NULL)
10791 {
10792 char c;
10793 const char *r;
10794 tree tem;
10795
10796 if (target_char_cast (s2, &c))
10797 return NULL_TREE;
10798
10799 r = strchr (p1, c);
10800
10801 if (r == NULL)
10802 return build_int_cst (TREE_TYPE (s1), 0);
10803
10804 /* Return an offset into the constant string argument. */
10805 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
10806 s1, size_int (r - p1));
10807 return fold_convert (type, tem);
10808 }
10809 return NULL_TREE;
10810 }
10811 }
10812
10813 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
10814 the call, and TYPE is its return type.
10815
10816 Return NULL_TREE if no simplification was possible, otherwise return the
10817 simplified form of the call as a tree.
10818
10819 The simplified form may be a constant or other expression which
10820 computes the same value, but in a more efficient manner (including
10821 calls to other builtin functions).
10822
10823 The call may contain arguments which need to be evaluated, but
10824 which are not useful to determine the result of the call. In
10825 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10826 COMPOUND_EXPR will be an argument which must be evaluated.
10827 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10828 COMPOUND_EXPR in the chain will contain the tree for the simplified
10829 form of the builtin function call. */
10830
10831 static tree
10832 fold_builtin_strrchr (tree s1, tree s2, tree type)
10833 {
10834 if (!validate_arg (s1, POINTER_TYPE)
10835 || !validate_arg (s2, INTEGER_TYPE))
10836 return NULL_TREE;
10837 else
10838 {
10839 tree fn;
10840 const char *p1;
10841
10842 if (TREE_CODE (s2) != INTEGER_CST)
10843 return NULL_TREE;
10844
10845 p1 = c_getstr (s1);
10846 if (p1 != NULL)
10847 {
10848 char c;
10849 const char *r;
10850 tree tem;
10851
10852 if (target_char_cast (s2, &c))
10853 return NULL_TREE;
10854
10855 r = strrchr (p1, c);
10856
10857 if (r == NULL)
10858 return build_int_cst (TREE_TYPE (s1), 0);
10859
10860 /* Return an offset into the constant string argument. */
10861 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
10862 s1, size_int (r - p1));
10863 return fold_convert (type, tem);
10864 }
10865
10866 if (! integer_zerop (s2))
10867 return NULL_TREE;
10868
10869 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
10870 if (!fn)
10871 return NULL_TREE;
10872
10873 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
10874 return build_call_expr (fn, 2, s1, s2);
10875 }
10876 }
10877
10878 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
10879 to the call, and TYPE is its return type.
10880
10881 Return NULL_TREE if no simplification was possible, otherwise return the
10882 simplified form of the call as a tree.
10883
10884 The simplified form may be a constant or other expression which
10885 computes the same value, but in a more efficient manner (including
10886 calls to other builtin functions).
10887
10888 The call may contain arguments which need to be evaluated, but
10889 which are not useful to determine the result of the call. In
10890 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10891 COMPOUND_EXPR will be an argument which must be evaluated.
10892 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10893 COMPOUND_EXPR in the chain will contain the tree for the simplified
10894 form of the builtin function call. */
10895
10896 static tree
10897 fold_builtin_strpbrk (tree s1, tree s2, tree type)
10898 {
10899 if (!validate_arg (s1, POINTER_TYPE)
10900 || !validate_arg (s2, POINTER_TYPE))
10901 return NULL_TREE;
10902 else
10903 {
10904 tree fn;
10905 const char *p1, *p2;
10906
10907 p2 = c_getstr (s2);
10908 if (p2 == NULL)
10909 return NULL_TREE;
10910
10911 p1 = c_getstr (s1);
10912 if (p1 != NULL)
10913 {
10914 const char *r = strpbrk (p1, p2);
10915 tree tem;
10916
10917 if (r == NULL)
10918 return build_int_cst (TREE_TYPE (s1), 0);
10919
10920 /* Return an offset into the constant string argument. */
10921 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
10922 s1, size_int (r - p1));
10923 return fold_convert (type, tem);
10924 }
10925
10926 if (p2[0] == '\0')
10927 /* strpbrk(x, "") == NULL.
10928 Evaluate and ignore s1 in case it had side-effects. */
10929 return omit_one_operand (TREE_TYPE (s1), integer_zero_node, s1);
10930
10931 if (p2[1] != '\0')
10932 return NULL_TREE; /* Really call strpbrk. */
10933
10934 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
10935 if (!fn)
10936 return NULL_TREE;
10937
10938 /* New argument list transforming strpbrk(s1, s2) to
10939 strchr(s1, s2[0]). */
10940 return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
10941 }
10942 }
10943
10944 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
10945 to the call.
10946
10947 Return NULL_TREE if no simplification was possible, otherwise return the
10948 simplified form of the call as a tree.
10949
10950 The simplified form may be a constant or other expression which
10951 computes the same value, but in a more efficient manner (including
10952 calls to other builtin functions).
10953
10954 The call may contain arguments which need to be evaluated, but
10955 which are not useful to determine the result of the call. In
10956 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10957 COMPOUND_EXPR will be an argument which must be evaluated.
10958 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10959 COMPOUND_EXPR in the chain will contain the tree for the simplified
10960 form of the builtin function call. */
10961
10962 static tree
10963 fold_builtin_strcat (tree dst, tree src)
10964 {
10965 if (!validate_arg (dst, POINTER_TYPE)
10966 || !validate_arg (src, POINTER_TYPE))
10967 return NULL_TREE;
10968 else
10969 {
10970 const char *p = c_getstr (src);
10971
10972 /* If the string length is zero, return the dst parameter. */
10973 if (p && *p == '\0')
10974 return dst;
10975
10976 return NULL_TREE;
10977 }
10978 }
10979
10980 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
10981 arguments to the call.
10982
10983 Return NULL_TREE if no simplification was possible, otherwise return the
10984 simplified form of the call as a tree.
10985
10986 The simplified form may be a constant or other expression which
10987 computes the same value, but in a more efficient manner (including
10988 calls to other builtin functions).
10989
10990 The call may contain arguments which need to be evaluated, but
10991 which are not useful to determine the result of the call. In
10992 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10993 COMPOUND_EXPR will be an argument which must be evaluated.
10994 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10995 COMPOUND_EXPR in the chain will contain the tree for the simplified
10996 form of the builtin function call. */
10997
10998 static tree
10999 fold_builtin_strncat (tree dst, tree src, tree len)
11000 {
11001 if (!validate_arg (dst, POINTER_TYPE)
11002 || !validate_arg (src, POINTER_TYPE)
11003 || !validate_arg (len, INTEGER_TYPE))
11004 return NULL_TREE;
11005 else
11006 {
11007 const char *p = c_getstr (src);
11008
11009 /* If the requested length is zero, or the src parameter string
11010 length is zero, return the dst parameter. */
11011 if (integer_zerop (len) || (p && *p == '\0'))
11012 return omit_two_operands (TREE_TYPE (dst), dst, src, len);
11013
11014 /* If the requested len is greater than or equal to the string
11015 length, call strcat. */
11016 if (TREE_CODE (len) == INTEGER_CST && p
11017 && compare_tree_int (len, strlen (p)) >= 0)
11018 {
11019 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
11020
11021 /* If the replacement _DECL isn't initialized, don't do the
11022 transformation. */
11023 if (!fn)
11024 return NULL_TREE;
11025
11026 return build_call_expr (fn, 2, dst, src);
11027 }
11028 return NULL_TREE;
11029 }
11030 }
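/* Editor's illustration (not part of the original sources): at the
   source level the folder above performs

     strncat (dst, src, 0)    ->  dst
     strncat (dst, "", n)     ->  dst
     strncat (dst, "abc", 7)  ->  strcat (dst, "abc")   (7 >= strlen ("abc"))

   where the discarded arguments are still evaluated for side effects
   via omit_two_operands.  */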
11031
11032 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11033 to the call.
11034
11035 Return NULL_TREE if no simplification was possible, otherwise return the
11036 simplified form of the call as a tree.
11037
11038 The simplified form may be a constant or other expression which
11039 computes the same value, but in a more efficient manner (including
11040 calls to other builtin functions).
11041
11042 The call may contain arguments which need to be evaluated, but
11043 which are not useful to determine the result of the call. In
11044 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11045 COMPOUND_EXPR will be an argument which must be evaluated.
11046 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11047 COMPOUND_EXPR in the chain will contain the tree for the simplified
11048 form of the builtin function call. */
11049
11050 static tree
11051 fold_builtin_strspn (tree s1, tree s2)
11052 {
11053 if (!validate_arg (s1, POINTER_TYPE)
11054 || !validate_arg (s2, POINTER_TYPE))
11055 return NULL_TREE;
11056 else
11057 {
11058 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11059
11060 /* If both arguments are constants, evaluate at compile-time. */
11061 if (p1 && p2)
11062 {
11063 const size_t r = strspn (p1, p2);
11064 return size_int (r);
11065 }
11066
11067 /* If either argument is "", return NULL_TREE. */
11068 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11069 /* Evaluate and ignore both arguments in case either one has
11070 side-effects. */
11071 return omit_two_operands (integer_type_node, integer_zero_node,
11072 s1, s2);
11073 return NULL_TREE;
11074 }
11075 }
11076
11077 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11078 to the call.
11079
11080 Return NULL_TREE if no simplification was possible, otherwise return the
11081 simplified form of the call as a tree.
11082
11083 The simplified form may be a constant or other expression which
11084 computes the same value, but in a more efficient manner (including
11085 calls to other builtin functions).
11086
11087 The call may contain arguments which need to be evaluated, but
11088 which are not useful to determine the result of the call. In
11089 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11090 COMPOUND_EXPR will be an argument which must be evaluated.
11091 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11092 COMPOUND_EXPR in the chain will contain the tree for the simplified
11093 form of the builtin function call. */
11094
11095 static tree
11096 fold_builtin_strcspn (tree s1, tree s2)
11097 {
11098 if (!validate_arg (s1, POINTER_TYPE)
11099 || !validate_arg (s2, POINTER_TYPE))
11100 return NULL_TREE;
11101 else
11102 {
11103 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11104
11105 /* If both arguments are constants, evaluate at compile-time. */
11106 if (p1 && p2)
11107 {
11108 const size_t r = strcspn (p1, p2);
11109 return size_int (r);
11110 }
11111
11112 /* If the first argument is "", return NULL_TREE. */
11113 if (p1 && *p1 == '\0')
11114 {
11115 /* Evaluate and ignore argument s2 in case it has
11116 side-effects. */
11117 return omit_one_operand (integer_type_node,
11118 integer_zero_node, s2);
11119 }
11120
11121 /* If the second argument is "", return __builtin_strlen(s1). */
11122 if (p2 && *p2 == '\0')
11123 {
11124 tree fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11125
11126 /* If the replacement _DECL isn't initialized, don't do the
11127 transformation. */
11128 if (!fn)
11129 return NULL_TREE;
11130
11131 return build_call_expr (fn, 1, s1);
11132 }
11133 return NULL_TREE;
11134 }
11135 }
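/* Editor's illustration (not part of the original sources): the two
   folders above reduce

     strspn ("abc", "ab")  ->  2            (both strings constant)
     strspn (s, "")        ->  0            (either string empty)
     strcspn (s, "")       ->  strlen (s)
     strcspn ("", s)       ->  0

   evaluating any skipped arguments for their side effects.  */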
11136
11137 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11138 to the call. IGNORE is true if the value returned
11139 by the builtin will be ignored. UNLOCKED is true if this is
11140 actually a call to fputs_unlocked. If LEN is non-NULL, it represents
11141 the known length of the string. Return NULL_TREE if no simplification
11142 was possible. */
11143
11144 tree
11145 fold_builtin_fputs (tree arg0, tree arg1, bool ignore, bool unlocked, tree len)
11146 {
11147 /* If we're using an unlocked function, assume the other unlocked
11148 functions exist explicitly. */
11149 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
11150 : implicit_built_in_decls[BUILT_IN_FPUTC];
11151 tree const fn_fwrite = unlocked ? built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
11152 : implicit_built_in_decls[BUILT_IN_FWRITE];
11153
11154 /* If the return value is used, don't do the transformation. */
11155 if (!ignore)
11156 return NULL_TREE;
11157
11158 /* Verify the arguments in the original call. */
11159 if (!validate_arg (arg0, POINTER_TYPE)
11160 || !validate_arg (arg1, POINTER_TYPE))
11161 return NULL_TREE;
11162
11163 if (! len)
11164 len = c_strlen (arg0, 0);
11165
11166 /* Get the length of the string passed to fputs. If the length
11167 can't be determined, punt. */
11168 if (!len
11169 || TREE_CODE (len) != INTEGER_CST)
11170 return NULL_TREE;
11171
11172 switch (compare_tree_int (len, 1))
11173 {
11174 case -1: /* length is 0, delete the call entirely. */
11175 return omit_one_operand (integer_type_node, integer_zero_node, arg1);
11176
11177 case 0: /* length is 1, call fputc. */
11178 {
11179 const char *p = c_getstr (arg0);
11180
11181 if (p != NULL)
11182 {
11183 if (fn_fputc)
11184 return build_call_expr (fn_fputc, 2,
11185 build_int_cst (NULL_TREE, p[0]), arg1);
11186 else
11187 return NULL_TREE;
11188 }
11189 }
11190 /* FALLTHROUGH */
11191 case 1: /* length is greater than 1, call fwrite. */
11192 {
11193 /* If optimizing for size keep fputs. */
11194 if (optimize_size)
11195 return NULL_TREE;
11196 /* New argument list transforming fputs(string, stream) to
11197 fwrite(string, 1, len, stream). */
11198 if (fn_fwrite)
11199 return build_call_expr (fn_fwrite, 4, arg0, size_one_node, len, arg1);
11200 else
11201 return NULL_TREE;
11202 }
11203 default:
11204 gcc_unreachable ();
11205 }
11206 return NULL_TREE;
11207 }
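/* Editor's illustration (not part of the original sources): when the
   result is unused the folder above rewrites

     fputs ("", f)       ->  0 (call deleted, F still evaluated)
     fputs ("x", f)      ->  fputc ('x', f)
     fputs ("hello", f)  ->  fwrite ("hello", 1, 5, f)

   the fwrite form only when not optimizing for size.  */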
11208
11209 /* Fold the next_arg or va_start call EXP. Returns true if an error was
11210 produced, false otherwise. This is done so that we don't output the error
11211 or warning more than once. */
11212 bool
11213 fold_builtin_next_arg (tree exp, bool va_start_p)
11214 {
11215 tree fntype = TREE_TYPE (current_function_decl);
11216 int nargs = call_expr_nargs (exp);
11217 tree arg;
11218
11219 if (TYPE_ARG_TYPES (fntype) == 0
11220 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
11221 == void_type_node))
11222 {
11223 error ("%<va_start%> used in function with fixed args");
11224 return true;
11225 }
11226
11227 if (va_start_p)
11228 {
11229 if (nargs != 2)
11230 {
11231 error ("wrong number of arguments to function %<va_start%>");
11232 return true;
11233 }
11234 arg = CALL_EXPR_ARG (exp, 1);
11235 }
11236 /* We use __builtin_va_start (ap, 0) or __builtin_next_arg (0) once
11237 we have checked the arguments and, if needed, issued a warning. */
11238 else
11239 {
11240 if (nargs == 0)
11241 {
11242 /* Evidently an out of date version of <stdarg.h>; can't validate
11243 va_start's second argument, but can still work as intended. */
11244 warning (0, "%<__builtin_next_arg%> called without an argument");
11245 return true;
11246 }
11247 else if (nargs > 1)
11248 {
11249 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11250 return true;
11251 }
11252 arg = CALL_EXPR_ARG (exp, 0);
11253 }
11254
11255 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11256 or __builtin_next_arg (0) the first time we see it, after checking
11257 the arguments and if needed issuing a warning. */
11258 if (!integer_zerop (arg))
11259 {
11260 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11261
11262 /* Strip off all nops for the sake of the comparison. This
11263 is not quite the same as STRIP_NOPS. It does more.
11264 We must also strip off INDIRECT_EXPR for C++ reference
11265 parameters. */
11266 while (TREE_CODE (arg) == NOP_EXPR
11267 || TREE_CODE (arg) == CONVERT_EXPR
11268 || TREE_CODE (arg) == NON_LVALUE_EXPR
11269 || TREE_CODE (arg) == INDIRECT_REF)
11270 arg = TREE_OPERAND (arg, 0);
11271 if (arg != last_parm)
11272 {
11273 /* FIXME: Sometimes with the tree optimizers we can get something
11274 other than the last argument even though the user used the last
11275 argument. We just warn and set the arg to be the last
11276 argument so that we will get wrong code because of
11277 it. */
11278 warning (0, "second parameter of %<va_start%> not last named argument");
11279 }
11280 /* We want to verify the second parameter just once before the tree
11281 optimizers are run and then avoid keeping it in the tree,
11282 as otherwise we could warn even for correct code like:
11283 void foo (int i, ...)
11284 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11285 if (va_start_p)
11286 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11287 else
11288 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11289 }
11290 return false;
11291 }
11292
11293
11294 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
11295 ORIG may be null if this is a 2-argument call. We don't attempt to
11296 simplify calls with more than 3 arguments.
11297
11298 Return NULL_TREE if no simplification was possible, otherwise return the
11299 simplified form of the call as a tree. If IGNORED is true, it means that
11300 the caller does not use the returned value of the function. */
11301
11302 static tree
11303 fold_builtin_sprintf (tree dest, tree fmt, tree orig, int ignored)
11304 {
11305 tree call, retval;
11306 const char *fmt_str = NULL;
11307
11308 /* Verify the required arguments in the original call. We deal with two
11309 types of sprintf() calls: 'sprintf (str, fmt)' and
11310 'sprintf (dest, "%s", orig)'. */
11311 if (!validate_arg (dest, POINTER_TYPE)
11312 || !validate_arg (fmt, POINTER_TYPE))
11313 return NULL_TREE;
11314 if (orig && !validate_arg (orig, POINTER_TYPE))
11315 return NULL_TREE;
11316
11317 /* Check whether the format is a literal string constant. */
11318 fmt_str = c_getstr (fmt);
11319 if (fmt_str == NULL)
11320 return NULL_TREE;
11321
11322 call = NULL_TREE;
11323 retval = NULL_TREE;
11324
11325 if (!init_target_chars ())
11326 return NULL_TREE;
11327
11328 /* If the format doesn't contain % args or %%, use strcpy. */
11329 if (strchr (fmt_str, target_percent) == NULL)
11330 {
11331 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11332
11333 if (!fn)
11334 return NULL_TREE;
11335
11336 /* Don't optimize sprintf (buf, "abc", ptr++). */
11337 if (orig)
11338 return NULL_TREE;
11339
11340 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
11341 'format' is known to contain no % formats. */
11342 call = build_call_expr (fn, 2, dest, fmt);
11343 if (!ignored)
11344 retval = build_int_cst (NULL_TREE, strlen (fmt_str));
11345 }
11346
11347 /* If the format is "%s", use strcpy if the result isn't used. */
11348 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
11349 {
11350 tree fn;
11351 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11352
11353 if (!fn)
11354 return NULL_TREE;
11355
11356 /* Don't crash on sprintf (str1, "%s"). */
11357 if (!orig)
11358 return NULL_TREE;
11359
11360 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
11361 if (!ignored)
11362 {
11363 retval = c_strlen (orig, 1);
11364 if (!retval || TREE_CODE (retval) != INTEGER_CST)
11365 return NULL_TREE;
11366 }
11367 call = build_call_expr (fn, 2, dest, orig);
11368 }
11369
11370 if (call && retval)
11371 {
11372 retval = fold_convert
11373 (TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
11374 retval);
11375 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
11376 }
11377 else
11378 return call;
11379 }
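/* Editor's illustration (not part of the original sources): with a
   literal format the folder above produces, at the source level,

     sprintf (buf, "abc")      ->  strcpy (buf, "abc"), 3
     sprintf (buf, "%s", str)  ->  strcpy (buf, str)

   the "%s" form when the result is unused or STR's length is a known
   constant; the constant return value is attached through a
   COMPOUND_EXPR only when the caller actually uses it.  */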
11380
11381 /* Expand a call EXP to __builtin_object_size. */
11382
11383 rtx
11384 expand_builtin_object_size (tree exp)
11385 {
11386 tree ost;
11387 int object_size_type;
11388 tree fndecl = get_callee_fndecl (exp);
11389 location_t locus = EXPR_LOCATION (exp);
11390
11391 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11392 {
11393 error ("%Hfirst argument of %D must be a pointer, second integer constant",
11394 &locus, fndecl);
11395 expand_builtin_trap ();
11396 return const0_rtx;
11397 }
11398
11399 ost = CALL_EXPR_ARG (exp, 1);
11400 STRIP_NOPS (ost);
11401
11402 if (TREE_CODE (ost) != INTEGER_CST
11403 || tree_int_cst_sgn (ost) < 0
11404 || compare_tree_int (ost, 3) > 0)
11405 {
11406 error ("%Hlast argument of %D is not integer constant between 0 and 3",
11407 &locus, fndecl);
11408 expand_builtin_trap ();
11409 return const0_rtx;
11410 }
11411
11412 object_size_type = tree_low_cst (ost, 0);
11413
11414 return object_size_type < 2 ? constm1_rtx : const0_rtx;
11415 }
11416
11417 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11418 FCODE is the BUILT_IN_* to use.
11419 Return NULL_RTX if we failed; the caller should emit a normal call,
11420 otherwise try to get the result in TARGET, if convenient (and in
11421 mode MODE if that's convenient). */
11422
11423 static rtx
11424 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
11425 enum built_in_function fcode)
11426 {
11427 tree dest, src, len, size;
11428
11429 if (!validate_arglist (exp,
11430 POINTER_TYPE,
11431 fcode == BUILT_IN_MEMSET_CHK
11432 ? INTEGER_TYPE : POINTER_TYPE,
11433 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11434 return NULL_RTX;
11435
11436 dest = CALL_EXPR_ARG (exp, 0);
11437 src = CALL_EXPR_ARG (exp, 1);
11438 len = CALL_EXPR_ARG (exp, 2);
11439 size = CALL_EXPR_ARG (exp, 3);
11440
11441 if (! host_integerp (size, 1))
11442 return NULL_RTX;
11443
11444 if (host_integerp (len, 1) || integer_all_onesp (size))
11445 {
11446 tree fn;
11447
11448 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
11449 {
11450 location_t locus = EXPR_LOCATION (exp);
11451 warning (0, "%Hcall to %D will always overflow destination buffer",
11452 &locus, get_callee_fndecl (exp));
11453 return NULL_RTX;
11454 }
11455
11456 fn = NULL_TREE;
11457 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11458 mem{cpy,pcpy,move,set} is available. */
11459 switch (fcode)
11460 {
11461 case BUILT_IN_MEMCPY_CHK:
11462 fn = built_in_decls[BUILT_IN_MEMCPY];
11463 break;
11464 case BUILT_IN_MEMPCPY_CHK:
11465 fn = built_in_decls[BUILT_IN_MEMPCPY];
11466 break;
11467 case BUILT_IN_MEMMOVE_CHK:
11468 fn = built_in_decls[BUILT_IN_MEMMOVE];
11469 break;
11470 case BUILT_IN_MEMSET_CHK:
11471 fn = built_in_decls[BUILT_IN_MEMSET];
11472 break;
11473 default:
11474 break;
11475 }
11476
11477 if (! fn)
11478 return NULL_RTX;
11479
11480 fn = build_call_expr (fn, 3, dest, src, len);
11481 if (TREE_CODE (fn) == CALL_EXPR)
11482 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11483 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11484 }
11485 else if (fcode == BUILT_IN_MEMSET_CHK)
11486 return NULL_RTX;
11487 else
11488 {
11489 unsigned int dest_align
11490 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
11491
11492 /* If DEST is not a pointer type, call the normal function. */
11493 if (dest_align == 0)
11494 return NULL_RTX;
11495
11496 /* If SRC and DEST are the same (and not volatile), do nothing. */
11497 if (operand_equal_p (src, dest, 0))
11498 {
11499 tree expr;
11500
11501 if (fcode != BUILT_IN_MEMPCPY_CHK)
11502 {
11503 /* Evaluate and ignore LEN in case it has side-effects. */
11504 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11505 return expand_expr (dest, target, mode, EXPAND_NORMAL);
11506 }
11507
11508 expr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
11509 return expand_expr (expr, target, mode, EXPAND_NORMAL);
11510 }
11511
11512 /* __memmove_chk special case. */
11513 if (fcode == BUILT_IN_MEMMOVE_CHK)
11514 {
11515 unsigned int src_align
11516 = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
11517
11518 if (src_align == 0)
11519 return NULL_RTX;
11520
11521 /* If src is categorized for a readonly section we can use
11522 normal __memcpy_chk. */
11523 if (readonly_data_expr (src))
11524 {
11525 tree fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
11526 if (!fn)
11527 return NULL_RTX;
11528 fn = build_call_expr (fn, 4, dest, src, len, size);
11529 if (TREE_CODE (fn) == CALL_EXPR)
11530 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11531 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11532 }
11533 }
11534 return NULL_RTX;
11535 }
11536 }
11537
11538 /* Emit warning if a buffer overflow is detected at compile time. */
11539
11540 static void
11541 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11542 {
11543 int is_strlen = 0;
11544 tree len, size;
11545 location_t locus;
11546
11547 switch (fcode)
11548 {
11549 case BUILT_IN_STRCPY_CHK:
11550 case BUILT_IN_STPCPY_CHK:
11551 /* For __strcat_chk the warning will be emitted only if overflowing
11552 by at least strlen (dest) + 1 bytes. */
11553 case BUILT_IN_STRCAT_CHK:
11554 len = CALL_EXPR_ARG (exp, 1);
11555 size = CALL_EXPR_ARG (exp, 2);
11556 is_strlen = 1;
11557 break;
11558 case BUILT_IN_STRNCAT_CHK:
11559 case BUILT_IN_STRNCPY_CHK:
11560 len = CALL_EXPR_ARG (exp, 2);
11561 size = CALL_EXPR_ARG (exp, 3);
11562 break;
11563 case BUILT_IN_SNPRINTF_CHK:
11564 case BUILT_IN_VSNPRINTF_CHK:
11565 len = CALL_EXPR_ARG (exp, 1);
11566 size = CALL_EXPR_ARG (exp, 3);
11567 break;
11568 default:
11569 gcc_unreachable ();
11570 }
11571
11572 if (!len || !size)
11573 return;
11574
11575 if (! host_integerp (size, 1) || integer_all_onesp (size))
11576 return;
11577
11578 if (is_strlen)
11579 {
11580 len = c_strlen (len, 1);
11581 if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11582 return;
11583 }
11584 else if (fcode == BUILT_IN_STRNCAT_CHK)
11585 {
11586 tree src = CALL_EXPR_ARG (exp, 1);
11587 if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11588 return;
11589 src = c_strlen (src, 1);
11590 if (! src || ! host_integerp (src, 1))
11591 {
11592 locus = EXPR_LOCATION (exp);
11593 warning (0, "%Hcall to %D might overflow destination buffer",
11594 &locus, get_callee_fndecl (exp));
11595 return;
11596 }
11597 else if (tree_int_cst_lt (src, size))
11598 return;
11599 }
11600 else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
11601 return;
11602
11603 locus = EXPR_LOCATION (exp);
11604 warning (0, "%Hcall to %D will always overflow destination buffer",
11605 &locus, get_callee_fndecl (exp));
11606 }
11607
11608 /* Emit warning if a buffer overflow is detected at compile time
11609 in __sprintf_chk/__vsprintf_chk calls. */
11610
11611 static void
11612 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11613 {
11614 tree dest, size, len, fmt, flag;
11615 const char *fmt_str;
11616 int nargs = call_expr_nargs (exp);
11617
11618 /* Verify the required arguments in the original call. */
11619
11620 if (nargs < 4)
11621 return;
11622 dest = CALL_EXPR_ARG (exp, 0);
11623 flag = CALL_EXPR_ARG (exp, 1);
11624 size = CALL_EXPR_ARG (exp, 2);
11625 fmt = CALL_EXPR_ARG (exp, 3);
11626
11627 if (! host_integerp (size, 1) || integer_all_onesp (size))
11628 return;
11629
11630 /* Check whether the format is a literal string constant. */
11631 fmt_str = c_getstr (fmt);
11632 if (fmt_str == NULL)
11633 return;
11634
11635 if (!init_target_chars ())
11636 return;
11637
11638 /* If the format doesn't contain % args or %%, we know its size. */
11639 if (strchr (fmt_str, target_percent) == 0)
11640 len = build_int_cstu (size_type_node, strlen (fmt_str));
11641 /* If the format is "%s" and first ... argument is a string literal,
11642 we know it too. */
11643 else if (fcode == BUILT_IN_SPRINTF_CHK
11644 && strcmp (fmt_str, target_percent_s) == 0)
11645 {
11646 tree arg;
11647
11648 if (nargs < 5)
11649 return;
11650 arg = CALL_EXPR_ARG (exp, 4);
11651 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11652 return;
11653
11654 len = c_strlen (arg, 1);
11655 if (!len || ! host_integerp (len, 1))
11656 return;
11657 }
11658 else
11659 return;
11660
11661 if (! tree_int_cst_lt (len, size))
11662 {
11663 location_t locus = EXPR_LOCATION (exp);
11664 warning (0, "%Hcall to %D will always overflow destination buffer",
11665 &locus, get_callee_fndecl (exp));
11666 }
11667 }
11668
11669 /* Fold a call to __builtin_object_size with arguments PTR and OST,
11670 if possible. */
11671
11672 tree
11673 fold_builtin_object_size (tree ptr, tree ost)
11674 {
11675 tree ret = NULL_TREE;
11676 int object_size_type;
11677
11678 if (!validate_arg (ptr, POINTER_TYPE)
11679 || !validate_arg (ost, INTEGER_TYPE))
11680 return NULL_TREE;
11681
11682 STRIP_NOPS (ost);
11683
11684 if (TREE_CODE (ost) != INTEGER_CST
11685 || tree_int_cst_sgn (ost) < 0
11686 || compare_tree_int (ost, 3) > 0)
11687 return NULL_TREE;
11688
11689 object_size_type = tree_low_cst (ost, 0);
11690
11691 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11692 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11693 and (size_t) 0 for types 2 and 3. */
11694 if (TREE_SIDE_EFFECTS (ptr))
11695 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
11696
11697 if (TREE_CODE (ptr) == ADDR_EXPR)
11698 ret = build_int_cstu (size_type_node,
11699 compute_builtin_object_size (ptr, object_size_type));
11700
11701 else if (TREE_CODE (ptr) == SSA_NAME)
11702 {
11703 unsigned HOST_WIDE_INT bytes;
11704
11705 /* If object size is not known yet, delay folding until
11706 later. Maybe subsequent passes will help determining
11707 it. */
11708 bytes = compute_builtin_object_size (ptr, object_size_type);
11709 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2
11710 ? -1 : 0))
11711 ret = build_int_cstu (size_type_node, bytes);
11712 }
11713
11714 if (ret)
11715 {
11716 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (ret);
11717 HOST_WIDE_INT high = TREE_INT_CST_HIGH (ret);
11718 if (fit_double_type (low, high, &low, &high, TREE_TYPE (ret)))
11719 ret = NULL_TREE;
11720 }
11721
11722 return ret;
11723 }
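/* Editor's illustration (assuming the usual __builtin_object_size
   semantics; not part of the original sources): for

     char buf[64];
     __builtin_object_size (&buf[8], 0)

   compute_builtin_object_size can determine the remaining size and the
   call folds to 56, while a pointer whose object cannot be determined
   is left for expand_builtin_object_size above, which falls back to
   (size_t) -1 for types 0 and 1 and (size_t) 0 for types 2 and 3.  */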
11724
11725 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11726 DEST, SRC, LEN, and SIZE are the arguments to the call.
11727 IGNORE is true if the return value can be ignored. FCODE is the BUILT_IN_*
11728 code of the builtin. If MAXLEN is not NULL, it is the maximum length
11729 passed as the third argument. */
11730
11731 tree
11732 fold_builtin_memory_chk (tree fndecl,
11733 tree dest, tree src, tree len, tree size,
11734 tree maxlen, bool ignore,
11735 enum built_in_function fcode)
11736 {
11737 tree fn;
11738
11739 if (!validate_arg (dest, POINTER_TYPE)
11740 || !validate_arg (src,
11741 (fcode == BUILT_IN_MEMSET_CHK
11742 ? INTEGER_TYPE : POINTER_TYPE))
11743 || !validate_arg (len, INTEGER_TYPE)
11744 || !validate_arg (size, INTEGER_TYPE))
11745 return NULL_TREE;
11746
11747 /* If SRC and DEST are the same (and not volatile), return DEST
11748 (resp. DEST+LEN for __mempcpy_chk). */
11749 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
11750 {
11751 if (fcode != BUILT_IN_MEMPCPY_CHK)
11752 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
11753 else
11754 {
11755 tree temp = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
11756 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), temp);
11757 }
11758 }
11759
11760 if (! host_integerp (size, 1))
11761 return NULL_TREE;
11762
11763 if (! integer_all_onesp (size))
11764 {
11765 if (! host_integerp (len, 1))
11766 {
11767 /* If LEN is not constant, try MAXLEN too.
11768 For MAXLEN only allow optimizing into non-_ocs function
11769 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
11770 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
11771 {
11772 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
11773 {
11774 /* (void) __mempcpy_chk () can be optimized into
11775 (void) __memcpy_chk (). */
11776 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
11777 if (!fn)
11778 return NULL_TREE;
11779
11780 return build_call_expr (fn, 4, dest, src, len, size);
11781 }
11782 return NULL_TREE;
11783 }
11784 }
11785 else
11786 maxlen = len;
11787
11788 if (tree_int_cst_lt (size, maxlen))
11789 return NULL_TREE;
11790 }
11791
11792 fn = NULL_TREE;
11793 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11794 mem{cpy,pcpy,move,set} is available. */
11795 switch (fcode)
11796 {
11797 case BUILT_IN_MEMCPY_CHK:
11798 fn = built_in_decls[BUILT_IN_MEMCPY];
11799 break;
11800 case BUILT_IN_MEMPCPY_CHK:
11801 fn = built_in_decls[BUILT_IN_MEMPCPY];
11802 break;
11803 case BUILT_IN_MEMMOVE_CHK:
11804 fn = built_in_decls[BUILT_IN_MEMMOVE];
11805 break;
11806 case BUILT_IN_MEMSET_CHK:
11807 fn = built_in_decls[BUILT_IN_MEMSET];
11808 break;
11809 default:
11810 break;
11811 }
11812
11813 if (!fn)
11814 return NULL_TREE;
11815
11816 return build_call_expr (fn, 3, dest, src, len);
11817 }
11818
11819 /* Fold a call to the __st[rp]cpy_chk builtin.
11820 DEST, SRC, and SIZE are the arguments to the call.
11821 IGNORE is true if the return value can be ignored. FCODE is the BUILT_IN_*
11822 code of the builtin. If MAXLEN is not NULL, it is the maximum length of
11823 the string passed as the second argument. */
11824
11825 tree
11826 fold_builtin_stxcpy_chk (tree fndecl, tree dest, tree src, tree size,
11827 tree maxlen, bool ignore,
11828 enum built_in_function fcode)
11829 {
11830 tree len, fn;
11831
11832 if (!validate_arg (dest, POINTER_TYPE)
11833 || !validate_arg (src, POINTER_TYPE)
11834 || !validate_arg (size, INTEGER_TYPE))
11835 return NULL_TREE;
11836
11837 /* If SRC and DEST are the same (and not volatile), return DEST. */
11838 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
11839 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);
11840
11841 if (! host_integerp (size, 1))
11842 return NULL_TREE;
11843
11844 if (! integer_all_onesp (size))
11845 {
11846 len = c_strlen (src, 1);
11847 if (! len || ! host_integerp (len, 1))
11848 {
11849 /* If LEN is not constant, try MAXLEN too.
11850 For MAXLEN only allow optimizing into non-_ocs function
11851 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
11852 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
11853 {
11854 if (fcode == BUILT_IN_STPCPY_CHK)
11855 {
11856 if (! ignore)
11857 return NULL_TREE;
11858
11859 /* If return value of __stpcpy_chk is ignored,
11860 optimize into __strcpy_chk. */
11861 fn = built_in_decls[BUILT_IN_STRCPY_CHK];
11862 if (!fn)
11863 return NULL_TREE;
11864
11865 return build_call_expr (fn, 3, dest, src, size);
11866 }
11867
11868 if (! len || TREE_SIDE_EFFECTS (len))
11869 return NULL_TREE;
11870
11871 /* If c_strlen returned something, but not a constant,
11872 transform __strcpy_chk into __memcpy_chk. */
11873 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
11874 if (!fn)
11875 return NULL_TREE;
11876
11877 len = size_binop (PLUS_EXPR, len, ssize_int (1));
11878 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
11879 build_call_expr (fn, 4,
11880 dest, src, len, size));
11881 }
11882 }
11883 else
11884 maxlen = len;
11885
11886 if (! tree_int_cst_lt (maxlen, size))
11887 return NULL_TREE;
11888 }
11889
11890 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
11891 fn = built_in_decls[fcode == BUILT_IN_STPCPY_CHK
11892 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY];
11893 if (!fn)
11894 return NULL_TREE;
11895
11896 return build_call_expr (fn, 2, dest, src);
11897 }
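/* Editor's illustration (not part of the original sources): with a
   64-byte destination object SIZE is 64, and the folder above gives, at
   the source level,

     __strcpy_chk (dst, "abc", 64)  ->  strcpy (dst, "abc")
     __strcpy_chk (dst, src, 64)    ->  unchanged (length of SRC unknown)
     __strcpy_chk (dst, src, -1)    ->  strcpy (dst, src)

   where (size_t) -1 means the object size could not be determined.  */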
11898
11899 /* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
11900 are the arguments to the call. If MAXLEN is not NULL, it is the maximum
11901 length passed as the third argument. */
11902
11903 tree
11904 fold_builtin_strncpy_chk (tree dest, tree src, tree len, tree size,
11905 tree maxlen)
11906 {
11907 tree fn;
11908
11909 if (!validate_arg (dest, POINTER_TYPE)
11910 || !validate_arg (src, POINTER_TYPE)
11911 || !validate_arg (len, INTEGER_TYPE)
11912 || !validate_arg (size, INTEGER_TYPE))
11913 return NULL_TREE;
11914
11915 if (! host_integerp (size, 1))
11916 return NULL_TREE;
11917
11918 if (! integer_all_onesp (size))
11919 {
11920 if (! host_integerp (len, 1))
11921 {
11922 /* If LEN is not constant, try MAXLEN too.
11923 For MAXLEN only allow optimizing into non-_ocs function
11924 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
11925 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
11926 return NULL_TREE;
11927 }
11928 else
11929 maxlen = len;
11930
11931 if (tree_int_cst_lt (size, maxlen))
11932 return NULL_TREE;
11933 }
11934
11935 /* If __builtin_strncpy_chk is used, assume strncpy is available. */
11936 fn = built_in_decls[BUILT_IN_STRNCPY];
11937 if (!fn)
11938 return NULL_TREE;
11939
11940 return build_call_expr (fn, 3, dest, src, len);
11941 }
11942
11943 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
11944 are the arguments to the call. */
11945
11946 static tree
11947 fold_builtin_strcat_chk (tree fndecl, tree dest, tree src, tree size)
11948 {
11949 tree fn;
11950 const char *p;
11951
11952 if (!validate_arg (dest, POINTER_TYPE)
11953 || !validate_arg (src, POINTER_TYPE)
11954 || !validate_arg (size, INTEGER_TYPE))
11955 return NULL_TREE;
11956
11957 p = c_getstr (src);
11958 /* If the SRC parameter is "", return DEST. */
11959 if (p && *p == '\0')
11960 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
11961
11962 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
11963 return NULL_TREE;
11964
11965 /* If __builtin_strcat_chk is used, assume strcat is available. */
11966 fn = built_in_decls[BUILT_IN_STRCAT];
11967 if (!fn)
11968 return NULL_TREE;
11969
11970 return build_call_expr (fn, 2, dest, src);
11971 }
11972
11973 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
11974 LEN, and SIZE. */
11975
11976 static tree
11977 fold_builtin_strncat_chk (tree fndecl,
11978 tree dest, tree src, tree len, tree size)
11979 {
11980 tree fn;
11981 const char *p;
11982
11983 if (!validate_arg (dest, POINTER_TYPE)
11984 || !validate_arg (src, POINTER_TYPE)
11985 || !validate_arg (len, INTEGER_TYPE)
11986 || !validate_arg (size, INTEGER_TYPE))
11987 return NULL_TREE;
11988
11989 p = c_getstr (src);
11990 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
11991 if (p && *p == '\0')
11992 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
11993 else if (integer_zerop (len))
11994 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
11995
11996 if (! host_integerp (size, 1))
11997 return NULL_TREE;
11998
11999 if (! integer_all_onesp (size))
12000 {
12001 tree src_len = c_strlen (src, 1);
12002 if (src_len
12003 && host_integerp (src_len, 1)
12004 && host_integerp (len, 1)
12005 && ! tree_int_cst_lt (len, src_len))
12006 {
12007 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
12008 fn = built_in_decls[BUILT_IN_STRCAT_CHK];
12009 if (!fn)
12010 return NULL_TREE;
12011
12012 return build_call_expr (fn, 3, dest, src, size);
12013 }
12014 return NULL_TREE;
12015 }
12016
12017 /* If __builtin_strncat_chk is used, assume strncat is available. */
12018 fn = built_in_decls[BUILT_IN_STRNCAT];
12019 if (!fn)
12020 return NULL_TREE;
12021
12022 return build_call_expr (fn, 3, dest, src, len);
12023 }
12024
12025 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
12026 a normal call should be emitted rather than expanding the function
12027 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
12028
12029 static tree
12030 fold_builtin_sprintf_chk (tree exp, enum built_in_function fcode)
12031 {
12032 tree dest, size, len, fn, fmt, flag;
12033 const char *fmt_str;
12034 int nargs = call_expr_nargs (exp);
12035
12036 /* Verify the required arguments in the original call. */
12037 if (nargs < 4)
12038 return NULL_TREE;
12039 dest = CALL_EXPR_ARG (exp, 0);
12040 if (!validate_arg (dest, POINTER_TYPE))
12041 return NULL_TREE;
12042 flag = CALL_EXPR_ARG (exp, 1);
12043 if (!validate_arg (flag, INTEGER_TYPE))
12044 return NULL_TREE;
12045 size = CALL_EXPR_ARG (exp, 2);
12046 if (!validate_arg (size, INTEGER_TYPE))
12047 return NULL_TREE;
12048 fmt = CALL_EXPR_ARG (exp, 3);
12049 if (!validate_arg (fmt, POINTER_TYPE))
12050 return NULL_TREE;
12051
12052 if (! host_integerp (size, 1))
12053 return NULL_TREE;
12054
12055 len = NULL_TREE;
12056
12057 if (!init_target_chars ())
12058 return NULL_TREE;
12059
12060 /* Check whether the format is a literal string constant. */
12061 fmt_str = c_getstr (fmt);
12062 if (fmt_str != NULL)
12063 {
12064 /* If the format doesn't contain % args or %%, we know the size. */
12065 if (strchr (fmt_str, target_percent) == 0)
12066 {
12067 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
12068 len = build_int_cstu (size_type_node, strlen (fmt_str));
12069 }
12070 /* If the format is "%s" and first ... argument is a string literal,
12071 we know the size too. */
12072 else if (fcode == BUILT_IN_SPRINTF_CHK
12073 && strcmp (fmt_str, target_percent_s) == 0)
12074 {
12075 tree arg;
12076
12077 if (nargs == 5)
12078 {
12079 arg = CALL_EXPR_ARG (exp, 4);
12080 if (validate_arg (arg, POINTER_TYPE))
12081 {
12082 len = c_strlen (arg, 1);
12083 if (! len || ! host_integerp (len, 1))
12084 len = NULL_TREE;
12085 }
12086 }
12087 }
12088 }
12089
12090 if (! integer_all_onesp (size))
12091 {
12092 if (! len || ! tree_int_cst_lt (len, size))
12093 return NULL_TREE;
12094 }
12095
12096 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
12097 or if format doesn't contain % chars or is "%s". */
12098 if (! integer_zerop (flag))
12099 {
12100 if (fmt_str == NULL)
12101 return NULL_TREE;
12102 if (strchr (fmt_str, target_percent) != NULL
12103 && strcmp (fmt_str, target_percent_s))
12104 return NULL_TREE;
12105 }
12106
12107 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
12108 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
12109 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
12110 if (!fn)
12111 return NULL_TREE;
12112
12113 return rewrite_call_expr (exp, 4, fn, 2, dest, fmt);
12114 }
12115
12116 /* Fold a call EXP to __{,v}snprintf_chk. Return NULL_TREE if
12117 a normal call should be emitted rather than expanding the function
12118 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
12119 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is the maximum
12120 length passed as the second argument. */
12121
12122 tree
12123 fold_builtin_snprintf_chk (tree exp, tree maxlen,
12124 enum built_in_function fcode)
12125 {
12126 tree dest, size, len, fn, fmt, flag;
12127 const char *fmt_str;
12128
12129 /* Verify the required arguments in the original call. */
12130 if (call_expr_nargs (exp) < 5)
12131 return NULL_TREE;
12132 dest = CALL_EXPR_ARG (exp, 0);
12133 if (!validate_arg (dest, POINTER_TYPE))
12134 return NULL_TREE;
12135 len = CALL_EXPR_ARG (exp, 1);
12136 if (!validate_arg (len, INTEGER_TYPE))
12137 return NULL_TREE;
12138 flag = CALL_EXPR_ARG (exp, 2);
12139 if (!validate_arg (flag, INTEGER_TYPE))
12140 return NULL_TREE;
12141 size = CALL_EXPR_ARG (exp, 3);
12142 if (!validate_arg (size, INTEGER_TYPE))
12143 return NULL_TREE;
12144 fmt = CALL_EXPR_ARG (exp, 4);
12145 if (!validate_arg (fmt, POINTER_TYPE))
12146 return NULL_TREE;
12147
12148 if (! host_integerp (size, 1))
12149 return NULL_TREE;
12150
12151 if (! integer_all_onesp (size))
12152 {
12153 if (! host_integerp (len, 1))
12154 {
12155 /* If LEN is not constant, try MAXLEN too.
12156 For MAXLEN only allow optimizing into non-_ocs function
12157 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12158 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12159 return NULL_TREE;
12160 }
12161 else
12162 maxlen = len;
12163
12164 if (tree_int_cst_lt (size, maxlen))
12165 return NULL_TREE;
12166 }
12167
12168 if (!init_target_chars ())
12169 return NULL_TREE;
12170
12171 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
12172 or if format doesn't contain % chars or is "%s". */
12173 if (! integer_zerop (flag))
12174 {
12175 fmt_str = c_getstr (fmt);
12176 if (fmt_str == NULL)
12177 return NULL_TREE;
12178 if (strchr (fmt_str, target_percent) != NULL
12179 && strcmp (fmt_str, target_percent_s))
12180 return NULL_TREE;
12181 }
12182
12183 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
12184 available. */
12185 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
12186 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
12187 if (!fn)
12188 return NULL_TREE;
12189
12190 return rewrite_call_expr (exp, 5, fn, 3, dest, len, fmt);
12191 }
12192
12193 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
12194 FMT and ARG are the arguments to the call; we don't fold cases with
12195 more than 2 arguments, and ARG may be null if this is a 1-argument case.
12196
12197 Return NULL_TREE if no simplification was possible, otherwise return the
12198 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12199 code of the function to be simplified. */
12200
12201 static tree
12202 fold_builtin_printf (tree fndecl, tree fmt, tree arg, bool ignore,
12203 enum built_in_function fcode)
12204 {
12205 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
12206 const char *fmt_str = NULL;
12207
12208 /* If the return value is used, don't do the transformation. */
12209 if (! ignore)
12210 return NULL_TREE;
12211
12212 /* Verify the required arguments in the original call. */
12213 if (!validate_arg (fmt, POINTER_TYPE))
12214 return NULL_TREE;
12215
12216 /* Check whether the format is a literal string constant. */
12217 fmt_str = c_getstr (fmt);
12218 if (fmt_str == NULL)
12219 return NULL_TREE;
12220
12221 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
12222 {
12223 /* If we're using an unlocked function, assume the other
12224 unlocked functions exist explicitly. */
12225 fn_putchar = built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED];
12226 fn_puts = built_in_decls[BUILT_IN_PUTS_UNLOCKED];
12227 }
12228 else
12229 {
12230 fn_putchar = implicit_built_in_decls[BUILT_IN_PUTCHAR];
12231 fn_puts = implicit_built_in_decls[BUILT_IN_PUTS];
12232 }
12233
12234 if (!init_target_chars ())
12235 return NULL_TREE;
12236
12237 if (strcmp (fmt_str, target_percent_s) == 0
12238 || strchr (fmt_str, target_percent) == NULL)
12239 {
12240 const char *str;
12241
12242 if (strcmp (fmt_str, target_percent_s) == 0)
12243 {
12244 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12245 return NULL_TREE;
12246
12247 if (!arg || !validate_arg (arg, POINTER_TYPE))
12248 return NULL_TREE;
12249
12250 str = c_getstr (arg);
12251 if (str == NULL)
12252 return NULL_TREE;
12253 }
12254 else
12255 {
12256 /* The format specifier doesn't contain any '%' characters. */
12257 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
12258 && arg)
12259 return NULL_TREE;
12260 str = fmt_str;
12261 }
12262
12263 /* If the string was "", printf does nothing. */
12264 if (str[0] == '\0')
12265 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12266
12267 /* If the string has length of 1, call putchar. */
12268 if (str[1] == '\0')
12269 {
12270 /* Given printf ("c"), where c is any single character,
12271 convert "c"[0] to an int and pass that to the replacement
12272 function. */
12273 newarg = build_int_cst (NULL_TREE, str[0]);
12274 if (fn_putchar)
12275 call = build_call_expr (fn_putchar, 1, newarg);
12276 }
12277 else
12278 {
12279 /* If the string was "string\n", call puts("string"). */
12280 size_t len = strlen (str);
12281 if ((unsigned char)str[len - 1] == target_newline)
12282 {
12283 /* Create a NUL-terminated string that's one char shorter
12284 than the original, stripping off the trailing '\n'. */
12285 char *newstr = alloca (len);
12286 memcpy (newstr, str, len - 1);
12287 newstr[len - 1] = 0;
12288
12289 newarg = build_string_literal (len, newstr);
12290 if (fn_puts)
12291 call = build_call_expr (fn_puts, 1, newarg);
12292 }
12293 else
12294 /* We'd like to arrange to call fputs(string,stdout) here,
12295 but we need stdout and don't have a way to get it yet. */
12296 return NULL_TREE;
12297 }
12298 }
12299
12300 /* The other optimizations can be done only on the non-va_list variants. */
12301 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12302 return NULL_TREE;
12303
12304 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
12305 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
12306 {
12307 if (!arg || !validate_arg (arg, POINTER_TYPE))
12308 return NULL_TREE;
12309 if (fn_puts)
12310 call = build_call_expr (fn_puts, 1, arg);
12311 }
12312
12313 /* If the format specifier was "%c", call __builtin_putchar(arg). */
12314 else if (strcmp (fmt_str, target_percent_c) == 0)
12315 {
12316 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12317 return NULL_TREE;
12318 if (fn_putchar)
12319 call = build_call_expr (fn_putchar, 1, arg);
12320 }
12321
12322 if (!call)
12323 return NULL_TREE;
12324
12325 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), call);
12326 }
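/* Editor's illustration (not part of the original sources): when the
   return value is unused the folder above rewrites

     printf ("")         ->  0            (call deleted)
     printf ("x")        ->  putchar ('x')
     printf ("hello\n")  ->  puts ("hello")
     printf ("%s\n", s)  ->  puts (s)
     printf ("%c", c)    ->  putchar (c)

   and leaves everything else (including a plain "%s" without a newline,
   which would need fputs (s, stdout)) alone.  */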
12327
12328 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
12329 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
12330 more than 3 arguments, and ARG may be null in the 2-argument case.
12331
12332 Return NULL_TREE if no simplification was possible, otherwise return the
12333 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12334 code of the function to be simplified. */
12335
12336 static tree
12337 fold_builtin_fprintf (tree fndecl, tree fp, tree fmt, tree arg, bool ignore,
12338 enum built_in_function fcode)
12339 {
12340 tree fn_fputc, fn_fputs, call = NULL_TREE;
12341 const char *fmt_str = NULL;
12342
12343 /* If the return value is used, don't do the transformation. */
12344 if (! ignore)
12345 return NULL_TREE;
12346
12347 /* Verify the required arguments in the original call. */
12348 if (!validate_arg (fp, POINTER_TYPE))
12349 return NULL_TREE;
12350 if (!validate_arg (fmt, POINTER_TYPE))
12351 return NULL_TREE;
12352
12353 /* Check whether the format is a literal string constant. */
12354 fmt_str = c_getstr (fmt);
12355 if (fmt_str == NULL)
12356 return NULL_TREE;
12357
12358 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
12359 {
12360 /* If we're using an unlocked function, assume the other
12361 unlocked functions exist explicitly. */
12362 fn_fputc = built_in_decls[BUILT_IN_FPUTC_UNLOCKED];
12363 fn_fputs = built_in_decls[BUILT_IN_FPUTS_UNLOCKED];
12364 }
12365 else
12366 {
12367 fn_fputc = implicit_built_in_decls[BUILT_IN_FPUTC];
12368 fn_fputs = implicit_built_in_decls[BUILT_IN_FPUTS];
12369 }
12370
12371 if (!init_target_chars ())
12372 return NULL_TREE;
12373
12374 /* If the format doesn't contain % args or %%, fold to a call to fputs. */
12375 if (strchr (fmt_str, target_percent) == NULL)
12376 {
12377 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
12378 && arg)
12379 return NULL_TREE;
12380
12381 /* If the format specifier was "", fprintf does nothing. */
12382 if (fmt_str[0] == '\0')
12383 {
12384 /* If FP has side-effects, just wait until gimplification is
12385 done. */
12386 if (TREE_SIDE_EFFECTS (fp))
12387 return NULL_TREE;
12388
12389 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12390 }
12391
12392 /* When "string" doesn't contain %, replace all cases of
12393 fprintf (fp, string) with fputs (string, fp). The fputs
12394 builtin will take care of special cases like length == 1. */
12395 if (fn_fputs)
12396 call = build_call_expr (fn_fputs, 2, fmt, fp);
12397 }
12398
12399 /* The other optimizations can be done only on the non-va_list variants. */
12400 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
12401 return NULL_TREE;
12402
12403 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
12404 else if (strcmp (fmt_str, target_percent_s) == 0)
12405 {
12406 if (!arg || !validate_arg (arg, POINTER_TYPE))
12407 return NULL_TREE;
12408 if (fn_fputs)
12409 call = build_call_expr (fn_fputs, 2, arg, fp);
12410 }
12411
12412 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
12413 else if (strcmp (fmt_str, target_percent_c) == 0)
12414 {
12415 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12416 return NULL_TREE;
12417 if (fn_fputc)
12418 call = build_call_expr (fn_fputc, 2, arg, fp);
12419 }
12420
12421 if (!call)
12422 return NULL_TREE;
12423 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), call);
12424 }
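
/* Likewise, a sketch of the fprintf folds performed above:

     fprintf (fp, "")       ->  0 (when FP has no side effects)
     fprintf (fp, "foo")    ->  fputs ("foo", fp)
     fprintf (fp, "%s", s)  ->  fputs (s, fp)
     fprintf (fp, "%c", c)  ->  fputc (c, fp)

   assuming the corresponding fputs/fputc declarations are available.  */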
12425
12426 /* Initialize format string characters in the target charset. */
12427
12428 static bool
12429 init_target_chars (void)
12430 {
12431 static bool init;
12432 if (!init)
12433 {
12434 target_newline = lang_hooks.to_target_charset ('\n');
12435 target_percent = lang_hooks.to_target_charset ('%');
12436 target_c = lang_hooks.to_target_charset ('c');
12437 target_s = lang_hooks.to_target_charset ('s');
12438 if (target_newline == 0 || target_percent == 0 || target_c == 0
12439 || target_s == 0)
12440 return false;
12441
12442 target_percent_c[0] = target_percent;
12443 target_percent_c[1] = target_c;
12444 target_percent_c[2] = '\0';
12445
12446 target_percent_s[0] = target_percent;
12447 target_percent_s[1] = target_s;
12448 target_percent_s[2] = '\0';
12449
12450 target_percent_s_newline[0] = target_percent;
12451 target_percent_s_newline[1] = target_s;
12452 target_percent_s_newline[2] = target_newline;
12453 target_percent_s_newline[3] = '\0';
12454
12455 init = true;
12456 }
12457 return true;
12458 }
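
/* After a successful call to init_target_chars, target_percent_c,
   target_percent_s and target_percent_s_newline hold "%c", "%s" and
   "%s\n" translated into the target character set; the format-string
   comparisons above rely on exactly that.  */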
12459
12460 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
12461 and that no overflow/underflow occurred. INEXACT is true if M was
12462 not calculated exactly. TYPE is the tree type for the result. This
12463 function assumes that the caller cleared the MPFR flags and then
12464 calculated M, so that any flag now set was raised by that
12465 calculation. Return NULL_TREE if any checks fail. */
12466
12467 static tree
12468 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
12469 {
12470 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12471 overflow/underflow occurred. If -frounding-math, proceed iff the
12472 result of calling FUNC was exact. */
12473 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
12474 && (!flag_rounding_math || !inexact))
12475 {
12476 REAL_VALUE_TYPE rr;
12477
12478 real_from_mpfr (&rr, m, type, GMP_RNDN);
12479 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
12480 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12481 but the mpfr_t is not, then we underflowed in the
12482 conversion. */
12483 if (real_isfinite (&rr)
12484 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
12485 {
12486 REAL_VALUE_TYPE rmode;
12487
12488 real_convert (&rmode, TYPE_MODE (type), &rr);
12489 /* Proceed iff the specified mode can hold the value. */
12490 if (real_identical (&rmode, &rr))
12491 return build_real (type, rmode);
12492 }
12493 }
12494 return NULL_TREE;
12495 }
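
/* The expected calling pattern for do_mpfr_ckconv, as used by the
   do_mpfr_arg* helpers below (a sketch only, with mpfr_sqrt standing
   in for any mpfr computation):

     mpfr_clear_flags ();
     inexact = mpfr_sqrt (m, m, GMP_RNDN);
     result = do_mpfr_ckconv (m, type, inexact);

   i.e. the flags must be cleared immediately before the computation
   whose result is being checked.  */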
12496
12497 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
12498 FUNC on it and return the resulting value as a tree with type TYPE.
12499 If MIN and/or MAX are not NULL, then the supplied ARG must be
12500 within those bounds. If INCLUSIVE is true, then MIN/MAX are
12501 acceptable values, otherwise they are not. The mpfr precision is
12502 set to the precision of TYPE. We assume that function FUNC returns
12503 zero if the result could be calculated exactly within the requested
12504 precision. */
12505
12506 static tree
12507 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
12508 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
12509 bool inclusive)
12510 {
12511 tree result = NULL_TREE;
12512
12513 STRIP_NOPS (arg);
12514
12515 /* To proceed, MPFR must exactly represent the target floating point
12516 format, which only happens when the target base equals two. */
12517 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12518 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
12519 {
12520 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12521
12522 if (real_isfinite (ra)
12523 && (!min || real_compare (inclusive ? GE_EXPR : GT_EXPR, ra, min))
12524 && (!max || real_compare (inclusive ? LE_EXPR : LT_EXPR, ra, max)))
12525 {
12526 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12527 int inexact;
12528 mpfr_t m;
12529
12530 mpfr_init2 (m, prec);
12531 mpfr_from_real (m, ra, GMP_RNDN);
12532 mpfr_clear_flags ();
12533 inexact = func (m, m, GMP_RNDN);
12534 result = do_mpfr_ckconv (m, type, inexact);
12535 mpfr_clear (m);
12536 }
12537 }
12538
12539 return result;
12540 }
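
/* As an illustration (a sketch; the real call sites are elsewhere in
   this file), a caller folding sqrt of a nonnegative REAL_CST might
   request:

     do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true);

   i.e. require ARG >= 0 (inclusive lower bound) and leave the upper
   bound unconstrained.  */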
12541
12542 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
12543 FUNC on it and return the resulting value as a tree with type TYPE.
12544 The mpfr precision is set to the precision of TYPE. We assume that
12545 function FUNC returns zero if the result could be calculated
12546 exactly within the requested precision. */
12547
12548 static tree
12549 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
12550 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12551 {
12552 tree result = NULL_TREE;
12553
12554 STRIP_NOPS (arg1);
12555 STRIP_NOPS (arg2);
12556
12557 /* To proceed, MPFR must exactly represent the target floating point
12558 format, which only happens when the target base equals two. */
12559 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12560 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12561 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12562 {
12563 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12564 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12565
12566 if (real_isfinite (ra1) && real_isfinite (ra2))
12567 {
12568 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12569 int inexact;
12570 mpfr_t m1, m2;
12571
12572 mpfr_inits2 (prec, m1, m2, NULL);
12573 mpfr_from_real (m1, ra1, GMP_RNDN);
12574 mpfr_from_real (m2, ra2, GMP_RNDN);
12575 mpfr_clear_flags ();
12576 inexact = func (m1, m1, m2, GMP_RNDN);
12577 result = do_mpfr_ckconv (m1, type, inexact);
12578 mpfr_clears (m1, m2, NULL);
12579 }
12580 }
12581
12582 return result;
12583 }
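
/* For example (a sketch only), a two-argument fold such as atan2 on
   two REAL_CST operands could be requested as:

     do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);

   where mpfr_atan2 has the required (mpfr_ptr, mpfr_srcptr,
   mpfr_srcptr, mp_rnd_t) signature.  */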
12584
12585 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
12586 FUNC on it and return the resulting value as a tree with type TYPE.
12587 The mpfr precision is set to the precision of TYPE. We assume that
12588 function FUNC returns zero if the result could be calculated
12589 exactly within the requested precision. */
12590
12591 static tree
12592 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
12593 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12594 {
12595 tree result = NULL_TREE;
12596
12597 STRIP_NOPS (arg1);
12598 STRIP_NOPS (arg2);
12599 STRIP_NOPS (arg3);
12600
12601 /* To proceed, MPFR must exactly represent the target floating point
12602 format, which only happens when the target base equals two. */
12603 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12604 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12605 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
12606 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
12607 {
12608 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12609 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12610 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
12611
12612 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
12613 {
12614 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12615 int inexact;
12616 mpfr_t m1, m2, m3;
12617
12618 mpfr_inits2 (prec, m1, m2, m3, NULL);
12619 mpfr_from_real (m1, ra1, GMP_RNDN);
12620 mpfr_from_real (m2, ra2, GMP_RNDN);
12621 mpfr_from_real (m3, ra3, GMP_RNDN);
12622 mpfr_clear_flags ();
12623 inexact = func (m1, m1, m2, m3, GMP_RNDN);
12624 result = do_mpfr_ckconv (m1, type, inexact);
12625 mpfr_clears (m1, m2, m3, NULL);
12626 }
12627 }
12628
12629 return result;
12630 }
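
/* For example (a sketch only), the fused multiply-add could be folded
   on three REAL_CST operands as:

     do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);

   where mpfr_fma computes arg0 * arg1 + arg2 with a single rounding.  */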
12631
12632 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
12633 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
12634 If ARG_SINP and ARG_COSP are NULL then the result is returned
12635 as a complex value.
12636 The type is taken from the type of ARG and is used for setting the
12637 precision of the calculation and results. */
12638
12639 static tree
12640 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
12641 {
12642 tree const type = TREE_TYPE (arg);
12643 tree result = NULL_TREE;
12644
12645 STRIP_NOPS (arg);
12646
12647 /* To proceed, MPFR must exactly represent the target floating point
12648 format, which only happens when the target base equals two. */
12649 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12650 && TREE_CODE (arg) == REAL_CST
12651 && !TREE_OVERFLOW (arg))
12652 {
12653 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12654
12655 if (real_isfinite (ra))
12656 {
12657 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12658 tree result_s, result_c;
12659 int inexact;
12660 mpfr_t m, ms, mc;
12661
12662 mpfr_inits2 (prec, m, ms, mc, NULL);
12663 mpfr_from_real (m, ra, GMP_RNDN);
12664 mpfr_clear_flags ();
12665 inexact = mpfr_sin_cos (ms, mc, m, GMP_RNDN);
12666 result_s = do_mpfr_ckconv (ms, type, inexact);
12667 result_c = do_mpfr_ckconv (mc, type, inexact);
12668 mpfr_clears (m, ms, mc, NULL);
12669 if (result_s && result_c)
12670 {
12671 /* If asked to return the result as a complex value, do so. */
12672 if (!arg_sinp && !arg_cosp)
12673 return build_complex (build_complex_type (type),
12674 result_c, result_s);
12675
12676 /* Dereference the sin/cos pointer arguments. */
12677 arg_sinp = build_fold_indirect_ref (arg_sinp);
12678 arg_cosp = build_fold_indirect_ref (arg_cosp);
12679 /* Proceed iff valid pointer types were passed in. */
12680 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
12681 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
12682 {
12683 /* Set the values. */
12684 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
12685 result_s);
12686 TREE_SIDE_EFFECTS (result_s) = 1;
12687 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
12688 result_c);
12689 TREE_SIDE_EFFECTS (result_c) = 1;
12690 /* Combine the assignments into a compound expr. */
12691 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12692 result_s, result_c));
12693 }
12694 }
12695 }
12696 }
12697 return result;
12698 }
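
/* How callers are expected to use do_mpfr_sincos (a sketch only): a
   sincos (x, &s, &c) style builtin passes both pointer arguments so
   that *s and *c get assigned, while a cexpi-style caller passes NULL
   for both and receives the value as the complex constant
   cos (x) + i * sin (x).  */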
12699
12700 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
12701 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
12702 two-argument mpfr order N Bessel function FUNC on them and return
12703 the resulting value as a tree with type TYPE. The mpfr precision
12704 is set to the precision of TYPE. We assume that function FUNC
12705 returns zero if the result could be calculated exactly within the
12706 requested precision. */
12707 static tree
12708 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
12709 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
12710 const REAL_VALUE_TYPE *min, bool inclusive)
12711 {
12712 tree result = NULL_TREE;
12713
12714 STRIP_NOPS (arg1);
12715 STRIP_NOPS (arg2);
12716
12717 /* To proceed, MPFR must exactly represent the target floating point
12718 format, which only happens when the target base equals two. */
12719 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12720 && host_integerp (arg1, 0)
12721 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12722 {
12723 const HOST_WIDE_INT n = tree_low_cst (arg1, 0);
12724 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
12725
12726 if (n == (long) n
12727 && real_isfinite (ra)
12728 && (!min || real_compare (inclusive ? GE_EXPR : GT_EXPR, ra, min)))
12729 {
12730 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12731 int inexact;
12732 mpfr_t m;
12733
12734 mpfr_init2 (m, prec);
12735 mpfr_from_real (m, ra, GMP_RNDN);
12736 mpfr_clear_flags ();
12737 inexact = func (m, n, m, GMP_RNDN);
12738 result = do_mpfr_ckconv (m, type, inexact);
12739 mpfr_clear (m);
12740 }
12741 }
12742
12743 return result;
12744 }
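
/* For example (a sketch only), a fold of yn (n, x), which is only
   defined for x > 0, could be requested as:

     do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn, &dconst0, false);

   while jn (n, x) needs no lower bound and would pass NULL for MIN.  */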
12745
12746 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
12747 the pointer *(ARG_QUO) and return the result. The type is taken
12748 from the type of ARG0 and is used for setting the precision of the
12749 calculation and results. */
12750
12751 static tree
12752 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
12753 {
12754 tree const type = TREE_TYPE (arg0);
12755 tree result = NULL_TREE;
12756
12757 STRIP_NOPS (arg0);
12758 STRIP_NOPS (arg1);
12759
12760 /* To proceed, MPFR must exactly represent the target floating point
12761 format, which only happens when the target base equals two. */
12762 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12763 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
12764 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
12765 {
12766 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
12767 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
12768
12769 if (real_isfinite (ra0) && real_isfinite (ra1))
12770 {
12771 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12772 tree result_rem;
12773 long integer_quo;
12774 mpfr_t m0, m1;
12775
12776 mpfr_inits2 (prec, m0, m1, NULL);
12777 mpfr_from_real (m0, ra0, GMP_RNDN);
12778 mpfr_from_real (m1, ra1, GMP_RNDN);
12779 mpfr_clear_flags ();
12780 mpfr_remquo (m0, &integer_quo, m0, m1, GMP_RNDN);
12781 /* Remquo is independent of the rounding mode, so pass
12782 inexact=0 to do_mpfr_ckconv(). */
12783 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
12784 mpfr_clears (m0, m1, NULL);
12785 if (result_rem)
12786 {
12787 /* MPFR calculates quo in the host's long, so it may return
12788 more bits in quo than the target int can hold if
12789 sizeof (host long) > sizeof (target int). This can happen
12790 even for native compilers in LP64 mode. In these cases,
12791 reduce the quo value modulo 2^(INT_TYPE_SIZE - 1), i.e. keep
12792 only as many bits as the target int can hold while leaving
12793 one bit for the sign. */
12794 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
12795 integer_quo %= (long) (1UL << (INT_TYPE_SIZE - 1));
12796
12797 /* Dereference the quo pointer argument. */
12798 arg_quo = build_fold_indirect_ref (arg_quo);
12799 /* Proceed iff a valid pointer type was passed in. */
12800 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
12801 {
12802 /* Set the value. */
12803 tree result_quo = fold_build2 (MODIFY_EXPR,
12804 TREE_TYPE (arg_quo), arg_quo,
12805 build_int_cst (NULL, integer_quo));
12806 TREE_SIDE_EFFECTS (result_quo) = 1;
12807 /* Combine the quo assignment with the rem. */
12808 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12809 result_quo, result_rem));
12810 }
12811 }
12812 }
12813 }
12814 return result;
12815 }
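
/* A worked example of the quo reduction above, for illustration only:
   on an LP64 host targeting a 32-bit int, a host-long quo value of
   0x123456789 is reduced modulo 2^31, leaving 0x23456789, which fits
   in the target int.  */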
12816
12817 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
12818 resulting value as a tree with type TYPE. The mpfr precision is
12819 set to the precision of TYPE. We assume that this mpfr function
12820 returns zero if the result could be calculated exactly within the
12821 requested precision. In addition, the integer pointer represented
12822 by ARG_SG will be dereferenced and set to the appropriate signgam
12823 (-1,1) value. */
12824
12825 static tree
12826 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
12827 {
12828 tree result = NULL_TREE;
12829
12830 STRIP_NOPS (arg);
12831
12832 /* To proceed, MPFR must exactly represent the target floating point
12833 format, which only happens when the target base equals two. Also
12834 verify ARG is a constant and that ARG_SG is an int pointer. */
12835 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12836 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
12837 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
12838 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
12839 {
12840 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
12841
12842 /* In addition to NaN and Inf, the argument cannot be zero or a
12843 negative integer. */
12844 if (real_isfinite (ra)
12845 && ra->cl != rvc_zero
12846 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
12847 {
12848 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12849 int inexact, sg;
12850 mpfr_t m;
12851 tree result_lg;
12852
12853 mpfr_init2 (m, prec);
12854 mpfr_from_real (m, ra, GMP_RNDN);
12855 mpfr_clear_flags ();
12856 inexact = mpfr_lgamma (m, &sg, m, GMP_RNDN);
12857 result_lg = do_mpfr_ckconv (m, type, inexact);
12858 mpfr_clear (m);
12859 if (result_lg)
12860 {
12861 tree result_sg;
12862
12863 /* Dereference the arg_sg pointer argument. */
12864 arg_sg = build_fold_indirect_ref (arg_sg);
12865 /* Assign the signgam value into *arg_sg. */
12866 result_sg = fold_build2 (MODIFY_EXPR,
12867 TREE_TYPE (arg_sg), arg_sg,
12868 build_int_cst (NULL, sg));
12869 TREE_SIDE_EFFECTS (result_sg) = 1;
12870 /* Combine the signgam assignment with the lgamma result. */
12871 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12872 result_sg, result_lg));
12873 }
12874 }
12875 }
12876
12877 return result;
12878 }
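
/* A caller folding lgamma_r (x, &signgam) on a REAL_CST x (a sketch;
   the real call site is elsewhere in this file) would invoke
   do_mpfr_lgamma_r (arg0, arg1, type) and receive a COMPOUND_EXPR
   that stores the sign into *arg1 and yields the lgamma value.  */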
12879 #endif