i386.c (enum pta_flags): Move out of struct scope...
[gcc.git] / gcc / builtins.c
1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
4 Free Software Foundation, Inc.
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
21 02110-1301, USA. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "machmode.h"
28 #include "real.h"
29 #include "rtl.h"
30 #include "tree.h"
31 #include "tree-gimple.h"
32 #include "flags.h"
33 #include "regs.h"
34 #include "hard-reg-set.h"
35 #include "except.h"
36 #include "function.h"
37 #include "insn-config.h"
38 #include "expr.h"
39 #include "optabs.h"
40 #include "libfuncs.h"
41 #include "recog.h"
42 #include "output.h"
43 #include "typeclass.h"
44 #include "toplev.h"
45 #include "predict.h"
46 #include "tm_p.h"
47 #include "target.h"
48 #include "langhooks.h"
49 #include "basic-block.h"
50 #include "tree-mudflap.h"
51 #include "tree-flow.h"
52 #include "value-prof.h"
53
54 #ifndef PAD_VARARGS_DOWN
55 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
56 #endif
57
/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[4]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

/* X-macro trick: each DEF_BUILTIN entry in builtins.def expands to the
   stringized enumerator name, yielding a printable-name table indexed
   by enum built_in_function.  */
#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN

/* Setup an array of _DECL trees, make sure each element is
   initialized to NULL_TREE.  */
tree built_in_decls[(int) END_BUILTINS];
/* Declarations used when constructing the builtin implicitly in the compiler.
   It may be NULL_TREE when this is invalid (for instance runtime is not
   required to implement the function call in all cases).  */
tree implicit_built_in_decls[(int) END_BUILTINS];
76
77 static const char *c_getstr (tree);
78 static rtx c_readstr (const char *, enum machine_mode);
79 static int target_char_cast (tree, char *);
80 static rtx get_memory_rtx (tree, tree);
81 static int apply_args_size (void);
82 static int apply_result_size (void);
83 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
84 static rtx result_vector (int, rtx);
85 #endif
86 static void expand_builtin_update_setjmp_buf (rtx);
87 static void expand_builtin_prefetch (tree);
88 static rtx expand_builtin_apply_args (void);
89 static rtx expand_builtin_apply_args_1 (void);
90 static rtx expand_builtin_apply (rtx, rtx, rtx);
91 static void expand_builtin_return (rtx);
92 static enum type_class type_to_class (tree);
93 static rtx expand_builtin_classify_type (tree);
94 static void expand_errno_check (tree, rtx);
95 static rtx expand_builtin_mathfn (tree, rtx, rtx);
96 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
97 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
98 static rtx expand_builtin_interclass_mathfn (tree, rtx, rtx);
99 static rtx expand_builtin_sincos (tree);
100 static rtx expand_builtin_cexpi (tree, rtx, rtx);
101 static rtx expand_builtin_int_roundingfn (tree, rtx, rtx);
102 static rtx expand_builtin_int_roundingfn_2 (tree, rtx, rtx);
103 static rtx expand_builtin_args_info (tree);
104 static rtx expand_builtin_next_arg (void);
105 static rtx expand_builtin_va_start (tree);
106 static rtx expand_builtin_va_end (tree);
107 static rtx expand_builtin_va_copy (tree);
108 static rtx expand_builtin_memchr (tree, rtx, enum machine_mode);
109 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
110 static rtx expand_builtin_strcmp (tree, rtx, enum machine_mode);
111 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
112 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
113 static rtx expand_builtin_strcat (tree, tree, rtx, enum machine_mode);
114 static rtx expand_builtin_strncat (tree, rtx, enum machine_mode);
115 static rtx expand_builtin_strspn (tree, rtx, enum machine_mode);
116 static rtx expand_builtin_strcspn (tree, rtx, enum machine_mode);
117 static rtx expand_builtin_memcpy (tree, rtx, enum machine_mode);
118 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
119 static rtx expand_builtin_mempcpy_args (tree, tree, tree, tree, rtx,
120 enum machine_mode, int);
121 static rtx expand_builtin_memmove (tree, rtx, enum machine_mode, int);
122 static rtx expand_builtin_memmove_args (tree, tree, tree, tree, rtx,
123 enum machine_mode, int);
124 static rtx expand_builtin_bcopy (tree, int);
125 static rtx expand_builtin_strcpy (tree, tree, rtx, enum machine_mode);
126 static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx, enum machine_mode);
127 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
128 static rtx builtin_strncpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
129 static rtx expand_builtin_strncpy (tree, rtx, enum machine_mode);
130 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
131 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
132 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
133 static rtx expand_builtin_bzero (tree);
134 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
135 static rtx expand_builtin_strstr (tree, rtx, enum machine_mode);
136 static rtx expand_builtin_strpbrk (tree, rtx, enum machine_mode);
137 static rtx expand_builtin_strchr (tree, rtx, enum machine_mode);
138 static rtx expand_builtin_strrchr (tree, rtx, enum machine_mode);
139 static rtx expand_builtin_alloca (tree, rtx);
140 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
141 static rtx expand_builtin_frame_address (tree, tree);
142 static rtx expand_builtin_fputs (tree, rtx, bool);
143 static rtx expand_builtin_printf (tree, rtx, enum machine_mode, bool);
144 static rtx expand_builtin_fprintf (tree, rtx, enum machine_mode, bool);
145 static rtx expand_builtin_sprintf (tree, rtx, enum machine_mode);
146 static tree stabilize_va_list (tree, int);
147 static rtx expand_builtin_expect (tree, rtx);
148 static tree fold_builtin_constant_p (tree);
149 static tree fold_builtin_expect (tree);
150 static tree fold_builtin_classify_type (tree);
151 static tree fold_builtin_strlen (tree);
152 static tree fold_builtin_inf (tree, int);
153 static tree fold_builtin_nan (tree, tree, int);
154 static tree rewrite_call_expr (tree, int, tree, int, ...);
155 static bool validate_arg (tree, enum tree_code code);
156 static bool integer_valued_real_p (tree);
157 static tree fold_trunc_transparent_mathfn (tree, tree);
158 static bool readonly_data_expr (tree);
159 static rtx expand_builtin_fabs (tree, rtx, rtx);
160 static rtx expand_builtin_signbit (tree, rtx);
161 static tree fold_builtin_sqrt (tree, tree);
162 static tree fold_builtin_cbrt (tree, tree);
163 static tree fold_builtin_pow (tree, tree, tree, tree);
164 static tree fold_builtin_powi (tree, tree, tree, tree);
165 static tree fold_builtin_cos (tree, tree, tree);
166 static tree fold_builtin_cosh (tree, tree, tree);
167 static tree fold_builtin_tan (tree, tree);
168 static tree fold_builtin_trunc (tree, tree);
169 static tree fold_builtin_floor (tree, tree);
170 static tree fold_builtin_ceil (tree, tree);
171 static tree fold_builtin_round (tree, tree);
172 static tree fold_builtin_int_roundingfn (tree, tree);
173 static tree fold_builtin_bitop (tree, tree);
174 static tree fold_builtin_memory_op (tree, tree, tree, tree, bool, int);
175 static tree fold_builtin_strchr (tree, tree, tree);
176 static tree fold_builtin_memchr (tree, tree, tree, tree);
177 static tree fold_builtin_memcmp (tree, tree, tree);
178 static tree fold_builtin_strcmp (tree, tree);
179 static tree fold_builtin_strncmp (tree, tree, tree);
180 static tree fold_builtin_signbit (tree, tree);
181 static tree fold_builtin_copysign (tree, tree, tree, tree);
182 static tree fold_builtin_isascii (tree);
183 static tree fold_builtin_toascii (tree);
184 static tree fold_builtin_isdigit (tree);
185 static tree fold_builtin_fabs (tree, tree);
186 static tree fold_builtin_abs (tree, tree);
187 static tree fold_builtin_unordered_cmp (tree, tree, tree, enum tree_code,
188 enum tree_code);
189 static tree fold_builtin_n (tree, tree *, int, bool);
190 static tree fold_builtin_0 (tree, bool);
191 static tree fold_builtin_1 (tree, tree, bool);
192 static tree fold_builtin_2 (tree, tree, tree, bool);
193 static tree fold_builtin_3 (tree, tree, tree, tree, bool);
194 static tree fold_builtin_4 (tree, tree, tree, tree, tree, bool);
195 static tree fold_builtin_varargs (tree, tree, bool);
196
197 static tree fold_builtin_strpbrk (tree, tree, tree);
198 static tree fold_builtin_strstr (tree, tree, tree);
199 static tree fold_builtin_strrchr (tree, tree, tree);
200 static tree fold_builtin_strcat (tree, tree);
201 static tree fold_builtin_strncat (tree, tree, tree);
202 static tree fold_builtin_strspn (tree, tree);
203 static tree fold_builtin_strcspn (tree, tree);
204 static tree fold_builtin_sprintf (tree, tree, tree, int);
205
206 static rtx expand_builtin_object_size (tree);
207 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
208 enum built_in_function);
209 static void maybe_emit_chk_warning (tree, enum built_in_function);
210 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
211 static tree fold_builtin_object_size (tree, tree);
212 static tree fold_builtin_strcat_chk (tree, tree, tree, tree);
213 static tree fold_builtin_strncat_chk (tree, tree, tree, tree, tree);
214 static tree fold_builtin_sprintf_chk (tree, enum built_in_function);
215 static tree fold_builtin_printf (tree, tree, tree, bool, enum built_in_function);
216 static tree fold_builtin_fprintf (tree, tree, tree, tree, bool,
217 enum built_in_function);
218 static bool init_target_chars (void);
219
220 static unsigned HOST_WIDE_INT target_newline;
221 static unsigned HOST_WIDE_INT target_percent;
222 static unsigned HOST_WIDE_INT target_c;
223 static unsigned HOST_WIDE_INT target_s;
224 static char target_percent_c[3];
225 static char target_percent_s[3];
226 static char target_percent_s_newline[4];
227 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
228 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
229 static tree do_mpfr_arg2 (tree, tree, tree,
230 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
231 static tree do_mpfr_arg3 (tree, tree, tree, tree,
232 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
233 static tree do_mpfr_sincos (tree, tree, tree);
234 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
235 static tree do_mpfr_bessel_n (tree, tree, tree,
236 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
237 const REAL_VALUE_TYPE *, bool);
238 static tree do_mpfr_remquo (tree, tree, tree);
239 static tree do_mpfr_lgamma_r (tree, tree, tree);
240 #endif
241
242 /* Return true if NODE should be considered for inline expansion regardless
243 of the optimization level. This means whenever a function is invoked with
244 its "internal" name, which normally contains the prefix "__builtin". */
245
246 static bool called_as_built_in (tree node)
247 {
248 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
249 if (strncmp (name, "__builtin_", 10) == 0)
250 return true;
251 if (strncmp (name, "__sync_", 7) == 0)
252 return true;
253 return false;
254 }
255
/* Return the alignment in bits of EXP, a pointer valued expression.
   But don't return more than MAX_ALIGN no matter what.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

int
get_pointer_alignment (tree exp, unsigned int max_align)
{
  unsigned int align, inner;

  /* We rely on TER to compute accurate alignment information.  */
  if (!(optimize && flag_tree_ter))
    return 0;

  if (!POINTER_TYPE_P (TREE_TYPE (exp)))
    return 0;

  /* Start from the declared alignment of the pointed-to type, clamped
     to the caller's upper bound.  */
  align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
  align = MIN (align, max_align);

  /* Strip conversions and additions, refining ALIGN as we descend.  */
  while (1)
    {
      switch (TREE_CODE (exp))
	{
	case NOP_EXPR:
	case CONVERT_EXPR:
	case NON_LVALUE_EXPR:
	  /* A conversion may expose a pointer type with stricter
	     alignment; adopt it and keep looking.  */
	  exp = TREE_OPERAND (exp, 0);
	  if (! POINTER_TYPE_P (TREE_TYPE (exp)))
	    return align;

	  inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
	  align = MIN (inner, max_align);
	  break;

	case PLUS_EXPR:
	  /* If sum of pointer + int, restrict our maximum alignment to that
	     imposed by the integer.  If not, we can't do any better than
	     ALIGN.  */
	  if (! host_integerp (TREE_OPERAND (exp, 1), 1))
	    return align;

	  /* Halve MAX_ALIGN until the constant addend is a multiple of
	     it; the low set bits of the addend bound the alignment.  */
	  while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
		  & (max_align / BITS_PER_UNIT - 1))
		 != 0)
	    max_align >>= 1;

	  exp = TREE_OPERAND (exp, 0);
	  break;

	case ADDR_EXPR:
	  /* See what we are pointing at and look at its alignment.  */
	  exp = TREE_OPERAND (exp, 0);
	  inner = max_align;
	  if (handled_component_p (exp))
	    {
	      HOST_WIDE_INT bitsize, bitpos;
	      tree offset;
	      enum machine_mode mode;
	      int unsignedp, volatilep;

	      exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
					 &mode, &unsignedp, &volatilep, true);
	      /* BITPOS & -BITPOS isolates the lowest set bit of the
		 constant bit offset, which bounds the alignment.  */
	      if (bitpos)
		inner = MIN (inner, (unsigned) (bitpos & -bitpos));
	      if (offset && TREE_CODE (offset) == PLUS_EXPR
		  && host_integerp (TREE_OPERAND (offset, 1), 1))
	        {
		  /* Any overflow in calculating offset_bits won't change
		     the alignment.  */
		  unsigned offset_bits
		    = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
		       * BITS_PER_UNIT);

		  if (offset_bits)
		    inner = MIN (inner, (offset_bits & -offset_bits));
		  offset = TREE_OPERAND (offset, 0);
		}
	      if (offset && TREE_CODE (offset) == MULT_EXPR
		  && host_integerp (TREE_OPERAND (offset, 1), 1))
	        {
		  /* Any overflow in calculating offset_factor won't change
		     the alignment.  */
		  unsigned offset_factor
		    = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
		       * BITS_PER_UNIT);

		  if (offset_factor)
		    inner = MIN (inner, (offset_factor & -offset_factor));
		}
	      else if (offset)
		/* A variable offset of unknown value: assume no more
		   than byte alignment.  */
		inner = MIN (inner, BITS_PER_UNIT);
	    }
	  if (TREE_CODE (exp) == FUNCTION_DECL)
	    align = FUNCTION_BOUNDARY;
	  else if (DECL_P (exp))
	    align = MIN (inner, DECL_ALIGN (exp));
#ifdef CONSTANT_ALIGNMENT
	  else if (CONSTANT_CLASS_P (exp))
	    align = MIN (inner, (unsigned)CONSTANT_ALIGNMENT (exp, align));
#endif
	  else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR
		   || TREE_CODE (exp) == INDIRECT_REF)
	    align = MIN (TYPE_ALIGN (TREE_TYPE (exp)), inner);
	  else
	    align = MIN (align, inner);
	  return MIN (align, max_align);

	default:
	  return align;
	}
    }
}
372
/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  tree offset_node;
  HOST_WIDE_INT offset;
  int max;
  const char *ptr;

  STRIP_NOPS (src);
  /* (cond ? s1 : s2) has a known length only when both arms agree;
     side effects in the condition forbid this unless ONLY_VALUE
     guarantees they will never be expanded.  */
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  /* (e1, e2) has the length of E2, under the same side-effect proviso.  */
  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  src = string_constant (src, &offset_node);
  if (src == 0)
    return NULL_TREE;

  /* MAX is the array size minus the terminating slot.  */
  max = TREE_STRING_LENGTH (src) - 1;
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      int i;

      for (i = 0; i < max; i++)
	if (ptr[i] == 0)
	  return NULL_TREE;

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  This would perhaps not be valid if we were dealing
	 with named arrays in addition to literal string constants.  */

      return size_diffop (size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (offset_node == 0)
    offset = 0;
  else if (! host_integerp (offset_node, 0))
    offset = -1;
  else
    offset = tree_low_cst (offset_node, 0);

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      warning (0, "offset outside bounds of constant string");
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return ssize_int (strlen (ptr + offset));
}
466
467 /* Return a char pointer for a C string if it is a string constant
468 or sum of string constant and integer constant. */
469
470 static const char *
471 c_getstr (tree src)
472 {
473 tree offset_node;
474
475 src = string_constant (src, &offset_node);
476 if (src == 0)
477 return 0;
478
479 if (offset_node == 0)
480 return TREE_STRING_POINTER (src);
481 else if (!host_integerp (offset_node, 1)
482 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
483 return 0;
484
485 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
486 }
487
/* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, enum machine_mode mode)
{
  /* C[0] is the low host word of the constant, C[1] the high word.  */
  HOST_WIDE_INT c[2];
  HOST_WIDE_INT ch;
  unsigned int i, j;

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);

  c[0] = 0;
  c[1] = 0;
  /* CH doubles as the current byte and as a "still inside the string"
     flag: once a NUL byte is read it stays zero, so all following
     target bytes are zero-filled.  */
  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      /* Map string index I to target byte position J, accounting for
	 word endianness, and then for byte-within-word endianness when
	 the two differ on multiword modes.  */
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      /* Convert the byte position to a bit position.  */
      j *= BITS_PER_UNIT;
      gcc_assert (j <= 2 * HOST_BITS_PER_WIDE_INT);

      if (ch)
	ch = (unsigned char) str[i];
      c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }
  return immed_double_const (c[0], c[1], mode);
}
520
521 /* Cast a target constant CST to target CHAR and if that value fits into
522 host char type, return zero and put that value into variable pointed to by
523 P. */
524
525 static int
526 target_char_cast (tree cst, char *p)
527 {
528 unsigned HOST_WIDE_INT val, hostval;
529
530 if (!host_integerp (cst, 1)
531 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
532 return 1;
533
534 val = tree_low_cst (cst, 1);
535 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
536 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
537
538 hostval = val;
539 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
540 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
541
542 if (val != hostval)
543 return 1;
544
545 *p = hostval;
546 return 0;
547 }
548
549 /* Similar to save_expr, but assumes that arbitrary code is not executed
550 in between the multiple evaluations. In particular, we assume that a
551 non-addressable local variable will not be modified. */
552
553 static tree
554 builtin_save_expr (tree exp)
555 {
556 if (TREE_ADDRESSABLE (exp) == 0
557 && (TREE_CODE (exp) == PARM_DECL
558 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp))))
559 return exp;
560
561 return save_expr (exp);
562 }
563
/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;

  /* The target may supply the initial frame address directly.  */
#ifdef INITIAL_FRAME_ADDRESS_RTX
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
#else
  rtx tem;

  /* For a zero count with __builtin_return_address, we don't care what
     frame address we return, because target-specific definitions will
     override us.  Therefore frame pointer elimination is OK, and using
     the soft frame pointer is OK.

     For a nonzero count, or a zero count with __builtin_frame_address,
     we require a stable offset from the current frame pointer to the
     previous one, so we must use the hard frame pointer, and
     we must disable frame pointer elimination.  */
  if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    tem = frame_pointer_rtx;
  else
    {
      tem = hard_frame_pointer_rtx;

      /* Tell reload not to eliminate the frame pointer.  */
      current_function_accesses_prior_frames = 1;
    }
#endif

  /* Some machines need special handling before we can access
     arbitrary frames.  For example, on the SPARC, we must first flush
     all register windows to the stack.  */
#ifdef SETUP_FRAME_ADDRESSES
  if (count > 0)
    SETUP_FRAME_ADDRESSES ();
#endif

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
  if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;
#endif

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
#ifdef DYNAMIC_CHAIN_ADDRESS
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
      /* Load the saved chain pointer from that frame into a register.  */
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
#ifdef FRAME_ADDR_RTX
    return FRAME_ADDR_RTX (tem);
#else
    return tem;
#endif

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  /* Default: the return address is stored one word past the frame
     address.  */
  tem = memory_address (Pmode,
			plus_constant (tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
647
648 /* Alias set used for setjmp buffer. */
649 static HOST_WIDE_INT setjmp_alias_set = -1;
650
651 /* Construct the leading half of a __builtin_setjmp call. Control will
652 return to RECEIVER_LABEL. This is also called directly by the SJLJ
653 exception handling code. */
654
655 void
656 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
657 {
658 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
659 rtx stack_save;
660 rtx mem;
661
662 if (setjmp_alias_set == -1)
663 setjmp_alias_set = new_alias_set ();
664
665 buf_addr = convert_memory_address (Pmode, buf_addr);
666
667 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
668
669 /* We store the frame pointer and the address of receiver_label in
670 the buffer and use the rest of it for the stack save area, which
671 is machine-dependent. */
672
673 mem = gen_rtx_MEM (Pmode, buf_addr);
674 set_mem_alias_set (mem, setjmp_alias_set);
675 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
676
677 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
678 set_mem_alias_set (mem, setjmp_alias_set);
679
680 emit_move_insn (validize_mem (mem),
681 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
682
683 stack_save = gen_rtx_MEM (sa_mode,
684 plus_constant (buf_addr,
685 2 * GET_MODE_SIZE (Pmode)));
686 set_mem_alias_set (stack_save, setjmp_alias_set);
687 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
688
689 /* If there is further processing to do, do it. */
690 #ifdef HAVE_builtin_setjmp_setup
691 if (HAVE_builtin_setjmp_setup)
692 emit_insn (gen_builtin_setjmp_setup (buf_addr));
693 #endif
694
695 /* Tell optimize_save_area_alloca that extra work is going to
696 need to go on during alloca. */
697 current_function_calls_setjmp = 1;
698
699 /* We have a nonlocal label. */
700 current_function_has_nonlocal_label = 1;
701 }
702
/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
{
  /* Clobber the FP when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
#ifdef HAVE_nonlocal_goto
  if (! HAVE_nonlocal_goto)
#endif
    {
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
      /* This might change the hard frame pointer in ways that aren't
	 apparent to early optimization passes, so force a clobber.  */
      emit_insn (gen_rtx_CLOBBER (VOIDmode, hard_frame_pointer_rtx));
    }

#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (virtual_incoming_args_rtx,
			  copy_to_reg (get_arg_pointer_save_area (cfun)));
	}
    }
#endif

#ifdef HAVE_builtin_setjmp_receiver
  if (HAVE_builtin_setjmp_receiver)
    emit_insn (gen_builtin_setjmp_receiver (receiver_label));
  else
#endif
#ifdef HAVE_nonlocal_goto_receiver
    if (HAVE_nonlocal_goto_receiver)
      emit_insn (gen_nonlocal_goto_receiver ());
    else
#endif
      { /* Nothing */ }

  /* @@@ This is a kludge.  Not all machine descriptions define a blockage
     insn, but we must not allow the code we just generated to be reordered
     by scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  So emit an ASM_INPUT to act as blockage
     insn.  */
  emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
}
771
/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack, insn, last;
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* Lazily create the alias set shared by all setjmp buffer accesses.  */
  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We used to store value in static_chain_rtx, but that fails if pointers
     are smaller than integers.  We instead require that the user must pass
     a second argument of 1, because that is what builtin_setjmp will
     return.  This also makes EH slightly more efficient, since we are no
     longer copying around a value that we don't care about.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
#ifdef HAVE_builtin_longjmp
  if (HAVE_builtin_longjmp)
    emit_insn (gen_builtin_longjmp (buf_addr));
  else
#endif
    {
      /* The buffer layout matches expand_builtin_setjmp_setup: frame
	 pointer at word 0, receiver label at word 1, stack save area
	 starting at word 2.  */
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
#ifdef HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
      else
#endif
	{
	  lab = copy_to_reg (lab);

	  /* Clobber memory and the frame pointer so nothing is
	     reordered across the restore sequence.  */
	  emit_insn (gen_rtx_CLOBBER (VOIDmode,
				      gen_rtx_MEM (BLKmode,
						   gen_rtx_SCRATCH (VOIDmode))));
	  emit_insn (gen_rtx_CLOBBER (VOIDmode,
				      gen_rtx_MEM (BLKmode,
						   hard_frame_pointer_rtx)));

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);

	  emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
	  emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO, const0_rtx,
					      REG_NOTES (insn));
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}
862
/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  The save area is a two-word block
   holding the target frame pointer and, one Pmode word later, the target
   stack pointer.  Returns const0_rtx (control never falls through at run
   time), or NULL_RTX if the argument list is malformed and a normal call
   should be emitted instead.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp, insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Word 0 of the save area is the frame pointer; word 1 is the stack
     pointer, saved in the target's nonlocal save-area mode.  */
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));

  current_function_has_nonlocal_goto = 1;

#ifdef HAVE_nonlocal_goto
  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (HAVE_nonlocal_goto)
    emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
#endif
    {
      r_label = copy_to_reg (r_label);

      /* Clobber all of memory and the hard frame pointer so that no
	 value cached in a register is believed to survive the jump.  */
      emit_insn (gen_rtx_CLOBBER (VOIDmode,
				  gen_rtx_MEM (BLKmode,
					       gen_rtx_SCRATCH (VOIDmode))));

      emit_insn (gen_rtx_CLOBBER (VOIDmode,
				  gen_rtx_MEM (BLKmode,
					       hard_frame_pointer_rtx)));

      /* Restore frame pointer for containing function.
	 This sets the actual hard register used for the frame pointer
	 to the location of the function's incoming static chain info.
	 The non-local goto handler will then adjust it to contain the
	 proper value and reload the argument pointer, if needed.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp, NULL_RTX);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
      emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  Stop at any intervening CALL: the jump we are
     looking for cannot be before it.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO,
					      const0_rtx, REG_NOTES (insn));
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}
936
/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to correspond to the current
   stack pointer.  BUF_ADDR is the RTX holding the buffer's address.  */

static void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  enum machine_mode sa_mode = Pmode;
  rtx stack_save;


  /* Determine the mode in which the stack pointer is saved: default to
     Pmode, prefer the save_stack_nonlocal insn's operand mode if the
     target has one, and let STACK_SAVEAREA_MODE override both.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    sa_mode = insn_data[(int) CODE_FOR_save_stack_nonlocal].operand[0].mode;
#endif
#ifdef STACK_SAVEAREA_MODE
  sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
#endif

  /* The saved stack pointer lives in the third word of the buffer,
     after the frame pointer and the label address.  */
  stack_save
    = gen_rtx_MEM (sa_mode,
		   memory_address
		   (sa_mode,
		    plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));

#ifdef HAVE_setjmp
  if (HAVE_setjmp)
    emit_insn (gen_setjmp ());
#endif

  emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
}
970
/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  EXP is the CALL_EXPR; arguments 1 (read/write flag, 0 or 1)
   and 2 (locality, 0..3) are optional and must be compile-time integer
   constants.  Emits diagnostics and substitutes safe defaults for invalid
   arguments rather than aborting.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = build_int_cst (NULL_TREE, 3);

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

#ifdef HAVE_prefetch
  if (HAVE_prefetch)
    {
      /* If the address fails the prefetch insn's operand predicate or is
	 in the wrong mode, force it into a Pmode register first.  */
      if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
	     (op0,
	      insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
	  || (GET_MODE (op0) != Pmode))
	{
	  op0 = convert_memory_address (Pmode, op0);
	  op0 = force_reg (Pmode, op0);
	}
      emit_insn (gen_prefetch (op0, op1, op2));
    }
#endif

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
1052
/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  The returned BLKmode MEM carries attributes derived
   from EXP, except for the alias set and size (cleared below).  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  rtx addr = expand_expr (exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  rtx mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     If it is an ADDR_EXPR, use the operand.  Otherwise, dereference it if
     we can.  First remove any nops.  */
  while ((TREE_CODE (exp) == NOP_EXPR || TREE_CODE (exp) == CONVERT_EXPR
	  || TREE_CODE (exp) == NON_LVALUE_EXPR)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  if (TREE_CODE (exp) == ADDR_EXPR)
    exp = TREE_OPERAND (exp, 0);
  else if (POINTER_TYPE_P (TREE_TYPE (exp)))
    exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
  else
    exp = NULL;

  /* Honor attributes derived from exp, except for the alias set
     (as builtin stringops may alias with anything) and the size
     (as stringops may access multiple array elements).  */
  if (exp)
    {
      set_mem_attributes (mem, exp, 0);

      /* Allow the string and memory builtins to overflow from one
	 field into another, see http://gcc.gnu.org/PR23561.
	 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
	 memory accessed by the string or memory builtin will fit
	 within the field.  */
      if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
	{
	  tree mem_expr = MEM_EXPR (mem);
	  /* OFFSET and LENGTH are tracked in bytes; -1 means unknown.  */
	  HOST_WIDE_INT offset = -1, length = -1;
	  tree inner = exp;

	  /* Strip array indexing and conversions to find the innermost
	     COMPONENT_REF.  */
	  while (TREE_CODE (inner) == ARRAY_REF
		 || TREE_CODE (inner) == NOP_EXPR
		 || TREE_CODE (inner) == CONVERT_EXPR
		 || TREE_CODE (inner) == NON_LVALUE_EXPR
		 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
		 || TREE_CODE (inner) == SAVE_EXPR)
	    inner = TREE_OPERAND (inner, 0);

	  gcc_assert (TREE_CODE (inner) == COMPONENT_REF);

	  if (MEM_OFFSET (mem)
	      && GET_CODE (MEM_OFFSET (mem)) == CONST_INT)
	    offset = INTVAL (MEM_OFFSET (mem));

	  if (offset >= 0 && len && host_integerp (len, 0))
	    length = tree_low_cst (len, 0);

	  /* Walk outward through the chain of COMPONENT_REFs, dropping
	     levels until the access provably fits in the field.  */
	  while (TREE_CODE (inner) == COMPONENT_REF)
	    {
	      tree field = TREE_OPERAND (inner, 1);
	      gcc_assert (! DECL_BIT_FIELD (field));
	      gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
	      gcc_assert (field == TREE_OPERAND (mem_expr, 1));

	      if (length >= 0
		  && TYPE_SIZE_UNIT (TREE_TYPE (inner))
		  && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (inner)), 0))
		{
		  HOST_WIDE_INT size
		    = tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (inner)), 0);
		  /* If we can prove the memory starting at XEXP (mem, 0)
		     and ending at XEXP (mem, 0) + LENGTH will fit into
		     this field, we can keep that COMPONENT_REF in MEM_EXPR.  */
		  if (offset <= size
		      && length <= size
		      && offset + length <= size)
		    break;
		}

	      if (offset >= 0
		  && host_integerp (DECL_FIELD_OFFSET (field), 0))
		offset += tree_low_cst (DECL_FIELD_OFFSET (field), 0)
			  + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
			    / BITS_PER_UNIT;
	      else
		{
		  /* Field offset is not a known constant; give up on
		     tracking the byte position.  */
		  offset = -1;
		  length = -1;
		}

	      mem_expr = TREE_OPERAND (mem_expr, 0);
	      inner = TREE_OPERAND (inner, 0);
	    }

	  if (mem_expr == NULL)
	    offset = -1;
	  if (mem_expr != MEM_EXPR (mem))
	    {
	      set_mem_expr (mem, mem_expr);
	      set_mem_offset (mem, offset >= 0 ? GEN_INT (offset) : NULL_RTX);
	    }
	}
      set_mem_alias_set (mem, 0);
      set_mem_size (mem, NULL_RTX);
    }

  return mem;
}
1165 \f
/* Built-in functions to perform an untyped call and return.  */

/* For each register that may be used for calling a function, this
   gives a mode used to copy the register's value.  VOIDmode indicates
   the register is not used for calling a function.  If the machine
   has register windows, this gives only the outbound registers.
   INCOMING_REGNO gives the corresponding inbound register.
   Initialized lazily by apply_args_size.  */
static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];

/* For each register that may be used for returning values, this gives
   a mode used to copy the register's value.  VOIDmode indicates the
   register is not used for returning values.  If the machine has
   register windows, this gives only the outbound registers.
   INCOMING_REGNO gives the corresponding inbound register.
   Initialized lazily by apply_result_size.  */
static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];

/* For each register that may be used for calling a function, this
   gives the offset of that register into the block returned by
   __builtin_apply_args.  0 indicates that the register is not
   used for calling a function.  Initialized lazily by apply_args_size.  */
static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
1187
1188 /* Return the size required for the block returned by __builtin_apply_args,
1189 and initialize apply_args_mode. */
1190
1191 static int
1192 apply_args_size (void)
1193 {
1194 static int size = -1;
1195 int align;
1196 unsigned int regno;
1197 enum machine_mode mode;
1198
1199 /* The values computed by this function never change. */
1200 if (size < 0)
1201 {
1202 /* The first value is the incoming arg-pointer. */
1203 size = GET_MODE_SIZE (Pmode);
1204
1205 /* The second value is the structure value address unless this is
1206 passed as an "invisible" first argument. */
1207 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1208 size += GET_MODE_SIZE (Pmode);
1209
1210 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1211 if (FUNCTION_ARG_REGNO_P (regno))
1212 {
1213 mode = reg_raw_mode[regno];
1214
1215 gcc_assert (mode != VOIDmode);
1216
1217 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1218 if (size % align != 0)
1219 size = CEIL (size, align) * align;
1220 apply_args_reg_offset[regno] = size;
1221 size += GET_MODE_SIZE (mode);
1222 apply_args_mode[regno] = mode;
1223 }
1224 else
1225 {
1226 apply_args_mode[regno] = VOIDmode;
1227 apply_args_reg_offset[regno] = 0;
1228 }
1229 }
1230 return size;
1231 }
1232
1233 /* Return the size required for the block returned by __builtin_apply,
1234 and initialize apply_result_mode. */
1235
1236 static int
1237 apply_result_size (void)
1238 {
1239 static int size = -1;
1240 int align, regno;
1241 enum machine_mode mode;
1242
1243 /* The values computed by this function never change. */
1244 if (size < 0)
1245 {
1246 size = 0;
1247
1248 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1249 if (FUNCTION_VALUE_REGNO_P (regno))
1250 {
1251 mode = reg_raw_mode[regno];
1252
1253 gcc_assert (mode != VOIDmode);
1254
1255 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1256 if (size % align != 0)
1257 size = CEIL (size, align) * align;
1258 size += GET_MODE_SIZE (mode);
1259 apply_result_mode[regno] = mode;
1260 }
1261 else
1262 apply_result_mode[regno] = VOIDmode;
1263
1264 /* Allow targets that use untyped_call and untyped_return to override
1265 the size so that machine-specific information can be stored here. */
1266 #ifdef APPLY_RESULT_SIZE
1267 size = APPLY_RESULT_SIZE;
1268 #endif
1269 }
1270 return size;
1271 }
1272
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
/* Build a PARALLEL describing the result block RESULT.  When SAVEP is
   nonzero the SETs store each hard return register into its slot in the
   block; otherwise they load the slots back into the registers.  */

static rtx
result_vector (int savep, rtx result)
{
  rtx *sets = alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
  int count = 0, offset = 0;
  int regno;

  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    {
      enum machine_mode mode = apply_result_mode[regno];
      rtx reg, slot;
      int align;

      if (mode == VOIDmode)
	continue;

      /* Keep each slot aligned to its mode's natural alignment,
	 mirroring the layout computed by apply_result_size.  */
      align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
      if (offset % align != 0)
	offset = CEIL (offset, align) * align;

      /* When restoring on a register-window machine, target the
	 incoming register corresponding to the saved outgoing one.  */
      reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
      slot = adjust_address (result, mode, offset);
      sets[count++] = (savep
		       ? gen_rtx_SET (VOIDmode, slot, reg)
		       : gen_rtx_SET (VOIDmode, reg, slot));
      offset += GET_MODE_SIZE (mode);
    }

  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (count, sets));
}
#endif /* HAVE_untyped_call or HAVE_untyped_return */
1303
/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  Allocates a stack
   block laid out exactly as described by apply_args_size (arg pointer,
   optional structure value address, then each argument register) and
   stores the current incoming values into it.  Returns the block's
   address in a fresh register.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  enum machine_mode mode;
  rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (virtual_incoming_args_rtx);
#ifdef STACK_GROWS_DOWNWARD
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  tem
    = force_operand (plus_constant (tem, current_function_pretend_args_size),
		     NULL_RTX);
#endif
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
		      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}
1364
1365 /* __builtin_apply_args returns block of memory allocated on
1366 the stack into which is stored the arg pointer, structure
1367 value address, static chain, and all the registers that might
1368 possibly be used in performing a function call. The code is
1369 moved to the start of the function so the incoming values are
1370 saved. */
1371
1372 static rtx
1373 expand_builtin_apply_args (void)
1374 {
1375 /* Don't do __builtin_apply_args more than once in a function.
1376 Save the result of the first call and reuse it. */
1377 if (apply_args_value != 0)
1378 return apply_args_value;
1379 {
1380 /* When this function is called, it means that registers must be
1381 saved on entry to this function. So we migrate the
1382 call to the first insn of this function. */
1383 rtx temp;
1384 rtx seq;
1385
1386 start_sequence ();
1387 temp = expand_builtin_apply_args_1 ();
1388 seq = get_insns ();
1389 end_sequence ();
1390
1391 apply_args_value = temp;
1392
1393 /* Put the insns after the NOTE that starts the function.
1394 If this is inside a start_sequence, make the outer-level insn
1395 chain current, so the code is placed at the start of the
1396 function. */
1397 push_topmost_sequence ();
1398 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1399 pop_topmost_sequence ();
1400 return temp;
1401 }
1402 }
1403
/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.
   FUNCTION is the address of the callee, ARGUMENTS the address of an
   argument block as built by __builtin_apply_args, and ARGSIZE the size
   in bytes of the stack arguments to copy.  Returns the address (in
   ptr_mode) of a block holding the callee's return registers.  */

static rtx
expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
{
  int size, align, regno;
  enum machine_mode mode;
  rtx incoming_args, result, reg, dest, src, call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;
  rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);

  arguments = convert_memory_address (Pmode, arguments);

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
#ifndef STACK_GROWS_DOWNWARD
  incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
				       incoming_args, 0, OPTAB_LIB_WIDEN);
#endif

  /* Push a new argument block and copy the arguments.  Do not allow
     the (potential) memcpy call below to interfere with our stack
     manipulations.  */
  do_pending_stack_adjust ();
  NO_DEFER_POP;

  /* Save the stack with nonlocal if available.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
  else
#endif
    emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);

  /* Allocate a block of memory onto the stack and copy the memory
     arguments to the outgoing arguments address.  */
  allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
  dest = virtual_outgoing_args_rtx;
#ifndef STACK_GROWS_DOWNWARD
  if (GET_CODE (argsize) == CONST_INT)
    dest = plus_constant (dest, -INTVAL (argsize));
  else
    dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
#endif
  dest = gen_rtx_MEM (BLKmode, dest);
  set_mem_align (dest, PARM_BOUNDARY);
  src = gen_rtx_MEM (BLKmode, incoming_args);
  set_mem_align (src, PARM_BOUNDARY);
  emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);

  /* Refer to the argument block.  (apply_args_size is called for its
     side effect of initializing apply_args_mode.)  */
  apply_args_size ();
  arguments = gen_rtx_MEM (BLKmode, arguments);
  set_mem_align (arguments, PARM_BOUNDARY);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, regno);
	emit_move_insn (reg, adjust_address (arguments, mode, size));
	use_reg (&call_fusage, reg);
	size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value, adjust_address (arguments, Pmode, size));
      emit_move_insn (struct_value, value);
      if (REG_P (struct_value))
	use_reg (&call_fusage, struct_value);
      size += GET_MODE_SIZE (Pmode);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (function, NULL, &call_fusage, 0, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
#ifdef HAVE_untyped_call
  if (HAVE_untyped_call)
    emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
				      result, result_vector (1, result)));
  else
#endif
#ifdef HAVE_call_value
  if (HAVE_call_value)
    {
      rtx valreg = 0;

      /* Locate the unique return register.  It is not possible to
	 express a call that sets more than one return register using
	 call_value; use untyped_call for that.  In fact, untyped_call
	 only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if ((mode = apply_result_mode[regno]) != VOIDmode)
	  {
	    gcc_assert (!valreg); /* HAVE_untyped_call required.  */

	    valreg = gen_rtx_REG (mode, regno);
	  }

      emit_call_insn (GEN_CALL_VALUE (valreg,
				      gen_rtx_MEM (FUNCTION_MODE, function),
				      const0_rtx, NULL_RTX, const0_rtx));

      emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
    }
  else
#endif
    gcc_unreachable ();

  /* Find the CALL insn we just emitted, and attach the register usage
     information.  */
  call_insn = last_call_insn ();
  add_function_usage_to (call_insn, call_fusage);

  /* Restore the stack.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
  else
#endif
    emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);

  OK_DEFER_POP;

  /* Return the address of the result block.  */
  result = copy_addr_to_reg (XEXP (result, 0));
  return convert_memory_address (ptr_mode, result);
}
1559
/* Perform an untyped return.  RESULT is the address of a block holding
   the return register values, as produced by expand_builtin_apply.
   Reloads the return registers from the block and jumps to the end of
   the function; control does not fall through.  */

static void
expand_builtin_return (rtx result)
{
  int size, align, regno;
  enum machine_mode mode;
  rtx reg;
  rtx call_fusage = 0;

  result = convert_memory_address (Pmode, result);

  /* Called for its side effect of initializing apply_result_mode.  */
  apply_result_size ();
  result = gen_rtx_MEM (BLKmode, result);

#ifdef HAVE_untyped_return
  if (HAVE_untyped_return)
    {
      emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
      emit_barrier ();
      return;
    }
#endif

  /* Restore the return value and note that each value is used.  */
  size = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
	emit_move_insn (reg, adjust_address (result, mode, size));

	/* Accumulate USE insns in a side sequence so they can all be
	   emitted just before the return.  */
	push_to_sequence (call_fusage);
	emit_insn (gen_rtx_USE (VOIDmode, reg));
	call_fusage = get_insns ();
	end_sequence ();
	size += GET_MODE_SIZE (mode);
      }

  /* Put the USE insns before the return.  */
  emit_insn (call_fusage);

  /* Return whatever values was restored by jumping directly to the end
     of the function.  */
  expand_naked_return ();
}
1609
1610 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1611
1612 static enum type_class
1613 type_to_class (tree type)
1614 {
1615 switch (TREE_CODE (type))
1616 {
1617 case VOID_TYPE: return void_type_class;
1618 case INTEGER_TYPE: return integer_type_class;
1619 case ENUMERAL_TYPE: return enumeral_type_class;
1620 case BOOLEAN_TYPE: return boolean_type_class;
1621 case POINTER_TYPE: return pointer_type_class;
1622 case REFERENCE_TYPE: return reference_type_class;
1623 case OFFSET_TYPE: return offset_type_class;
1624 case REAL_TYPE: return real_type_class;
1625 case COMPLEX_TYPE: return complex_type_class;
1626 case FUNCTION_TYPE: return function_type_class;
1627 case METHOD_TYPE: return method_type_class;
1628 case RECORD_TYPE: return record_type_class;
1629 case UNION_TYPE:
1630 case QUAL_UNION_TYPE: return union_type_class;
1631 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1632 ? string_type_class : array_type_class);
1633 case LANG_TYPE: return lang_type_class;
1634 default: return no_type_class;
1635 }
1636 }
1637
1638 /* Expand a call EXP to __builtin_classify_type. */
1639
1640 static rtx
1641 expand_builtin_classify_type (tree exp)
1642 {
1643 if (call_expr_nargs (exp))
1644 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1645 return GEN_INT (no_type_class);
1646 }
1647
/* This helper macro, meant to be used in mathfn_built_in below,
   determines which among a set of three builtin math functions is
   appropriate for a given type mode.  The `F' and `L' cases are
   automatically generated from the `double' case.  It both provides
   the case labels and sets FCODE/FCODEF/FCODEL, which the code after
   the switch indexes by the requested type.  */
#define CASE_MATHFN(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
  fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
  fcodel = BUILT_IN_MATHFN##L ; break;
/* Similar to above, but appends _R after any F/L suffix, for the
   reentrant variants (e.g. lgamma_r, lgammaf_r, lgammal_r).  */
#define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
  fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
  fcodel = BUILT_IN_MATHFN##L_R ; break;
1661
/* Return mathematic function equivalent to FN but operating directly
   on TYPE, if available.  If we can't do the conversion, return zero.
   The switch (via CASE_MATHFN) records the double/float/long-double
   codes for FN's family; the type dispatch below then picks the one
   matching TYPE's main variant, looking it up among the implicitly
   available builtin decls.  */
tree
mathfn_built_in (tree type, enum built_in_function fn)
{
  enum built_in_function fcode, fcodef, fcodel;

  switch (fn)
    {
      CASE_MATHFN (BUILT_IN_ACOS)
      CASE_MATHFN (BUILT_IN_ACOSH)
      CASE_MATHFN (BUILT_IN_ASIN)
      CASE_MATHFN (BUILT_IN_ASINH)
      CASE_MATHFN (BUILT_IN_ATAN)
      CASE_MATHFN (BUILT_IN_ATAN2)
      CASE_MATHFN (BUILT_IN_ATANH)
      CASE_MATHFN (BUILT_IN_CBRT)
      CASE_MATHFN (BUILT_IN_CEIL)
      CASE_MATHFN (BUILT_IN_CEXPI)
      CASE_MATHFN (BUILT_IN_COPYSIGN)
      CASE_MATHFN (BUILT_IN_COS)
      CASE_MATHFN (BUILT_IN_COSH)
      CASE_MATHFN (BUILT_IN_DREM)
      CASE_MATHFN (BUILT_IN_ERF)
      CASE_MATHFN (BUILT_IN_ERFC)
      CASE_MATHFN (BUILT_IN_EXP)
      CASE_MATHFN (BUILT_IN_EXP10)
      CASE_MATHFN (BUILT_IN_EXP2)
      CASE_MATHFN (BUILT_IN_EXPM1)
      CASE_MATHFN (BUILT_IN_FABS)
      CASE_MATHFN (BUILT_IN_FDIM)
      CASE_MATHFN (BUILT_IN_FLOOR)
      CASE_MATHFN (BUILT_IN_FMA)
      CASE_MATHFN (BUILT_IN_FMAX)
      CASE_MATHFN (BUILT_IN_FMIN)
      CASE_MATHFN (BUILT_IN_FMOD)
      CASE_MATHFN (BUILT_IN_FREXP)
      CASE_MATHFN (BUILT_IN_GAMMA)
      CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
      CASE_MATHFN (BUILT_IN_HUGE_VAL)
      CASE_MATHFN (BUILT_IN_HYPOT)
      CASE_MATHFN (BUILT_IN_ILOGB)
      CASE_MATHFN (BUILT_IN_INF)
      CASE_MATHFN (BUILT_IN_ISINF)
      CASE_MATHFN (BUILT_IN_J0)
      CASE_MATHFN (BUILT_IN_J1)
      CASE_MATHFN (BUILT_IN_JN)
      CASE_MATHFN (BUILT_IN_LCEIL)
      CASE_MATHFN (BUILT_IN_LDEXP)
      CASE_MATHFN (BUILT_IN_LFLOOR)
      CASE_MATHFN (BUILT_IN_LGAMMA)
      CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
      CASE_MATHFN (BUILT_IN_LLCEIL)
      CASE_MATHFN (BUILT_IN_LLFLOOR)
      CASE_MATHFN (BUILT_IN_LLRINT)
      CASE_MATHFN (BUILT_IN_LLROUND)
      CASE_MATHFN (BUILT_IN_LOG)
      CASE_MATHFN (BUILT_IN_LOG10)
      CASE_MATHFN (BUILT_IN_LOG1P)
      CASE_MATHFN (BUILT_IN_LOG2)
      CASE_MATHFN (BUILT_IN_LOGB)
      CASE_MATHFN (BUILT_IN_LRINT)
      CASE_MATHFN (BUILT_IN_LROUND)
      CASE_MATHFN (BUILT_IN_MODF)
      CASE_MATHFN (BUILT_IN_NAN)
      CASE_MATHFN (BUILT_IN_NANS)
      CASE_MATHFN (BUILT_IN_NEARBYINT)
      CASE_MATHFN (BUILT_IN_NEXTAFTER)
      CASE_MATHFN (BUILT_IN_NEXTTOWARD)
      CASE_MATHFN (BUILT_IN_POW)
      CASE_MATHFN (BUILT_IN_POWI)
      CASE_MATHFN (BUILT_IN_POW10)
      CASE_MATHFN (BUILT_IN_REMAINDER)
      CASE_MATHFN (BUILT_IN_REMQUO)
      CASE_MATHFN (BUILT_IN_RINT)
      CASE_MATHFN (BUILT_IN_ROUND)
      CASE_MATHFN (BUILT_IN_SCALB)
      CASE_MATHFN (BUILT_IN_SCALBLN)
      CASE_MATHFN (BUILT_IN_SCALBN)
      CASE_MATHFN (BUILT_IN_SIGNIFICAND)
      CASE_MATHFN (BUILT_IN_SIN)
      CASE_MATHFN (BUILT_IN_SINCOS)
      CASE_MATHFN (BUILT_IN_SINH)
      CASE_MATHFN (BUILT_IN_SQRT)
      CASE_MATHFN (BUILT_IN_TAN)
      CASE_MATHFN (BUILT_IN_TANH)
      CASE_MATHFN (BUILT_IN_TGAMMA)
      CASE_MATHFN (BUILT_IN_TRUNC)
      CASE_MATHFN (BUILT_IN_Y0)
      CASE_MATHFN (BUILT_IN_Y1)
      CASE_MATHFN (BUILT_IN_YN)

    default:
      return NULL_TREE;
    }

  /* Select the variant whose operand type matches TYPE.  */
  if (TYPE_MAIN_VARIANT (type) == double_type_node)
    return implicit_built_in_decls[fcode];
  else if (TYPE_MAIN_VARIANT (type) == float_type_node)
    return implicit_built_in_decls[fcodef];
  else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
    return implicit_built_in_decls[fcodel];
  else
    return NULL_TREE;
}
1767
/* If errno must be maintained, expand the RTL to check if the result,
   TARGET, of a built-in function call, EXP, is NaN, and if so set
   errno to EDOM.  On targets where errno cannot be written directly,
   falls back to re-expanding the call as a library call so the library
   sets errno itself.  */

static void
expand_errno_check (tree exp, rtx target)
{
  rtx lab = gen_label_rtx ();

  /* Test the result; if it is NaN, set errno=EDOM because
     the argument was not in the domain.  (A value compares equal to
     itself unless it is NaN, so EQ here means "not NaN": skip ahead.)  */
  emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target),
			   0, lab);

#ifdef TARGET_EDOM
  /* If this built-in doesn't throw an exception, set errno directly.  */
  if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
    {
#ifdef GEN_ERRNO_RTX
      rtx errno_rtx = GEN_ERRNO_RTX;
#else
      rtx errno_rtx
	  = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
#endif
      emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
      emit_label (lab);
      return;
    }
#endif

  /* We can't set errno=EDOM directly; let the library call do it.
     Pop the arguments right away in case the call gets deleted.  */
  NO_DEFER_POP;
  expand_call (exp, target, 0);
  OK_DEFER_POP;
  emit_label (lab);
}
1805
/* Expand a call to one of the builtin math functions (sqrt, exp, or log).
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, insns, before_call;
  tree fndecl = get_callee_fndecl (exp);
  enum machine_mode mode;
  bool errno_set = false;
  tree arg, narg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  /* Pick the optab for this builtin, and note which functions can set
     errno to EDOM (the check is emitted only when errno math is honored,
     see below).  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SQRT):
      /* sqrt of a provably nonnegative argument can never set errno.  */
      errno_set = ! tree_expr_nonnegative_p (arg);
      builtin_optab = sqrt_optab;
      break;
    CASE_FLT_FN (BUILT_IN_EXP):
      errno_set = true; builtin_optab = exp_optab; break;
    CASE_FLT_FN (BUILT_IN_EXP10):
    CASE_FLT_FN (BUILT_IN_POW10):
      errno_set = true; builtin_optab = exp10_optab; break;
    CASE_FLT_FN (BUILT_IN_EXP2):
      errno_set = true; builtin_optab = exp2_optab; break;
    CASE_FLT_FN (BUILT_IN_EXPM1):
      errno_set = true; builtin_optab = expm1_optab; break;
    CASE_FLT_FN (BUILT_IN_LOGB):
      errno_set = true; builtin_optab = logb_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG):
      errno_set = true; builtin_optab = log_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG10):
      errno_set = true; builtin_optab = log10_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG2):
      errno_set = true; builtin_optab = log2_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG1P):
      errno_set = true; builtin_optab = log1p_optab; break;
    CASE_FLT_FN (BUILT_IN_ASIN):
      builtin_optab = asin_optab; break;
    CASE_FLT_FN (BUILT_IN_ACOS):
      builtin_optab = acos_optab; break;
    CASE_FLT_FN (BUILT_IN_TAN):
      builtin_optab = tan_optab; break;
    CASE_FLT_FN (BUILT_IN_ATAN):
      builtin_optab = atan_optab; break;
    CASE_FLT_FN (BUILT_IN_FLOOR):
      builtin_optab = floor_optab; break;
    CASE_FLT_FN (BUILT_IN_CEIL):
      builtin_optab = ceil_optab; break;
    CASE_FLT_FN (BUILT_IN_TRUNC):
      builtin_optab = btrunc_optab; break;
    CASE_FLT_FN (BUILT_IN_ROUND):
      builtin_optab = round_optab; break;
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
      builtin_optab = nearbyint_optab;
      if (flag_trapping_math)
	break;
      /* Else fallthrough and expand as rint.  Without trapping math,
	 nearbyint and rint are equivalent.  */
    CASE_FLT_FN (BUILT_IN_RINT):
      builtin_optab = rint_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  if (! flag_errno_math || ! HONOR_NANS (mode))
    errno_set = false;

  /* Before working hard, check whether the instruction is available.  */
  if (builtin_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
    {
      target = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      narg = builtin_save_expr (arg);
      if (narg != arg)
	{
	  arg = narg;
	  exp = build_call_expr (fndecl, 1, arg);
	}

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      /* Compute into TARGET.
	 Set TARGET to wherever the result comes back.  */
      target = expand_unop (mode, builtin_optab, op0, target, 0);

      if (target != 0)
	{
	  if (errno_set)
	    expand_errno_check (exp, target);

	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return target;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call to the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  before_call = get_last_insn ();

  target = expand_call (exp, target, target == const0_rtx);

  /* If this is a sqrt operation and we don't care about errno, try to
     attach a REG_EQUAL note with a SQRT rtx to the emitted libcall.
     This allows the semantics of the libcall to be visible to the RTL
     optimizers.  */
  if (builtin_optab == sqrt_optab && !errno_set)
    {
      /* Search backwards through the insns emitted by expand_call looking
	 for the instruction with the REG_RETVAL note.  */
      rtx last = get_last_insn ();
      while (last != before_call)
	{
	  if (find_reg_note (last, REG_RETVAL, NULL))
	    {
	      rtx note = find_reg_note (last, REG_EQUAL, NULL);
	      /* Check that the REQ_EQUAL note is an EXPR_LIST with
		 two elements, i.e. symbol_ref(sqrt) and the operand.  */
	      if (note
		  && GET_CODE (note) == EXPR_LIST
		  && GET_CODE (XEXP (note, 0)) == EXPR_LIST
		  && XEXP (XEXP (note, 0), 1) != NULL_RTX
		  && XEXP (XEXP (XEXP (note, 0), 1), 1) == NULL_RTX)
		{
		  rtx operand = XEXP (XEXP (XEXP (note, 0), 1), 0);
		  /* Check operand is a register with expected mode.  */
		  if (operand
		      && REG_P (operand)
		      && GET_MODE (operand) == mode)
		    {
		      /* Replace the REG_EQUAL note with a SQRT rtx.  */
		      rtx equiv = gen_rtx_SQRT (mode, operand);
		      set_unique_reg_note (last, REG_EQUAL, equiv);
		    }
		}
	      break;
	    }
	  last = PREV_INSN (last);
	}
    }

  return target;
}
1971
/* Expand a call to the builtin binary math functions (pow and atan2).
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, op1, insns;
  int op1_type = REAL_TYPE;
  tree fndecl = get_callee_fndecl (exp);
  tree arg0, arg1, narg;
  enum machine_mode mode;
  bool errno_set = true;
  bool stable = true;

  /* ldexp/scalbn/scalbln take an integer second argument; everything
     else handled here takes two reals.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
    CASE_FLT_FN (BUILT_IN_LDEXP):
      op1_type = INTEGER_TYPE;
      /* Fall through.  */
    default:
      break;
    }

  if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_POW):
      builtin_optab = pow_optab; break;
    CASE_FLT_FN (BUILT_IN_ATAN2):
      builtin_optab = atan2_optab; break;
    CASE_FLT_FN (BUILT_IN_SCALB):
      /* scalb is only expandable when FLT_RADIX is 2.  */
      if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
	return 0;
      builtin_optab = scalb_optab; break;
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
      if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
	return 0;
      /* Fall through...  scalbn with radix 2 is exactly ldexp.  */
    CASE_FLT_FN (BUILT_IN_LDEXP):
      builtin_optab = ldexp_optab; break;
    CASE_FLT_FN (BUILT_IN_FMOD):
      builtin_optab = fmod_optab; break;
    CASE_FLT_FN (BUILT_IN_REMAINDER):
    CASE_FLT_FN (BUILT_IN_DREM):
      builtin_optab = remainder_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Before working hard, check whether the instruction is available.  */
  if (builtin_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
    return NULL_RTX;

  target = gen_reg_rtx (mode);

  if (! flag_errno_math || ! HONOR_NANS (mode))
    errno_set = false;

  /* Always stabilize the argument list.  If either argument changed,
     rebuild the call so a possible fallback library call (in
     expand_errno_check or below) evaluates side effects only once.  */
  narg = builtin_save_expr (arg1);
  if (narg != arg1)
    {
      arg1 = narg;
      stable = false;
    }
  narg = builtin_save_expr (arg0);
  if (narg != arg0)
    {
      arg0 = narg;
      stable = false;
    }

  if (! stable)
    exp = build_call_expr (fndecl, 2, arg0, arg1);

  op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
  op1 = expand_normal (arg1);

  start_sequence ();

  /* Compute into TARGET.
     Set TARGET to wherever the result comes back.  */
  target = expand_binop (mode, builtin_optab, op0, op1,
			 target, 0, OPTAB_DIRECT);

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call to the library function
     with the stabilized argument list.  */
  if (target == 0)
    {
      end_sequence ();
      return expand_call (exp, target, target == const0_rtx);
    }

  if (errno_set)
    expand_errno_check (exp, target);

  /* Output the entire sequence.  */
  insns = get_insns ();
  end_sequence ();
  emit_insn (insns);

  return target;
}
2091
2092 /* Expand a call to the builtin sin and cos math functions.
2093 Return NULL_RTX if a normal call should be emitted rather than expanding the
2094 function in-line. EXP is the expression that is a call to the builtin
2095 function; if convenient, the result should be placed in TARGET.
2096 SUBTARGET may be used as the target for computing one of EXP's
2097 operands. */
2098
2099 static rtx
2100 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2101 {
2102 optab builtin_optab;
2103 rtx op0, insns;
2104 tree fndecl = get_callee_fndecl (exp);
2105 enum machine_mode mode;
2106 tree arg, narg;
2107
2108 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2109 return NULL_RTX;
2110
2111 arg = CALL_EXPR_ARG (exp, 0);
2112
2113 switch (DECL_FUNCTION_CODE (fndecl))
2114 {
2115 CASE_FLT_FN (BUILT_IN_SIN):
2116 CASE_FLT_FN (BUILT_IN_COS):
2117 builtin_optab = sincos_optab; break;
2118 default:
2119 gcc_unreachable ();
2120 }
2121
2122 /* Make a suitable register to place result in. */
2123 mode = TYPE_MODE (TREE_TYPE (exp));
2124
2125 /* Check if sincos insn is available, otherwise fallback
2126 to sin or cos insn. */
2127 if (builtin_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
2128 switch (DECL_FUNCTION_CODE (fndecl))
2129 {
2130 CASE_FLT_FN (BUILT_IN_SIN):
2131 builtin_optab = sin_optab; break;
2132 CASE_FLT_FN (BUILT_IN_COS):
2133 builtin_optab = cos_optab; break;
2134 default:
2135 gcc_unreachable ();
2136 }
2137
2138 /* Before working hard, check whether the instruction is available. */
2139 if (builtin_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2140 {
2141 target = gen_reg_rtx (mode);
2142
2143 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2144 need to expand the argument again. This way, we will not perform
2145 side-effects more the once. */
2146 narg = save_expr (arg);
2147 if (narg != arg)
2148 {
2149 arg = narg;
2150 exp = build_call_expr (fndecl, 1, arg);
2151 }
2152
2153 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2154
2155 start_sequence ();
2156
2157 /* Compute into TARGET.
2158 Set TARGET to wherever the result comes back. */
2159 if (builtin_optab == sincos_optab)
2160 {
2161 int result;
2162
2163 switch (DECL_FUNCTION_CODE (fndecl))
2164 {
2165 CASE_FLT_FN (BUILT_IN_SIN):
2166 result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
2167 break;
2168 CASE_FLT_FN (BUILT_IN_COS):
2169 result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
2170 break;
2171 default:
2172 gcc_unreachable ();
2173 }
2174 gcc_assert (result);
2175 }
2176 else
2177 {
2178 target = expand_unop (mode, builtin_optab, op0, target, 0);
2179 }
2180
2181 if (target != 0)
2182 {
2183 /* Output the entire sequence. */
2184 insns = get_insns ();
2185 end_sequence ();
2186 emit_insn (insns);
2187 return target;
2188 }
2189
2190 /* If we were unable to expand via the builtin, stop the sequence
2191 (without outputting the insns) and call to the library function
2192 with the stabilized argument list. */
2193 end_sequence ();
2194 }
2195
2196 target = expand_call (exp, target, target == const0_rtx);
2197
2198 return target;
2199 }
2200
/* Expand a call to one of the builtin math functions that operate on
   floating point argument and output an integer result (ilogb, isinf,
   isnan, etc).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_interclass_mathfn (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  enum insn_code icode;
  rtx op0;
  tree fndecl = get_callee_fndecl (exp);
  enum machine_mode mode;
  bool errno_set = false;
  tree arg, narg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_ILOGB):
      errno_set = true; builtin_optab = ilogb_optab; break;
    CASE_FLT_FN (BUILT_IN_ISINF):
      builtin_optab = isinf_optab; break;
    default:
      gcc_unreachable ();
    }

  /* There's no easy way to detect the case we need to set EDOM, so
     give up on inline expansion entirely when errno matters.  */
  if (flag_errno_math && errno_set)
    return NULL_RTX;

  /* Optab mode depends on the mode of the input argument, not (as in
     the other math expanders) on the mode of the result.  */
  mode = TYPE_MODE (TREE_TYPE (arg));

  icode = builtin_optab->handlers[(int) mode].insn_code;

  /* Before working hard, check whether the instruction is available.  */
  if (icode != CODE_FOR_nothing)
    {
      /* Make a suitable register to place result in.  */
      if (!target
	  || GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
	target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

      gcc_assert (insn_data[icode].operand[0].predicate
		  (target, GET_MODE (target)));

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      narg = builtin_save_expr (arg);
      if (narg != arg)
	{
	  arg = narg;
	  exp = build_call_expr (fndecl, 1, arg);
	}

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      if (mode != GET_MODE (op0))
	op0 = convert_to_mode (mode, op0, 0);

      /* Compute into TARGET.
	 Set TARGET to wherever the result comes back.  */
      emit_unop_insn (icode, target, op0, UNKNOWN);
      return target;
    }

  target = expand_call (exp, target, target == const0_rtx);

  return target;
}
2280
/* Expand a call to the builtin sincos math function.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function.  On success the result is const0_rtx: sincos returns void and
   stores through its two pointer arguments.  */

static rtx
expand_builtin_sincos (tree exp)
{
  rtx op0, op1, op2, target1, target2;
  enum machine_mode mode;
  tree arg, sinp, cosp;
  int result;

  if (!validate_arglist (exp, REAL_TYPE,
			 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  sinp = CALL_EXPR_ARG (exp, 1);
  cosp = CALL_EXPR_ARG (exp, 2);

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Check if sincos insn is available, otherwise emit the call.  */
  if (sincos_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
    return NULL_RTX;

  target1 = gen_reg_rtx (mode);
  target2 = gen_reg_rtx (mode);

  op0 = expand_normal (arg);
  /* Expand the stores' destinations as lvalues (MEMs).  */
  op1 = expand_normal (build_fold_indirect_ref (sinp));
  op2 = expand_normal (build_fold_indirect_ref (cosp));

  /* Compute into target1 and target2.
     Set TARGET to wherever the result comes back.  Note the operand
     order: the sincos pattern produces (cos, sin), so TARGET2 receives
     the cosine and TARGET1 the sine.  */
  result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
  gcc_assert (result);

  /* Move target1 and target2 to the memory locations indicated
     by op1 and op2.  */
  emit_move_insn (op1, target1);
  emit_move_insn (op2, target2);

  return const0_rtx;
}
2328
/* Expand a call to the internal cexpi builtin to the sincos math function.
   EXP is the expression that is a call to the builtin function; if convenient,
   the result should be placed in TARGET.  SUBTARGET may be used as the target
   for computing one of EXP's operands.

   Three strategies are tried in order: the sincos optab, a libcall to
   sincos (when the target has it), and finally a libcall to cexp.  */

static rtx
expand_builtin_cexpi (tree exp, rtx target, rtx subtarget)
{
  tree fndecl = get_callee_fndecl (exp);
  tree arg, type;
  enum machine_mode mode;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Try expanding via a sincos optab, fall back to emitting a libcall
     to sincos or cexp.  We are sure we have sincos or cexp because cexpi
     is only generated from sincos, cexp or if we have either of them.  */
  if (sincos_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
    {
      op1 = gen_reg_rtx (mode);
      op2 = gen_reg_rtx (mode);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      /* Compute into op1 and op2.  The pattern's outputs are (cos, sin),
	 so OP2 gets the cosine and OP1 the sine.  */
      expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
    }
  else if (TARGET_HAS_SINCOS)
    {
      tree call, fn = NULL_TREE;
      tree top1, top2;
      rtx op1a, op2a;

      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	fn = built_in_decls[BUILT_IN_SINCOSF];
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	fn = built_in_decls[BUILT_IN_SINCOS];
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	fn = built_in_decls[BUILT_IN_SINCOSL];
      else
	gcc_unreachable ();

      /* Create two stack temporaries and pass their addresses to
	 sincos; the results are read back below (via make_tree of
	 the address registers).  */
      op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
      op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
      op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
      op2a = copy_to_mode_reg (Pmode, XEXP (op2, 0));
      top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
      top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);

      /* Make sure not to fold the sincos call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
				      call, 3, arg, top1, top2));
    }
  else
    {
      tree call, fn = NULL_TREE, narg;
      tree ctype = build_complex_type (type);

      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	fn = built_in_decls[BUILT_IN_CEXPF];
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	fn = built_in_decls[BUILT_IN_CEXP];
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	fn = built_in_decls[BUILT_IN_CEXPL];
      else
	gcc_unreachable ();

      /* If we don't have a decl for cexp create one.  This is the
	 friendliest fallback if the user calls __builtin_cexpi
	 without full target C99 function support.  */
      if (fn == NULL_TREE)
	{
	  tree fntype;
	  const char *name = NULL;

	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	    name = "cexpf";
	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	    name = "cexp";
	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	    name = "cexpl";

	  fntype = build_function_type_list (ctype, ctype, NULL_TREE);
	  fn = build_fn_decl (name, fntype);
	}

      /* cexpi(x) == cexp(0 + x*i); build the complex argument.  */
      narg = fold_build2 (COMPLEX_EXPR, ctype,
			  build_real (type, dconst0), arg);

      /* Make sure not to fold the cexp call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      return expand_expr (build_call_nary (ctype, call, 1, narg),
			  target, VOIDmode, EXPAND_NORMAL);
    }

  /* Now build the proper return type: real part is the cosine (OP2),
     imaginary part the sine (OP1).  */
  return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
			      make_tree (TREE_TYPE (arg), op2),
			      make_tree (TREE_TYPE (arg), op1)),
		      target, VOIDmode, EXPAND_NORMAL);
}
2437
/* Expand a call to one of the builtin rounding functions gcc defines
   as an extension (lfloor and lceil).  As these are gcc extensions we
   do not need to worry about setting errno to EDOM.
   If expanding via optab fails, lower expression to (int)(floor(x)).
   EXP is the expression that is a call to the builtin function;
   if convenient, the result should be placed in TARGET.  SUBTARGET may
   be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_int_roundingfn (tree exp, rtx target, rtx subtarget)
{
  convert_optab builtin_optab;
  rtx op0, insns, tmp;
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fallback_fn;
  tree fallback_fndecl;
  enum machine_mode mode;
  tree arg, narg;

  /* Unlike the other expanders, a bad argument list here is a
     front-end bug rather than a reason to emit a normal call.  */
  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    gcc_unreachable ();

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_LCEIL):
    CASE_FLT_FN (BUILT_IN_LLCEIL):
      builtin_optab = lceil_optab;
      fallback_fn = BUILT_IN_CEIL;
      break;

    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      builtin_optab = lfloor_optab;
      fallback_fn = BUILT_IN_FLOOR;
      break;

    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  target = gen_reg_rtx (mode);

  /* Wrap the computation of the argument in a SAVE_EXPR, as we may
     need to expand the argument again.  This way, we will not perform
     side-effects more than once.  */
  narg = builtin_save_expr (arg);
  if (narg != arg)
    {
      arg = narg;
      exp = build_call_expr (fndecl, 1, arg);
    }

  op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

  start_sequence ();

  /* Compute into TARGET.  */
  if (expand_sfix_optab (target, op0, builtin_optab))
    {
      /* Output the entire sequence.  */
      insns = get_insns ();
      end_sequence ();
      emit_insn (insns);
      return target;
    }

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns).  */
  end_sequence ();

  /* Fall back to floating point rounding optab.  */
  fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);

  /* For non-C99 targets we may end up without a fallback fndecl here
     if the user called __builtin_lfloor directly.  In this case emit
     a call to the floor/ceil variants nevertheless.  This should result
     in the best user experience for not full C99 targets.  */
  if (fallback_fndecl == NULL_TREE)
    {
      tree fntype;
      const char *name = NULL;

      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_LCEIL:
	case BUILT_IN_LLCEIL:
	  name = "ceil";
	  break;
	case BUILT_IN_LCEILF:
	case BUILT_IN_LLCEILF:
	  name = "ceilf";
	  break;
	case BUILT_IN_LCEILL:
	case BUILT_IN_LLCEILL:
	  name = "ceill";
	  break;
	case BUILT_IN_LFLOOR:
	case BUILT_IN_LLFLOOR:
	  name = "floor";
	  break;
	case BUILT_IN_LFLOORF:
	case BUILT_IN_LLFLOORF:
	  name = "floorf";
	  break;
	case BUILT_IN_LFLOORL:
	case BUILT_IN_LLFLOORL:
	  name = "floorl";
	  break;
	default:
	  gcc_unreachable ();
	}

      fntype = build_function_type_list (TREE_TYPE (arg),
					 TREE_TYPE (arg), NULL_TREE);
      fallback_fndecl = build_fn_decl (name, fntype);
    }

  /* Lower to (int)floor(x) / (int)ceil(x): call the floating-point
     rounding function, then truncate to the integer result mode.  */
  exp = build_call_expr (fallback_fndecl, 1, arg);

  tmp = expand_normal (exp);

  /* Truncate the result of floating point optab to integer
     via expand_fix ().  */
  target = gen_reg_rtx (mode);
  expand_fix (target, tmp, 0);

  return target;
}
2571
/* Expand a call to one of the builtin math functions doing integer
   conversion (lrint).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_int_roundingfn_2 (tree exp, rtx target, rtx subtarget)
{
  convert_optab builtin_optab;
  rtx op0, insns;
  tree fndecl = get_callee_fndecl (exp);
  tree arg, narg;
  enum machine_mode mode;

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math)
    return NULL_RTX;

  /* A bad argument list here is a front-end bug rather than a reason
     to emit a normal call.  */
  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    gcc_unreachable ();

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_LLRINT):
      builtin_optab = lrint_optab; break;
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_LLROUND):
      builtin_optab = lround_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  target = gen_reg_rtx (mode);

  /* Wrap the computation of the argument in a SAVE_EXPR, as we may
     need to expand the argument again.  This way, we will not perform
     side-effects more than once.  */
  narg = builtin_save_expr (arg);
  if (narg != arg)
    {
      arg = narg;
      exp = build_call_expr (fndecl, 1, arg);
    }

  op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

  start_sequence ();

  if (expand_sfix_optab (target, op0, builtin_optab))
    {
      /* Output the entire sequence.  */
      insns = get_insns ();
      end_sequence ();
      emit_insn (insns);
      return target;
    }

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call to the library function
     with the stabilized argument list.  */
  end_sequence ();

  target = expand_call (exp, target, target == const0_rtx);

  return target;
}
2646
2647 /* To evaluate powi(x,n), the floating point value x raised to the
2648 constant integer exponent n, we use a hybrid algorithm that
2649 combines the "window method" with look-up tables. For an
2650 introduction to exponentiation algorithms and "addition chains",
2651 see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
2652 "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
2653 3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
2654 Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998. */
2655
2656 /* Provide a default value for POWI_MAX_MULTS, the maximum number of
2657 multiplications to inline before calling the system library's pow
2658 function. powi(x,n) requires at worst 2*bits(n)-2 multiplications,
2659 so this default never requires calling pow, powf or powl. */
2660
2661 #ifndef POWI_MAX_MULTS
2662 #define POWI_MAX_MULTS (2*HOST_BITS_PER_WIDE_INT-2)
2663 #endif
2664
2665 /* The size of the "optimal power tree" lookup table. All
2666 exponents less than this value are simply looked up in the
2667 powi_table below. This threshold is also used to size the
2668 cache of pseudo registers that hold intermediate results. */
2669 #define POWI_TABLE_SIZE 256
2670
2671 /* The size, in bits of the window, used in the "window method"
2672 exponentiation algorithm. This is equivalent to a radix of
2673 (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method". */
2674 #define POWI_WINDOW_SIZE 3
2675
/* The following table is an efficient representation of an
   "optimal power tree".  For each value, i, the corresponding
   value, j, in the table states that an optimal evaluation
   sequence for calculating pow(x,i) can be found by evaluating
   pow(x,j)*pow(x,i-j).  An optimal power tree for the first
   100 integers is given in Knuth's "Seminumerical algorithms".  */

/* Consumed by powi_lookup_cost and expand_powi_1, which must stay in
   sync with each other.  */
static const unsigned char powi_table[POWI_TABLE_SIZE] =
  {
      0,   1,   1,   2,   2,   3,   3,   4,  /*   0 -   7 */
      4,   6,   5,   6,   6,  10,   7,   9,  /*   8 -  15 */
      8,  16,   9,  16,  10,  12,  11,  13,  /*  16 -  23 */
     12,  17,  13,  18,  14,  24,  15,  26,  /*  24 -  31 */
     16,  17,  17,  19,  18,  33,  19,  26,  /*  32 -  39 */
     20,  25,  21,  40,  22,  27,  23,  44,  /*  40 -  47 */
     24,  32,  25,  34,  26,  29,  27,  44,  /*  48 -  55 */
     28,  31,  29,  34,  30,  60,  31,  36,  /*  56 -  63 */
     32,  64,  33,  34,  34,  46,  35,  37,  /*  64 -  71 */
     36,  65,  37,  50,  38,  48,  39,  69,  /*  72 -  79 */
     40,  49,  41,  43,  42,  51,  43,  58,  /*  80 -  87 */
     44,  64,  45,  47,  46,  59,  47,  76,  /*  88 -  95 */
     48,  65,  49,  66,  50,  67,  51,  66,  /*  96 - 103 */
     52,  70,  53,  74,  54, 104,  55,  74,  /* 104 - 111 */
     56,  64,  57,  69,  58,  78,  59,  68,  /* 112 - 119 */
     60,  61,  61,  80,  62,  75,  63,  68,  /* 120 - 127 */
     64,  65,  65, 128,  66, 129,  67,  90,  /* 128 - 135 */
     68,  73,  69, 131,  70,  94,  71,  88,  /* 136 - 143 */
     72, 128,  73,  98,  74, 132,  75, 121,  /* 144 - 151 */
     76, 102,  77, 124,  78, 132,  79, 106,  /* 152 - 159 */
     80,  97,  81, 160,  82,  99,  83, 134,  /* 160 - 167 */
     84,  86,  85,  95,  86, 160,  87, 100,  /* 168 - 175 */
     88, 113,  89,  98,  90, 107,  91, 122,  /* 176 - 183 */
     92, 111,  93, 102,  94, 126,  95, 150,  /* 184 - 191 */
     96, 128,  97, 130,  98, 133,  99, 195,  /* 192 - 199 */
    100, 128, 101, 123, 102, 164, 103, 138,  /* 200 - 207 */
    104, 145, 105, 146, 106, 109, 107, 149,  /* 208 - 215 */
    108, 200, 109, 146, 110, 170, 111, 157,  /* 216 - 223 */
    112, 128, 113, 130, 114, 182, 115, 132,  /* 224 - 231 */
    116, 200, 117, 132, 118, 158, 119, 206,  /* 232 - 239 */
    120, 240, 121, 162, 122, 147, 123, 152,  /* 240 - 247 */
    124, 166, 125, 214, 126, 138, 127, 153,  /* 248 - 255 */
  };
2718
2719
2720 /* Return the number of multiplications required to calculate
2721 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2722 subroutine of powi_cost. CACHE is an array indicating
2723 which exponents have already been calculated. */
2724
2725 static int
2726 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
2727 {
2728 /* If we've already calculated this exponent, then this evaluation
2729 doesn't require any additional multiplications. */
2730 if (cache[n])
2731 return 0;
2732
2733 cache[n] = true;
2734 return powi_lookup_cost (n - powi_table[n], cache)
2735 + powi_lookup_cost (powi_table[n], cache) + 1;
2736 }
2737
2738 /* Return the number of multiplications required to calculate
2739 powi(x,n) for an arbitrary x, given the exponent N. This
2740 function needs to be kept in sync with expand_powi below. */
2741
2742 static int
2743 powi_cost (HOST_WIDE_INT n)
2744 {
2745 bool cache[POWI_TABLE_SIZE];
2746 unsigned HOST_WIDE_INT digit;
2747 unsigned HOST_WIDE_INT val;
2748 int result;
2749
2750 if (n == 0)
2751 return 0;
2752
2753 /* Ignore the reciprocal when calculating the cost. */
2754 val = (n < 0) ? -n : n;
2755
2756 /* Initialize the exponent cache. */
2757 memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
2758 cache[1] = true;
2759
2760 result = 0;
2761
2762 while (val >= POWI_TABLE_SIZE)
2763 {
2764 if (val & 1)
2765 {
2766 digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2767 result += powi_lookup_cost (digit, cache)
2768 + POWI_WINDOW_SIZE + 1;
2769 val >>= POWI_WINDOW_SIZE;
2770 }
2771 else
2772 {
2773 val >>= 1;
2774 result++;
2775 }
2776 }
2777
2778 return result + powi_lookup_cost (val, cache);
2779 }
2780
2781 /* Recursive subroutine of expand_powi. This function takes the array,
2782 CACHE, of already calculated exponents and an exponent N and returns
2783 an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE. */
2784
2785 static rtx
2786 expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
2787 {
2788 unsigned HOST_WIDE_INT digit;
2789 rtx target, result;
2790 rtx op0, op1;
2791
2792 if (n < POWI_TABLE_SIZE)
2793 {
2794 if (cache[n])
2795 return cache[n];
2796
2797 target = gen_reg_rtx (mode);
2798 cache[n] = target;
2799
2800 op0 = expand_powi_1 (mode, n - powi_table[n], cache);
2801 op1 = expand_powi_1 (mode, powi_table[n], cache);
2802 }
2803 else if (n & 1)
2804 {
2805 target = gen_reg_rtx (mode);
2806 digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
2807 op0 = expand_powi_1 (mode, n - digit, cache);
2808 op1 = expand_powi_1 (mode, digit, cache);
2809 }
2810 else
2811 {
2812 target = gen_reg_rtx (mode);
2813 op0 = expand_powi_1 (mode, n >> 1, cache);
2814 op1 = op0;
2815 }
2816
2817 result = expand_mult (mode, op0, op1, target, 0);
2818 if (result != target)
2819 emit_move_insn (target, result);
2820 return target;
2821 }
2822
2823 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
2824 floating point operand in mode MODE, and N is the exponent. This
2825 function needs to be kept in sync with powi_cost above. */
2826
2827 static rtx
2828 expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
2829 {
2830 unsigned HOST_WIDE_INT val;
2831 rtx cache[POWI_TABLE_SIZE];
2832 rtx result;
2833
2834 if (n == 0)
2835 return CONST1_RTX (mode);
2836
2837 val = (n < 0) ? -n : n;
2838
2839 memset (cache, 0, sizeof (cache));
2840 cache[1] = x;
2841
2842 result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
2843
2844 /* If the original exponent was negative, reciprocate the result. */
2845 if (n < 0)
2846 result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2847 result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2848
2849 return result;
2850 }
2851
/* Expand a call to the pow built-in mathematical function.  Return NULL_RTX if
   a normal call should be emitted rather than expanding the function
   in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_pow (tree exp, rtx target, rtx subtarget)
{
  tree arg0, arg1;
  tree fn, narg0;
  tree type = TREE_TYPE (exp);
  REAL_VALUE_TYPE cint, c, c2;
  HOST_WIDE_INT n;
  rtx op, op2;
  enum machine_mode mode = TYPE_MODE (type);

  if (! validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);

  /* A non-constant (or overflowed) exponent admits none of the
     multiplication tricks below; use the generic expander.  */
  if (TREE_CODE (arg1) != REAL_CST
      || TREE_OVERFLOW (arg1))
    return expand_builtin_mathfn_2 (exp, target, subtarget);

  /* Handle constant exponents.  */

  /* For integer valued exponents we can expand to an optimal multiplication
     sequence using expand_powi.  */
  c = TREE_REAL_CST (arg1);
  n = real_to_integer (&c);
  real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
  /* Exponents -1, 0, 1 and 2 are always exact and cheap; anything
     bigger requires unsafe math, not optimizing for size, and a
     multiplication count within POWI_MAX_MULTS.  */
  if (real_identical (&c, &cint)
      && ((n >= -1 && n <= 2)
	  || (flag_unsafe_math_optimizations
	      && !optimize_size
	      && powi_cost (n) <= POWI_MAX_MULTS)))
    {
      op = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
      if (n != 1)
	{
	  op = force_reg (mode, op);
	  op = expand_powi (op, mode, n);
	}
      return op;
    }

  /* ARG0 may be evaluated more than once below; guard its side
     effects.  */
  narg0 = builtin_save_expr (arg0);

  /* If the exponent is not integer valued, check if it is half of an integer.
     In this case we can expand to sqrt (x) * x**(n/2).  */
  fn = mathfn_built_in (type, BUILT_IN_SQRT);
  if (fn != NULL_TREE)
    {
      /* C2 = 2*C; when C2 is exactly the integer N, C was N/2.  */
      real_arithmetic (&c2, MULT_EXPR, &c, &dconst2);
      n = real_to_integer (&c2);
      real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
      if (real_identical (&c2, &cint)
	  && ((flag_unsafe_math_optimizations
	       && !optimize_size
	       && powi_cost (n/2) <= POWI_MAX_MULTS)
	      || n == 1))
	{
	  tree call_expr = build_call_expr (fn, 1, narg0);
	  op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
	  if (n != 1)
	    {
	      /* Multiply sqrt(x) by x**|n/2|.  */
	      op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
	      op2 = force_reg (mode, op2);
	      op2 = expand_powi (op2, mode, abs (n / 2));
	      op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
					0, OPTAB_LIB_WIDEN);
	      /* If the original exponent was negative, reciprocate the
		 result.  */
	      if (n < 0)
		op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
				   op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
	    }
	  return op;
	}
    }

  /* Try if the exponent is a third of an integer.  In this case
     we can expand to x**(n/3) * cbrt(x)**(n%3).  As cbrt (x) is
     different from pow (x, 1./3.) due to rounding and behavior
     with negative x we need to constrain this transformation to
     unsafe math and positive x or finite math.  */
  fn = mathfn_built_in (type, BUILT_IN_CBRT);
  if (fn != NULL_TREE
      && flag_unsafe_math_optimizations
      && (tree_expr_nonnegative_p (arg0)
	  || !HONOR_NANS (mode)))
    {
      /* Round 3*C to integer N, then check that N/3 converts back to
	 exactly C in this mode, i.e. C really was N/3.  */
      real_arithmetic (&c2, MULT_EXPR, &c, &dconst3);
      real_round (&c2, mode, &c2);
      n = real_to_integer (&c2);
      real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
      real_arithmetic (&c2, RDIV_EXPR, &cint, &dconst3);
      real_convert (&c2, mode, &c2);
      if (real_identical (&c2, &c)
	  && ((!optimize_size
	       && powi_cost (n/3) <= POWI_MAX_MULTS)
	      || n == 1))
	{
	  tree call_expr = build_call_expr (fn, 1,narg0);
	  op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
	  /* cbrt(x)**2 accounts for a remainder of |n| % 3 == 2.  */
	  if (abs (n) % 3 == 2)
	    op = expand_simple_binop (mode, MULT, op, op, op,
				      0, OPTAB_LIB_WIDEN);
	  if (n != 1)
	    {
	      /* Multiply by the integer part x**|n/3|.  */
	      op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
	      op2 = force_reg (mode, op2);
	      op2 = expand_powi (op2, mode, abs (n / 3));
	      op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
					0, OPTAB_LIB_WIDEN);
	      /* If the original exponent was negative, reciprocate the
		 result.  */
	      if (n < 0)
		op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
				   op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
	    }
	  return op;
	}
    }

  /* Fall back to optab expansion.  */
  return expand_builtin_mathfn_2 (exp, target, subtarget);
}
2982
/* Expand a call to the powi built-in mathematical function.  Return NULL_RTX if
   a normal call should be emitted rather than expanding the function
   in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing the first
   operand.  */

static rtx
expand_builtin_powi (tree exp, rtx target, rtx subtarget)
{
  tree arg0, arg1;
  rtx op0, op1;
  enum machine_mode mode;
  enum machine_mode mode2;

  if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Handle constant power.  */

  if (TREE_CODE (arg1) == INTEGER_CST
      && !TREE_OVERFLOW (arg1))
    {
      HOST_WIDE_INT n = TREE_INT_CST_LOW (arg1);

      /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
	 Otherwise, check the number of multiplications required.  */
      if ((TREE_INT_CST_HIGH (arg1) == 0
	   || TREE_INT_CST_HIGH (arg1) == -1)
	  && ((n >= -1 && n <= 2)
	      || (! optimize_size
		  && powi_cost (n) <= POWI_MAX_MULTS)))
	{
	  op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
	  op0 = force_reg (mode, op0);
	  return expand_powi (op0, mode, n);
	}
    }

  /* Emit a libcall to libgcc.  */

  /* Mode of the 2nd argument must match that of an int.  */
  mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);

  if (target == NULL_RTX)
    target = gen_reg_rtx (mode);

  /* Convert both operands to the modes the libcall expects.  */
  op0 = expand_expr (arg0, subtarget, mode, EXPAND_NORMAL);
  if (GET_MODE (op0) != mode)
    op0 = convert_to_mode (mode, op0, 0);
  op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
  if (GET_MODE (op1) != mode2)
    op1 = convert_to_mode (mode2, op1, 0);

  target = emit_library_call_value (powi_optab->handlers[(int) mode].libfunc,
				    target, LCT_CONST_MAKE_BLOCK, mode, 2,
				    op0, mode, op1, mode2);

  return target;
}
3045
/* Expand expression EXP which is a call to the strlen builtin.  Return
   NULL_RTX if we failed the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient.  TARGET_MODE is the
   mode the caller wants the result in.  */

static rtx
expand_builtin_strlen (tree exp, rtx target,
		       enum machine_mode target_mode)
{
  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      rtx pat;
      tree len;
      tree src = CALL_EXPR_ARG (exp, 0);
      rtx result, src_reg, char_rtx, before_strlen;
      enum machine_mode insn_mode = target_mode, char_mode;
      enum insn_code icode = CODE_FOR_nothing;
      int align;

      /* If the length can be computed at compile-time, return it.  */
      len = c_strlen (src, 0);
      if (len)
	return expand_expr (len, target, target_mode, EXPAND_NORMAL);

      /* If the length can be computed at compile-time and is constant
	 integer, but there are side-effects in src, evaluate
	 src for side-effects, then return len.
	 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
	 can be optimized into: i++; x = 3;  */
      len = c_strlen (src, 1);
      if (len && TREE_CODE (len) == INTEGER_CST)
	{
	  expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
	  return expand_expr (len, target, target_mode, EXPAND_NORMAL);
	}

      align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;

      /* If SRC is not a pointer type, don't do this operation inline.  */
      if (align == 0)
	return NULL_RTX;

      /* Bail out if we can't compute strlen in the right mode.
	 Widen the mode until a target strlen pattern is found.  */
      while (insn_mode != VOIDmode)
	{
	  icode = strlen_optab->handlers[(int) insn_mode].insn_code;
	  if (icode != CODE_FOR_nothing)
	    break;

	  insn_mode = GET_MODE_WIDER_MODE (insn_mode);
	}
      if (insn_mode == VOIDmode)
	return NULL_RTX;

      /* Make a place to write the result of the instruction.  */
      result = target;
      if (! (result != 0
	     && REG_P (result)
	     && GET_MODE (result) == insn_mode
	     && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	result = gen_reg_rtx (insn_mode);

      /* Make a place to hold the source address.  We will not expand
	 the actual source until we are sure that the expansion will
	 not fail -- there are trees that cannot be expanded twice.  */
      src_reg = gen_reg_rtx (Pmode);

      /* Mark the beginning of the strlen sequence so we can emit the
	 source operand later.  */
      before_strlen = get_last_insn ();

      /* The character searched for is NUL; convert it to the mode the
	 strlen pattern's operand 2 expects.  */
      char_rtx = const0_rtx;
      char_mode = insn_data[(int) icode].operand[2].mode;
      if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
							    char_mode))
	char_rtx = copy_to_mode_reg (char_mode, char_rtx);

      pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
			     char_rtx, GEN_INT (align));
      if (! pat)
	return NULL_RTX;
      emit_insn (pat);

      /* Now that we are assured of success, expand the source.  */
      start_sequence ();
      pat = expand_expr (src, src_reg, ptr_mode, EXPAND_NORMAL);
      if (pat != src_reg)
	emit_move_insn (src_reg, pat);
      pat = get_insns ();
      end_sequence ();

      /* Splice the source-address computation in ahead of the strlen
	 insn emitted above.  */
      if (before_strlen)
	emit_insn_after (pat, before_strlen);
      else
	emit_insn_before (pat, get_insns ());

      /* Return the value in the proper mode for this function.  */
      if (GET_MODE (result) == target_mode)
	target = result;
      else if (target != 0)
	convert_move (target, result, 0);
      else
	target = convert_to_mode (target_mode, result, 0);

      return target;
    }
}
3154
3155 /* Expand a call to the strstr builtin. Return NULL_RTX if we failed the
3156 caller should emit a normal call, otherwise try to get the result
3157 in TARGET, if convenient (and in mode MODE if that's convenient). */
3158
3159 static rtx
3160 expand_builtin_strstr (tree exp, rtx target, enum machine_mode mode)
3161 {
3162 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3163 {
3164 tree type = TREE_TYPE (exp);
3165 tree result = fold_builtin_strstr (CALL_EXPR_ARG (exp, 0),
3166 CALL_EXPR_ARG (exp, 1), type);
3167 if (result)
3168 return expand_expr (result, target, mode, EXPAND_NORMAL);
3169 }
3170 return NULL_RTX;
3171 }
3172
3173 /* Expand a call to the strchr builtin. Return NULL_RTX if we failed the
3174 caller should emit a normal call, otherwise try to get the result
3175 in TARGET, if convenient (and in mode MODE if that's convenient). */
3176
3177 static rtx
3178 expand_builtin_strchr (tree exp, rtx target, enum machine_mode mode)
3179 {
3180 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3181 {
3182 tree type = TREE_TYPE (exp);
3183 tree result = fold_builtin_strchr (CALL_EXPR_ARG (exp, 0),
3184 CALL_EXPR_ARG (exp, 1), type);
3185 if (result)
3186 return expand_expr (result, target, mode, EXPAND_NORMAL);
3187
3188 /* FIXME: Should use strchrM optab so that ports can optimize this. */
3189 }
3190 return NULL_RTX;
3191 }
3192
3193 /* Expand a call to the strrchr builtin. Return NULL_RTX if we failed the
3194 caller should emit a normal call, otherwise try to get the result
3195 in TARGET, if convenient (and in mode MODE if that's convenient). */
3196
3197 static rtx
3198 expand_builtin_strrchr (tree exp, rtx target, enum machine_mode mode)
3199 {
3200 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3201 {
3202 tree type = TREE_TYPE (exp);
3203 tree result = fold_builtin_strrchr (CALL_EXPR_ARG (exp, 0),
3204 CALL_EXPR_ARG (exp, 1), type);
3205 if (result)
3206 return expand_expr (result, target, mode, EXPAND_NORMAL);
3207 }
3208 return NULL_RTX;
3209 }
3210
3211 /* Expand a call to the strpbrk builtin. Return NULL_RTX if we failed the
3212 caller should emit a normal call, otherwise try to get the result
3213 in TARGET, if convenient (and in mode MODE if that's convenient). */
3214
3215 static rtx
3216 expand_builtin_strpbrk (tree exp, rtx target, enum machine_mode mode)
3217 {
3218 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3219 {
3220 tree type = TREE_TYPE (exp);
3221 tree result = fold_builtin_strpbrk (CALL_EXPR_ARG (exp, 0),
3222 CALL_EXPR_ARG (exp, 1), type);
3223 if (result)
3224 return expand_expr (result, target, mode, EXPAND_NORMAL);
3225 }
3226 return NULL_RTX;
3227 }
3228
3229 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3230 bytes from constant string DATA + OFFSET and return it as target
3231 constant. */
3232
3233 static rtx
3234 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3235 enum machine_mode mode)
3236 {
3237 const char *str = (const char *) data;
3238
3239 gcc_assert (offset >= 0
3240 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3241 <= strlen (str) + 1));
3242
3243 return c_readstr (str + offset, mode);
3244 }
3245
/* Expand a call EXP to the memcpy builtin.
   Return NULL_RTX if we failed, the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_memcpy (tree exp, rtx target, enum machine_mode mode)
{
  tree fndecl = get_callee_fndecl (exp);

  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      tree len = CALL_EXPR_ARG (exp, 2);
      const char *src_str;
      unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
      unsigned int dest_align
	= get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
      rtx dest_mem, src_mem, dest_addr, len_rtx;
      tree result = fold_builtin_memory_op (dest, src, len,
					    TREE_TYPE (TREE_TYPE (fndecl)),
					    false, /*endp=*/0);
      HOST_WIDE_INT expected_size = -1;
      unsigned int expected_align = 0;

      /* If the call folded to a tree, evaluate any side effects chained
	 as COMPOUND_EXPRs and expand the final value instead.  */
      if (result)
	{
	  while (TREE_CODE (result) == COMPOUND_EXPR)
	    {
	      expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
			   EXPAND_NORMAL);
	      result = TREE_OPERAND (result, 1);
	    }
	  return expand_expr (result, target, mode, EXPAND_NORMAL);
	}

      /* If DEST is not a pointer type, call the normal function.  */
      if (dest_align == 0)
	return NULL_RTX;

      /* If either SRC is not a pointer type, don't do this
	 operation in-line.  */
      if (src_align == 0)
	return NULL_RTX;

      /* Use profile feedback, if available, to refine the expected
	 alignment and size of this block operation.  */
      stringop_block_profile (exp, &expected_align, &expected_size);
      if (expected_align < dest_align)
	expected_align = dest_align;
      dest_mem = get_memory_rtx (dest, len);
      set_mem_align (dest_mem, dest_align);
      len_rtx = expand_normal (len);
      src_str = c_getstr (src);

      /* If SRC is a string constant and block move would be done
	 by pieces, we can avoid loading the string from memory
	 and only stored the computed constants.  */
      if (src_str
	  && GET_CODE (len_rtx) == CONST_INT
	  && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
	  && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
				  (void *) src_str, dest_align))
	{
	  dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
				      builtin_memcpy_read_str,
				      (void *) src_str, dest_align, 0);
	  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}

      src_mem = get_memory_rtx (src, len);
      set_mem_align (src_mem, src_align);

      /* Copy word part most expediently.  */
      dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
					 CALL_EXPR_TAILCALL (exp)
					 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
					 expected_align, expected_size);

      /* memcpy returns DEST; materialize it if the block-move expander
	 did not already produce an address.  */
      if (dest_addr == 0)
	{
	  dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
	  dest_addr = convert_memory_address (ptr_mode, dest_addr);
	}
      return dest_addr;
    }
}
3337
3338 /* Expand a call EXP to the mempcpy builtin.
3339 Return NULL_RTX if we failed; the caller should emit a normal call,
3340 otherwise try to get the result in TARGET, if convenient (and in
3341 mode MODE if that's convenient). If ENDP is 0 return the
3342 destination pointer, if ENDP is 1 return the end pointer ala
3343 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3344 stpcpy. */
3345
3346 static rtx
3347 expand_builtin_mempcpy(tree exp, rtx target, enum machine_mode mode)
3348 {
3349 if (!validate_arglist (exp,
3350 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3351 return NULL_RTX;
3352 else
3353 {
3354 tree dest = CALL_EXPR_ARG (exp, 0);
3355 tree src = CALL_EXPR_ARG (exp, 1);
3356 tree len = CALL_EXPR_ARG (exp, 2);
3357 return expand_builtin_mempcpy_args (dest, src, len,
3358 TREE_TYPE (exp),
3359 target, mode, /*endp=*/ 1);
3360 }
3361 }
3362
/* Helper function to do the actual work for expand_builtin_mempcpy.  The
   arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   TYPE is the return type of the call.  The other arguments and return value
   are the same as for expand_builtin_mempcpy.  */

static rtx
expand_builtin_mempcpy_args (tree dest, tree src, tree len, tree type,
			     rtx target, enum machine_mode mode, int endp)
{
  /* If return value is ignored, transform mempcpy into memcpy.  */
  if (target == const0_rtx)
    {
      tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];

      /* No implicit memcpy declaration available; punt.  */
      if (!fn)
	return NULL_RTX;

      return expand_expr (build_call_expr (fn, 3, dest, src, len),
			  target, mode, EXPAND_NORMAL);
    }
  else
    {
      const char *src_str;
      unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
      unsigned int dest_align
	= get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
      rtx dest_mem, src_mem, len_rtx;
      tree result = fold_builtin_memory_op (dest, src, len, type, false, endp);

      /* If the call folded to a tree, evaluate any side effects chained
	 as COMPOUND_EXPRs and expand the final value instead.  */
      if (result)
	{
	  while (TREE_CODE (result) == COMPOUND_EXPR)
	    {
	      expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
			   EXPAND_NORMAL);
	      result = TREE_OPERAND (result, 1);
	    }
	  return expand_expr (result, target, mode, EXPAND_NORMAL);
	}

      /* If either SRC or DEST is not a pointer type, don't do this
	 operation in-line.  */
      if (dest_align == 0 || src_align == 0)
	return NULL_RTX;

      /* If LEN is not constant, call the normal function.  */
      if (! host_integerp (len, 1))
	return NULL_RTX;

      len_rtx = expand_normal (len);
      src_str = c_getstr (src);

      /* If SRC is a string constant and block move would be done
	 by pieces, we can avoid loading the string from memory
	 and only stored the computed constants.  */
      if (src_str
	  && GET_CODE (len_rtx) == CONST_INT
	  && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
	  && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
				  (void *) src_str, dest_align))
	{
	  dest_mem = get_memory_rtx (dest, len);
	  set_mem_align (dest_mem, dest_align);
	  dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
				      builtin_memcpy_read_str,
				      (void *) src_str, dest_align, endp);
	  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}

      /* Otherwise fall back on a piecewise move when the constant
	 length is small enough for the common alignment.  */
      if (GET_CODE (len_rtx) == CONST_INT
	  && can_move_by_pieces (INTVAL (len_rtx),
				 MIN (dest_align, src_align)))
	{
	  dest_mem = get_memory_rtx (dest, len);
	  set_mem_align (dest_mem, dest_align);
	  src_mem = get_memory_rtx (src, len);
	  set_mem_align (src_mem, src_align);
	  dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
				     MIN (dest_align, src_align), endp);
	  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}

      return NULL_RTX;
    }
}
3453
3454 /* Expand expression EXP, which is a call to the memmove builtin. Return
3455 NULL_RTX if we failed; the caller should emit a normal call. */
3456
3457 static rtx
3458 expand_builtin_memmove (tree exp, rtx target, enum machine_mode mode, int ignore)
3459 {
3460 if (!validate_arglist (exp,
3461 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3462 return NULL_RTX;
3463 else
3464 {
3465 tree dest = CALL_EXPR_ARG (exp, 0);
3466 tree src = CALL_EXPR_ARG (exp, 1);
3467 tree len = CALL_EXPR_ARG (exp, 2);
3468 return expand_builtin_memmove_args (dest, src, len, TREE_TYPE (exp),
3469 target, mode, ignore);
3470 }
3471 }
3472
3473 /* Helper function to do the actual work for expand_builtin_memmove. The
3474 arguments to the builtin_memmove call DEST, SRC, and LEN are broken out
3475 so that this can also be called without constructing an actual CALL_EXPR.
3476 TYPE is the return type of the call. The other arguments and return value
3477 are the same as for expand_builtin_memmove. */
3478
3479 static rtx
3480 expand_builtin_memmove_args (tree dest, tree src, tree len,
3481 tree type, rtx target, enum machine_mode mode,
3482 int ignore)
3483 {
3484 tree result = fold_builtin_memory_op (dest, src, len, type, ignore, /*endp=*/3);
3485
3486 if (result)
3487 {
3488 STRIP_TYPE_NOPS (result);
3489 while (TREE_CODE (result) == COMPOUND_EXPR)
3490 {
3491 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3492 EXPAND_NORMAL);
3493 result = TREE_OPERAND (result, 1);
3494 }
3495 return expand_expr (result, target, mode, EXPAND_NORMAL);
3496 }
3497
3498 /* Otherwise, call the normal function. */
3499 return NULL_RTX;
3500 }
3501
3502 /* Expand expression EXP, which is a call to the bcopy builtin. Return
3503 NULL_RTX if we failed the caller should emit a normal call. */
3504
3505 static rtx
3506 expand_builtin_bcopy (tree exp, int ignore)
3507 {
3508 tree type = TREE_TYPE (exp);
3509 tree src, dest, size;
3510
3511 if (!validate_arglist (exp,
3512 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3513 return NULL_RTX;
3514
3515 src = CALL_EXPR_ARG (exp, 0);
3516 dest = CALL_EXPR_ARG (exp, 1);
3517 size = CALL_EXPR_ARG (exp, 2);
3518
3519 /* Transform bcopy(ptr x, ptr y, int z) to memmove(ptr y, ptr x, size_t z).
3520 This is done this way so that if it isn't expanded inline, we fall
3521 back to calling bcopy instead of memmove. */
3522 return expand_builtin_memmove_args (dest, src,
3523 fold_convert (sizetype, size),
3524 type, const0_rtx, VOIDmode,
3525 ignore);
3526 }
3527
3528 #ifndef HAVE_movstr
3529 # define HAVE_movstr 0
3530 # define CODE_FOR_movstr CODE_FOR_nothing
3531 #endif
3532
/* Expand into a movstr instruction, if one is available.  Return NULL_RTX if
   we failed, the caller should emit a normal call, otherwise try to
   get the result in TARGET, if convenient.  If ENDP is 0 return the
   destination pointer, if ENDP is 1 return the end pointer ala
   mempcpy, and if ENDP is 2 return the end pointer minus one ala
   stpcpy.  */

static rtx
expand_movstr (tree dest, tree src, rtx target, int endp)
{
  rtx end;
  rtx dest_mem;
  rtx src_mem;
  rtx insn;
  const struct insn_data * data;

  /* Only usable when the target provides a movstr pattern.  */
  if (!HAVE_movstr)
    return NULL_RTX;

  dest_mem = get_memory_rtx (dest, NULL);
  src_mem = get_memory_rtx (src, NULL);
  if (!endp)
    {
      /* strcpy-style result: return the destination pointer itself,
	 pinned in a register; END is just a scratch output.  */
      target = force_reg (Pmode, XEXP (dest_mem, 0));
      dest_mem = replace_equiv_address (dest_mem, target);
      end = gen_reg_rtx (Pmode);
    }
  else
    {
      /* stpcpy/mempcpy-style result: the pattern's END output is the
	 value we want (possibly adjusted below for endp == 1).  */
      if (target == 0 || target == const0_rtx)
	{
	  end = gen_reg_rtx (Pmode);
	  if (target == 0)
	    target = end;
	}
      else
	end = target;
    }

  data = insn_data + CODE_FOR_movstr;

  /* Match END to the mode the pattern's first operand expects.  */
  if (data->operand[0].mode != VOIDmode)
    end = gen_lowpart (data->operand[0].mode, end);

  insn = data->genfun (end, dest_mem, src_mem);

  gcc_assert (insn);

  emit_insn (insn);

  /* movstr is supposed to set end to the address of the NUL
     terminator.  If the caller requested a mempcpy-like return value,
     adjust it.  */
  if (endp == 1 && target != const0_rtx)
    {
      rtx tem = plus_constant (gen_lowpart (GET_MODE (target), end), 1);
      emit_move_insn (target, force_operand (tem, NULL_RTX));
    }

  return target;
}
3594
3595 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3596 NULL_RTX if we failed the caller should emit a normal call, otherwise
3597 try to get the result in TARGET, if convenient (and in mode MODE if that's
3598 convenient). */
3599
3600 static rtx
3601 expand_builtin_strcpy (tree fndecl, tree exp, rtx target, enum machine_mode mode)
3602 {
3603 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3604 {
3605 tree dest = CALL_EXPR_ARG (exp, 0);
3606 tree src = CALL_EXPR_ARG (exp, 1);
3607 return expand_builtin_strcpy_args (fndecl, dest, src, target, mode);
3608 }
3609 return NULL_RTX;
3610 }
3611
3612 /* Helper function to do the actual work for expand_builtin_strcpy. The
3613 arguments to the builtin_strcpy call DEST and SRC are broken out
3614 so that this can also be called without constructing an actual CALL_EXPR.
3615 The other arguments and return value are the same as for
3616 expand_builtin_strcpy. */
3617
3618 static rtx
3619 expand_builtin_strcpy_args (tree fndecl, tree dest, tree src,
3620 rtx target, enum machine_mode mode)
3621 {
3622 tree result = fold_builtin_strcpy (fndecl, dest, src, 0);
3623 if (result)
3624 return expand_expr (result, target, mode, EXPAND_NORMAL);
3625 return expand_movstr (dest, src, target, /*endp=*/0);
3626
3627 }
3628
/* Expand a call EXP to the stpcpy builtin.
   Return NULL_RTX if we failed the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
{
  tree dst, src;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  /* If return value is ignored, transform stpcpy into strcpy.  */
  if (target == const0_rtx)
    {
      tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
      if (!fn)
	return NULL_RTX;

      return expand_expr (build_call_expr (fn, 2, dst, src),
			  target, mode, EXPAND_NORMAL);
    }
  else
    {
      tree len, lenp1;
      rtx ret;

      /* Ensure we get an actual string whose length can be evaluated at
	 compile-time, not an expression containing a string.  This is
	 because the latter will potentially produce pessimized code
	 when used to produce the return value.  */
      if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
	return expand_movstr (dst, src, target, /*endp=*/2);

      /* stpcpy (d, s) is mempcpy (d, s, strlen (s) + 1) - 1, which is
	 exactly what endp == 2 requests from mempcpy.  */
      lenp1 = size_binop (PLUS_EXPR, len, ssize_int (1));
      ret = expand_builtin_mempcpy_args (dst, src, lenp1, TREE_TYPE (exp),
					 target, mode, /*endp=*/2);

      if (ret)
	return ret;

      /* mempcpy could not expand; with a constant length, fall back to
	 strcpy plus an explicit pointer adjustment.  */
      if (TREE_CODE (len) == INTEGER_CST)
	{
	  rtx len_rtx = expand_normal (len);

	  if (GET_CODE (len_rtx) == CONST_INT)
	    {
	      ret = expand_builtin_strcpy_args (get_callee_fndecl (exp),
						dst, src, target, mode);

	      if (ret)
		{
		  /* Move RET + LEN into a suitable TARGET register.  */
		  if (! target)
		    {
		      if (mode != VOIDmode)
			target = gen_reg_rtx (mode);
		      else
			target = gen_reg_rtx (GET_MODE (ret));
		    }
		  if (GET_MODE (target) != GET_MODE (ret))
		    ret = gen_lowpart (GET_MODE (target), ret);

		  ret = plus_constant (ret, INTVAL (len_rtx));
		  ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
		  gcc_assert (ret);

		  return target;
		}
	    }
	}

      /* Last resort: a movstr instruction, if the target has one.  */
      return expand_movstr (dst, src, target, /*endp=*/2);
    }
}
3707
3708 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3709 bytes from constant string DATA + OFFSET and return it as target
3710 constant. */
3711
3712 static rtx
3713 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3714 enum machine_mode mode)
3715 {
3716 const char *str = (const char *) data;
3717
3718 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3719 return const0_rtx;
3720
3721 return c_readstr (str + offset, mode);
3722 }
3723
/* Expand expression EXP, which is a call to the strncpy builtin.  Return
   NULL_RTX if we failed the caller should emit a normal call.  */

static rtx
expand_builtin_strncpy (tree exp, rtx target, enum machine_mode mode)
{
  tree fndecl = get_callee_fndecl (exp);

  if (validate_arglist (exp,
			POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      tree len = CALL_EXPR_ARG (exp, 2);
      /* Compile-time length of SRC, tolerating side effects in it.  */
      tree slen = c_strlen (src, 1);
      tree result = fold_builtin_strncpy (fndecl, dest, src, len, slen);

      /* If the call folded to a tree, evaluate any side effects chained
	 as COMPOUND_EXPRs and expand the final value instead.  */
      if (result)
	{
	  while (TREE_CODE (result) == COMPOUND_EXPR)
	    {
	      expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
			   EXPAND_NORMAL);
	      result = TREE_OPERAND (result, 1);
	    }
	  return expand_expr (result, target, mode, EXPAND_NORMAL);
	}

      /* We must be passed a constant len and src parameter.  */
      if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
	return NULL_RTX;

      slen = size_binop (PLUS_EXPR, slen, ssize_int (1));

      /* We're required to pad with trailing zeros if the requested
	 len is greater than strlen(s2)+1.  In that case try to
	 use store_by_pieces, if it fails, punt.  */
      if (tree_int_cst_lt (slen, len))
	{
	  unsigned int dest_align
	    = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
	  const char *p = c_getstr (src);
	  rtx dest_mem;

	  /* Punt unless the string, alignment, and length all allow a
	     store-by-pieces expansion.  */
	  if (!p || dest_align == 0 || !host_integerp (len, 1)
	      || !can_store_by_pieces (tree_low_cst (len, 1),
				       builtin_strncpy_read_str,
				       (void *) p, dest_align))
	    return NULL_RTX;

	  dest_mem = get_memory_rtx (dest, len);
	  store_by_pieces (dest_mem, tree_low_cst (len, 1),
			   builtin_strncpy_read_str,
			   (void *) p, dest_align, 0);
	  /* strncpy returns DEST.  */
	  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}
    }
  return NULL_RTX;
}
3785
3786 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3787 bytes from constant string DATA + OFFSET and return it as target
3788 constant. */
3789
3790 rtx
3791 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3792 enum machine_mode mode)
3793 {
3794 const char *c = (const char *) data;
3795 char *p = alloca (GET_MODE_SIZE (mode));
3796
3797 memset (p, *c, GET_MODE_SIZE (mode));
3798
3799 return c_readstr (p, mode);
3800 }
3801
3802 /* Callback routine for store_by_pieces. Return the RTL of a register
3803 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3804 char value given in the RTL register data. For example, if mode is
3805 4 bytes wide, return the RTL for 0x01010101*data. */
3806
3807 static rtx
3808 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3809 enum machine_mode mode)
3810 {
3811 rtx target, coeff;
3812 size_t size;
3813 char *p;
3814
3815 size = GET_MODE_SIZE (mode);
3816 if (size == 1)
3817 return (rtx) data;
3818
3819 p = alloca (size);
3820 memset (p, 1, size);
3821 coeff = c_readstr (p, mode);
3822
3823 target = convert_to_mode (mode, (rtx) data, 1);
3824 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3825 return force_reg (mode, target);
3826 }
3827
3828 /* Expand expression EXP, which is a call to the memset builtin. Return
3829 NULL_RTX if we failed the caller should emit a normal call, otherwise
3830 try to get the result in TARGET, if convenient (and in mode MODE if that's
3831 convenient). */
3832
3833 static rtx
3834 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3835 {
3836 if (!validate_arglist (exp,
3837 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3838 return NULL_RTX;
3839 else
3840 {
3841 tree dest = CALL_EXPR_ARG (exp, 0);
3842 tree val = CALL_EXPR_ARG (exp, 1);
3843 tree len = CALL_EXPR_ARG (exp, 2);
3844 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3845 }
3846 }
3847
3848 /* Helper function to do the actual work for expand_builtin_memset. The
3849 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3850 so that this can also be called without constructing an actual CALL_EXPR.
3851 The other arguments and return value are the same as for
3852 expand_builtin_memset. */
3853
static rtx
expand_builtin_memset_args (tree dest, tree val, tree len,
			    rtx target, enum machine_mode mode, tree orig_exp)
{
  tree fndecl, fn;
  enum built_in_function fcode;
  char c;
  unsigned int dest_align;
  rtx dest_mem, dest_addr, len_rtx;
  HOST_WIDE_INT expected_size = -1;
  unsigned int expected_align = 0;

  dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);

  /* If DEST is not a pointer type, don't do this operation in-line.  */
  if (dest_align == 0)
    return NULL_RTX;

  /* Profile feedback may supply a likely block size and alignment;
     never let that lower the alignment we already proved.  */
  stringop_block_profile (orig_exp, &expected_align, &expected_size);
  if (expected_align < dest_align)
    expected_align = dest_align;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      /* Evaluate and ignore VAL in case it has side-effects.  */
      expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
      return expand_expr (dest, target, mode, EXPAND_NORMAL);
    }

  /* Stabilize the arguments in case we fail.  */
  dest = builtin_save_expr (dest);
  val = builtin_save_expr (val);
  len = builtin_save_expr (len);

  len_rtx = expand_normal (len);
  dest_mem = get_memory_rtx (dest, len);

  /* Non-constant fill value: replicate it at expand time.  */
  if (TREE_CODE (val) != INTEGER_CST)
    {
      rtx val_rtx;

      val_rtx = expand_normal (val);
      val_rtx = convert_to_mode (TYPE_MODE (unsigned_char_type_node),
				 val_rtx, 0);

      /* Assume that we can memset by pieces if we can store
       * the coefficients by pieces (in the required modes).
       * We can't pass builtin_memset_gen_str as that emits RTL.  */
      c = 1;
      if (host_integerp (len, 1)
	  && !(optimize_size && tree_low_cst (len, 1) > 1)
	  && can_store_by_pieces (tree_low_cst (len, 1),
				  builtin_memset_read_str, &c, dest_align))
	{
	  val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
			       val_rtx);
	  store_by_pieces (dest_mem, tree_low_cst (len, 1),
			   builtin_memset_gen_str, val_rtx, dest_align, 0);
	}
      else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
					dest_align, expected_align,
					expected_size))
	goto do_libcall;

      /* The builtin returns DEST; hand back its address in ptr_mode.  */
      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  /* Constant fill value: narrow it to a target char, or give up.  */
  if (target_char_cast (val, &c))
    goto do_libcall;

  /* Non-zero constant byte: store by pieces or via the setmem pattern.  */
  if (c)
    {
      if (host_integerp (len, 1)
	  && !(optimize_size && tree_low_cst (len, 1) > 1)
	  && can_store_by_pieces (tree_low_cst (len, 1),
				  builtin_memset_read_str, &c, dest_align))
	store_by_pieces (dest_mem, tree_low_cst (len, 1),
			 builtin_memset_read_str, &c, dest_align, 0);
      else if (!set_storage_via_setmem (dest_mem, len_rtx, GEN_INT (c),
					dest_align, expected_align,
					expected_size))
	goto do_libcall;

      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  /* Fill byte is zero: use the block-clear expander.  */
  set_mem_align (dest_mem, dest_align);
  dest_addr = clear_storage_hints (dest_mem, len_rtx,
				   CALL_EXPR_TAILCALL (orig_exp)
				   ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
				   expected_align, expected_size);

  if (dest_addr == 0)
    {
      dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_addr = convert_memory_address (ptr_mode, dest_addr);
    }

  return dest_addr;

 do_libcall:
  /* Inline expansion failed: emit a call to the function ORIG_EXP named
     (memset or bzero), using the stabilized argument trees.  */
  fndecl = get_callee_fndecl (orig_exp);
  fcode = DECL_FUNCTION_CODE (fndecl);
  if (fcode == BUILT_IN_MEMSET)
    fn = build_call_expr (fndecl, 3, dest, val, len);
  else if (fcode == BUILT_IN_BZERO)
    fn = build_call_expr (fndecl, 2, dest, len);
  else
    gcc_unreachable ();
  if (TREE_CODE (fn) == CALL_EXPR)
    CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
  return expand_call (fn, target, target == const0_rtx);
}
3972
3973 /* Expand expression EXP, which is a call to the bzero builtin. Return
3974 NULL_RTX if we failed the caller should emit a normal call. */
3975
3976 static rtx
3977 expand_builtin_bzero (tree exp)
3978 {
3979 tree dest, size;
3980
3981 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3982 return NULL_RTX;
3983
3984 dest = CALL_EXPR_ARG (exp, 0);
3985 size = CALL_EXPR_ARG (exp, 1);
3986
3987 /* New argument list transforming bzero(ptr x, int y) to
3988 memset(ptr x, int 0, size_t y). This is done this way
3989 so that if it isn't expanded inline, we fallback to
3990 calling bzero instead of memset. */
3991
3992 return expand_builtin_memset_args (dest, integer_zero_node,
3993 fold_convert (sizetype, size),
3994 const0_rtx, VOIDmode, exp);
3995 }
3996
3997 /* Expand a call to the memchr builtin. Return NULL_RTX if we failed the
3998 caller should emit a normal call, otherwise try to get the result
3999 in TARGET, if convenient (and in mode MODE if that's convenient). */
4000
4001 static rtx
4002 expand_builtin_memchr (tree exp, rtx target, enum machine_mode mode)
4003 {
4004 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE,
4005 INTEGER_TYPE, VOID_TYPE))
4006 {
4007 tree type = TREE_TYPE (exp);
4008 tree result = fold_builtin_memchr (CALL_EXPR_ARG (exp, 0),
4009 CALL_EXPR_ARG (exp, 1),
4010 CALL_EXPR_ARG (exp, 2), type);
4011 if (result)
4012 return expand_expr (result, target, mode, EXPAND_NORMAL);
4013 }
4014 return NULL_RTX;
4015 }
4016
4017 /* Expand expression EXP, which is a call to the memcmp built-in function.
4018 Return NULL_RTX if we failed and the
4019 caller should emit a normal call, otherwise try to get the result in
4020 TARGET, if convenient (and in mode MODE, if that's convenient). */
4021
static rtx
expand_builtin_memcmp (tree exp, rtx target, enum machine_mode mode)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      /* First try to fold the comparison to a constant or simpler tree.  */
      tree result = fold_builtin_memcmp (CALL_EXPR_ARG (exp, 0),
					 CALL_EXPR_ARG (exp, 1),
					 CALL_EXPR_ARG (exp, 2));
      if (result)
	return expand_expr (result, target, mode, EXPAND_NORMAL);
    }

#if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
  {
    rtx arg1_rtx, arg2_rtx, arg3_rtx;
    rtx result;
    rtx insn;
    tree arg1 = CALL_EXPR_ARG (exp, 0);
    tree arg2 = CALL_EXPR_ARG (exp, 1);
    tree len = CALL_EXPR_ARG (exp, 2);

    int arg1_align
      = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
    int arg2_align
      = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
    enum machine_mode insn_mode;

    /* Prefer the target's cmpmem pattern; fall back to cmpstrn.  Note the
       deliberate if/else chains across the #ifdef boundaries.  */
#ifdef HAVE_cmpmemsi
    if (HAVE_cmpmemsi)
      insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
    else
#endif
#ifdef HAVE_cmpstrnsi
    if (HAVE_cmpstrnsi)
      insn_mode = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
    else
#endif
      return NULL_RTX;

    /* If we don't have POINTER_TYPE, call the function.  */
    if (arg1_align == 0 || arg2_align == 0)
      return NULL_RTX;

    /* Make a place to write the result of the instruction.  */
    result = target;
    if (! (result != 0
	   && REG_P (result) && GET_MODE (result) == insn_mode
	   && REGNO (result) >= FIRST_PSEUDO_REGISTER))
      result = gen_reg_rtx (insn_mode);

    arg1_rtx = get_memory_rtx (arg1, len);
    arg2_rtx = get_memory_rtx (arg2, len);
    arg3_rtx = expand_normal (len);

    /* Set MEM_SIZE as appropriate.  */
    if (GET_CODE (arg3_rtx) == CONST_INT)
      {
	set_mem_size (arg1_rtx, arg3_rtx);
	set_mem_size (arg2_rtx, arg3_rtx);
      }

#ifdef HAVE_cmpmemsi
    if (HAVE_cmpmemsi)
      insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
			   GEN_INT (MIN (arg1_align, arg2_align)));
    else
#endif
#ifdef HAVE_cmpstrnsi
    if (HAVE_cmpstrnsi)
      insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
			    GEN_INT (MIN (arg1_align, arg2_align)));
    else
#endif
      gcc_unreachable ();

    /* The gen_* routine may return 0 to refuse this operand combination;
       in that case emit an out-of-line call to memcmp instead.  */
    if (insn)
      emit_insn (insn);
    else
      emit_library_call_value (memcmp_libfunc, result, LCT_PURE_MAKE_BLOCK,
			       TYPE_MODE (integer_type_node), 3,
			       XEXP (arg1_rtx, 0), Pmode,
			       XEXP (arg2_rtx, 0), Pmode,
			       convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
						TYPE_UNSIGNED (sizetype)),
			       TYPE_MODE (sizetype));

    /* Return the value in the proper mode for this function.  */
    mode = TYPE_MODE (TREE_TYPE (exp));
    if (GET_MODE (result) == mode)
      return result;
    else if (target != 0)
      {
	convert_move (target, result, 0);
	return target;
      }
    else
      return convert_to_mode (mode, result, 0);
  }
#endif

  return NULL_RTX;
}
4127
4128 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4129 if we failed the caller should emit a normal call, otherwise try to get
4130 the result in TARGET, if convenient. */
4131
static rtx
expand_builtin_strcmp (tree exp, rtx target, enum machine_mode mode)
{
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      /* First try to fold the comparison to a constant or simpler tree.  */
      tree result = fold_builtin_strcmp (CALL_EXPR_ARG (exp, 0),
					 CALL_EXPR_ARG (exp, 1));
      if (result)
	return expand_expr (result, target, mode, EXPAND_NORMAL);
    }

#if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
  if (cmpstr_optab[SImode] != CODE_FOR_nothing
      || cmpstrn_optab[SImode] != CODE_FOR_nothing)
    {
      rtx arg1_rtx, arg2_rtx;
      rtx result, insn = NULL_RTX;
      tree fndecl, fn;
      tree arg1 = CALL_EXPR_ARG (exp, 0);
      tree arg2 = CALL_EXPR_ARG (exp, 1);

      int arg1_align
	= get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
      int arg2_align
	= get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;

      /* If we don't have POINTER_TYPE, call the function.  */
      if (arg1_align == 0 || arg2_align == 0)
	return NULL_RTX;

      /* Stabilize the arguments in case gen_cmpstr(n)si fail.  */
      arg1 = builtin_save_expr (arg1);
      arg2 = builtin_save_expr (arg2);

      arg1_rtx = get_memory_rtx (arg1, NULL);
      arg2_rtx = get_memory_rtx (arg2, NULL);

#ifdef HAVE_cmpstrsi
      /* Try to call cmpstrsi.  */
      if (HAVE_cmpstrsi)
	{
	  enum machine_mode insn_mode
	    = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;

	  /* Make a place to write the result of the instruction.  */
	  result = target;
	  if (! (result != 0
		 && REG_P (result) && GET_MODE (result) == insn_mode
		 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	    result = gen_reg_rtx (insn_mode);

	  insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
			       GEN_INT (MIN (arg1_align, arg2_align)));
	}
#endif
#ifdef HAVE_cmpstrnsi
      /* Try to determine at least one length and call cmpstrnsi.  */
      if (!insn && HAVE_cmpstrnsi)
	{
	  tree len;
	  rtx arg3_rtx;

	  enum machine_mode insn_mode
	    = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
	  tree len1 = c_strlen (arg1, 1);
	  tree len2 = c_strlen (arg2, 1);

	  /* +1 so the comparison covers the NUL terminator.  */
	  if (len1)
	    len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
	  if (len2)
	    len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);

	  /* If we don't have a constant length for the first, use the length
	     of the second, if we know it.  We don't require a constant for
	     this case; some cost analysis could be done if both are available
	     but neither is constant.  For now, assume they're equally cheap,
	     unless one has side effects.  If both strings have constant lengths,
	     use the smaller.  */

	  if (!len1)
	    len = len2;
	  else if (!len2)
	    len = len1;
	  else if (TREE_SIDE_EFFECTS (len1))
	    len = len2;
	  else if (TREE_SIDE_EFFECTS (len2))
	    len = len1;
	  else if (TREE_CODE (len1) != INTEGER_CST)
	    len = len2;
	  else if (TREE_CODE (len2) != INTEGER_CST)
	    len = len1;
	  else if (tree_int_cst_lt (len1, len2))
	    len = len1;
	  else
	    len = len2;

	  /* If both arguments have side effects, we cannot optimize.  */
	  if (!len || TREE_SIDE_EFFECTS (len))
	    goto do_libcall;

	  arg3_rtx = expand_normal (len);

	  /* Make a place to write the result of the instruction.  */
	  result = target;
	  if (! (result != 0
		 && REG_P (result) && GET_MODE (result) == insn_mode
		 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	    result = gen_reg_rtx (insn_mode);

	  insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
				GEN_INT (MIN (arg1_align, arg2_align)));
	}
#endif

      if (insn)
	{
	  emit_insn (insn);

	  /* Return the value in the proper mode for this function.  */
	  mode = TYPE_MODE (TREE_TYPE (exp));
	  if (GET_MODE (result) == mode)
	    return result;
	  if (target == 0)
	    return convert_to_mode (mode, result, 0);
	  convert_move (target, result, 0);
	  return target;
	}

      /* Expand the library call ourselves using a stabilized argument
	 list to avoid re-evaluating the function's arguments twice.  */
#ifdef HAVE_cmpstrnsi
    do_libcall:
#endif
      fndecl = get_callee_fndecl (exp);
      fn = build_call_expr (fndecl, 2, arg1, arg2);
      if (TREE_CODE (fn) == CALL_EXPR)
	CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_call (fn, target, target == const0_rtx);
    }
#endif
  return NULL_RTX;
}
4276
/* Expand expression EXP, which is a call to the strncmp builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call.  Otherwise
   try to get the result in TARGET, if convenient.  */
4280
static rtx
expand_builtin_strncmp (tree exp, rtx target, enum machine_mode mode)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      /* First try to fold the comparison to a constant or simpler tree.  */
      tree result = fold_builtin_strncmp (CALL_EXPR_ARG (exp, 0),
					  CALL_EXPR_ARG (exp, 1),
					  CALL_EXPR_ARG (exp, 2));
      if (result)
	return expand_expr (result, target, mode, EXPAND_NORMAL);
    }

  /* If c_strlen can determine an expression for one of the string
     lengths, and it doesn't have side effects, then emit cmpstrnsi
     using length MIN(strlen(string)+1, arg3).  */
#ifdef HAVE_cmpstrnsi
  if (HAVE_cmpstrnsi)
    {
      tree len, len1, len2;
      rtx arg1_rtx, arg2_rtx, arg3_rtx;
      rtx result, insn;
      tree fndecl, fn;
      tree arg1 = CALL_EXPR_ARG (exp, 0);
      tree arg2 = CALL_EXPR_ARG (exp, 1);
      tree arg3 = CALL_EXPR_ARG (exp, 2);

      int arg1_align
	= get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
      int arg2_align
	= get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
      enum machine_mode insn_mode
	= insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;

      len1 = c_strlen (arg1, 1);
      len2 = c_strlen (arg2, 1);

      /* +1 so the comparison covers the NUL terminator.  */
      if (len1)
	len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
      if (len2)
	len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);

      /* If we don't have a constant length for the first, use the length
	 of the second, if we know it.  We don't require a constant for
	 this case; some cost analysis could be done if both are available
	 but neither is constant.  For now, assume they're equally cheap,
	 unless one has side effects.  If both strings have constant lengths,
	 use the smaller.  */

      if (!len1)
	len = len2;
      else if (!len2)
	len = len1;
      else if (TREE_SIDE_EFFECTS (len1))
	len = len2;
      else if (TREE_SIDE_EFFECTS (len2))
	len = len1;
      else if (TREE_CODE (len1) != INTEGER_CST)
	len = len2;
      else if (TREE_CODE (len2) != INTEGER_CST)
	len = len1;
      else if (tree_int_cst_lt (len1, len2))
	len = len1;
      else
	len = len2;

      /* If both arguments have side effects, we cannot optimize.  */
      if (!len || TREE_SIDE_EFFECTS (len))
	return NULL_RTX;

      /* The actual new length parameter is MIN(len,arg3).  */
      len = fold_build2 (MIN_EXPR, TREE_TYPE (len), len,
			 fold_convert (TREE_TYPE (len), arg3));

      /* If we don't have POINTER_TYPE, call the function.  */
      if (arg1_align == 0 || arg2_align == 0)
	return NULL_RTX;

      /* Make a place to write the result of the instruction.  */
      result = target;
      if (! (result != 0
	     && REG_P (result) && GET_MODE (result) == insn_mode
	     && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	result = gen_reg_rtx (insn_mode);

      /* Stabilize the arguments in case gen_cmpstrnsi fails.  */
      arg1 = builtin_save_expr (arg1);
      arg2 = builtin_save_expr (arg2);
      len = builtin_save_expr (len);

      arg1_rtx = get_memory_rtx (arg1, len);
      arg2_rtx = get_memory_rtx (arg2, len);
      arg3_rtx = expand_normal (len);
      insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
			    GEN_INT (MIN (arg1_align, arg2_align)));
      if (insn)
	{
	  emit_insn (insn);

	  /* Return the value in the proper mode for this function.  */
	  mode = TYPE_MODE (TREE_TYPE (exp));
	  if (GET_MODE (result) == mode)
	    return result;
	  if (target == 0)
	    return convert_to_mode (mode, result, 0);
	  convert_move (target, result, 0);
	  return target;
	}

      /* Expand the library call ourselves using a stabilized argument
	 list to avoid re-evaluating the function's arguments twice.  */
      fndecl = get_callee_fndecl (exp);
      fn = build_call_expr (fndecl, 3, arg1, arg2, len);
      if (TREE_CODE (fn) == CALL_EXPR)
	CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_call (fn, target, target == const0_rtx);
    }
#endif
  return NULL_RTX;
}
4403
4404 /* Expand expression EXP, which is a call to the strcat builtin.
4405 Return NULL_RTX if we failed the caller should emit a normal call,
4406 otherwise try to get the result in TARGET, if convenient. */
4407
static rtx
expand_builtin_strcat (tree fndecl, tree exp, rtx target, enum machine_mode mode)
{
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      tree dst = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      const char *p = c_getstr (src);

      /* If the string length is zero, return the dst parameter.  */
      if (p && *p == '\0')
	return expand_expr (dst, target, mode, EXPAND_NORMAL);

      if (!optimize_size)
	{
	  /* See if we can store by pieces into (dst + strlen(dst)).  */
	  tree newsrc, newdst,
	    strlen_fn = implicit_built_in_decls[BUILT_IN_STRLEN];
	  rtx insns;

	  /* Stabilize the argument list.  */
	  newsrc = builtin_save_expr (src);
	  dst = builtin_save_expr (dst);

	  /* Collect the strcpy expansion in a sequence so it can be
	     discarded wholesale if the expansion fails.  */
	  start_sequence ();

	  /* Create strlen (dst).  */
	  newdst = build_call_expr (strlen_fn, 1, dst);
	  /* Create (dst + (cast) strlen (dst)).  */
	  newdst = fold_convert (TREE_TYPE (dst), newdst);
	  newdst = fold_build2 (PLUS_EXPR, TREE_TYPE (dst), dst, newdst);

	  newdst = builtin_save_expr (newdst);

	  if (!expand_builtin_strcpy_args (fndecl, newdst, newsrc, target, mode))
	    {
	      /* Drop the partially emitted insns and punt to a real call.  */
	      end_sequence (); /* Stop sequence.  */
	      return NULL_RTX;
	    }

	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);

	  /* strcat returns its first argument.  */
	  return expand_expr (dst, target, mode, EXPAND_NORMAL);
	}

      return NULL_RTX;
    }
}
4461
4462 /* Expand expression EXP, which is a call to the strncat builtin.
4463 Return NULL_RTX if we failed the caller should emit a normal call,
4464 otherwise try to get the result in TARGET, if convenient. */
4465
4466 static rtx
4467 expand_builtin_strncat (tree exp, rtx target, enum machine_mode mode)
4468 {
4469 if (validate_arglist (exp,
4470 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4471 {
4472 tree result = fold_builtin_strncat (CALL_EXPR_ARG (exp, 0),
4473 CALL_EXPR_ARG (exp, 1),
4474 CALL_EXPR_ARG (exp, 2));
4475 if (result)
4476 return expand_expr (result, target, mode, EXPAND_NORMAL);
4477 }
4478 return NULL_RTX;
4479 }
4480
4481 /* Expand expression EXP, which is a call to the strspn builtin.
4482 Return NULL_RTX if we failed the caller should emit a normal call,
4483 otherwise try to get the result in TARGET, if convenient. */
4484
4485 static rtx
4486 expand_builtin_strspn (tree exp, rtx target, enum machine_mode mode)
4487 {
4488 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4489 {
4490 tree result = fold_builtin_strspn (CALL_EXPR_ARG (exp, 0),
4491 CALL_EXPR_ARG (exp, 1));
4492 if (result)
4493 return expand_expr (result, target, mode, EXPAND_NORMAL);
4494 }
4495 return NULL_RTX;
4496 }
4497
4498 /* Expand expression EXP, which is a call to the strcspn builtin.
4499 Return NULL_RTX if we failed the caller should emit a normal call,
4500 otherwise try to get the result in TARGET, if convenient. */
4501
4502 static rtx
4503 expand_builtin_strcspn (tree exp, rtx target, enum machine_mode mode)
4504 {
4505 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4506 {
4507 tree result = fold_builtin_strcspn (CALL_EXPR_ARG (exp, 0),
4508 CALL_EXPR_ARG (exp, 1));
4509 if (result)
4510 return expand_expr (result, target, mode, EXPAND_NORMAL);
4511 }
4512 return NULL_RTX;
4513 }
4514
4515 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4516 if that's convenient. */
4517
rtx
expand_builtin_saveregs (void)
{
  rtx val, seq;

  /* Don't do __builtin_saveregs more than once in a function.
     Save the result of the first call and reuse it.  */
  if (saveregs_value != 0)
    return saveregs_value;

  /* When this function is called, it means that registers must be
     saved on entry to this function.  So we migrate the call to the
     first insn of this function.  */

  start_sequence ();

  /* Do whatever the machine needs done in this case.  */
  val = targetm.calls.expand_builtin_saveregs ();

  seq = get_insns ();
  end_sequence ();

  /* Cache for subsequent calls in this function (see above).  */
  saveregs_value = val;

  /* Put the insns after the NOTE that starts the function.  If this
     is inside a start_sequence, make the outer-level insn chain current, so
     the code is placed at the start of the function.  */
  push_topmost_sequence ();
  emit_insn_after (seq, entry_of_function ());
  pop_topmost_sequence ();

  return val;
}
4551
4552 /* __builtin_args_info (N) returns word N of the arg space info
4553 for the current function. The number and meanings of words
4554 is controlled by the definition of CUMULATIVE_ARGS. */
4555
4556 static rtx
4557 expand_builtin_args_info (tree exp)
4558 {
4559 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
4560 int *word_ptr = (int *) &current_function_args_info;
4561
4562 gcc_assert (sizeof (CUMULATIVE_ARGS) % sizeof (int) == 0);
4563
4564 if (call_expr_nargs (exp) != 0)
4565 {
4566 if (!host_integerp (CALL_EXPR_ARG (exp, 0), 0))
4567 error ("argument of %<__builtin_args_info%> must be constant");
4568 else
4569 {
4570 HOST_WIDE_INT wordnum = tree_low_cst (CALL_EXPR_ARG (exp, 0), 0);
4571
4572 if (wordnum < 0 || wordnum >= nwords)
4573 error ("argument of %<__builtin_args_info%> out of range");
4574 else
4575 return GEN_INT (word_ptr[wordnum]);
4576 }
4577 }
4578 else
4579 error ("missing argument in %<__builtin_args_info%>");
4580
4581 return const0_rtx;
4582 }
4583
4584 /* Expand a call to __builtin_next_arg. */
4585
4586 static rtx
4587 expand_builtin_next_arg (void)
4588 {
4589 /* Checking arguments is already done in fold_builtin_next_arg
4590 that must be called before this function. */
4591 return expand_binop (Pmode, add_optab,
4592 current_function_internal_arg_pointer,
4593 current_function_arg_offset_rtx,
4594 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4595 }
4596
4597 /* Make it easier for the backends by protecting the valist argument
4598 from multiple evaluations. */
4599
static tree
stabilize_va_list (tree valist, int needs_lvalue)
{
  /* Array-typed va_list (e.g. x86-64): the backends expect a pointer
     to the element type, so decay an actual array to its address.  */
  if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
    {
      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);

      /* For this case, the backends will be expecting a pointer to
	 TREE_TYPE (va_list_type_node), but it's possible we've
	 actually been given an array (an actual va_list_type_node).
	 So fix it.  */
      if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
	{
	  tree p1 = build_pointer_type (TREE_TYPE (va_list_type_node));
	  valist = build_fold_addr_expr_with_type (valist, p1);
	}
    }
  else
    {
      tree pt;

      /* Scalar va_list: when no lvalue is required, a side-effect-free
	 value can be used directly; otherwise take its address so each
	 use re-reads through the single evaluation.  */
      if (! needs_lvalue)
	{
	  if (! TREE_SIDE_EFFECTS (valist))
	    return valist;

	  pt = build_pointer_type (va_list_type_node);
	  valist = fold_build1 (ADDR_EXPR, pt, valist);
	  TREE_SIDE_EFFECTS (valist) = 1;
	}

      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);
      valist = build_fold_indirect_ref (valist);
    }

  return valist;
}
4639
4640 /* The "standard" definition of va_list is void*. */
4641
tree
std_build_builtin_va_list (void)
{
  /* The default ABI treats va_list as a plain `void *'.  */
  return ptr_type_node;
}
4647
4648 /* The "standard" implementation of va_start: just assign `nextarg' to
4649 the variable. */
4650
4651 void
4652 std_expand_builtin_va_start (tree valist, rtx nextarg)
4653 {
4654 tree t;
4655
4656 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist,
4657 make_tree (ptr_type_node, nextarg));
4658 TREE_SIDE_EFFECTS (t) = 1;
4659
4660 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4661 }
4662
4663 /* Expand EXP, a call to __builtin_va_start. */
4664
static rtx
expand_builtin_va_start (tree exp)
{
  rtx nextarg;
  tree valist;

  /* va_start takes the va_list plus the last named parameter.  */
  if (call_expr_nargs (exp) < 2)
    {
      error ("too few arguments to function %<va_start%>");
      return const0_rtx;
    }

  /* Diagnoses a bogus second argument; a nonzero return means give up.  */
  if (fold_builtin_next_arg (exp, true))
    return const0_rtx;

  nextarg = expand_builtin_next_arg ();
  valist = stabilize_va_list (CALL_EXPR_ARG (exp, 0), 1);

  /* Use the target's va_start expander when it provides one.  */
#ifdef EXPAND_BUILTIN_VA_START
  EXPAND_BUILTIN_VA_START (valist, nextarg);
#else
  std_expand_builtin_va_start (valist, nextarg);
#endif

  return const0_rtx;
}
4691
4692 /* The "standard" implementation of va_arg: read the value from the
4693 current (padded) address and increment by the (padded) size. */
4694
tree
std_gimplify_va_arg_expr (tree valist, tree type, tree *pre_p, tree *post_p)
{
  tree addr, t, type_size, rounded_size, valist_tmp;
  unsigned HOST_WIDE_INT align, boundary;
  bool indirect;

#ifdef ARGS_GROW_DOWNWARD
  /* All of the alignment and movement below is for args-grow-up machines.
     As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
     implement their own specialized gimplify_va_arg_expr routines.  */
  gcc_unreachable ();
#endif

  /* Arguments passed by reference are fetched as a pointer and
     dereferenced once more at the end.  */
  indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
  if (indirect)
    type = build_pointer_type (type);

  align = PARM_BOUNDARY / BITS_PER_UNIT;
  boundary = FUNCTION_ARG_BOUNDARY (TYPE_MODE (type), type) / BITS_PER_UNIT;

  /* Hoist the valist value into a temporary for the moment.  */
  valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);

  /* va_list pointer is aligned to PARM_BOUNDARY.  If argument actually
     requires greater alignment, we must perform dynamic alignment.  */
  if (boundary > align
      && !integer_zerop (TYPE_SIZE (type)))
    {
      /* valist_tmp = (valist_tmp + boundary - 1) & -boundary.  */
      t = fold_convert (TREE_TYPE (valist), size_int (boundary - 1));
      t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
		  build2 (PLUS_EXPR, TREE_TYPE (valist), valist_tmp, t));
      gimplify_and_add (t, pre_p);

      t = fold_convert (TREE_TYPE (valist), size_int (-boundary));
      t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
		  build2 (BIT_AND_EXPR, TREE_TYPE (valist), valist_tmp, t));
      gimplify_and_add (t, pre_p);
    }
  else
    boundary = align;

  /* If the actual alignment is less than the alignment of the type,
     adjust the type accordingly so that we don't assume strict alignment
     when dereferencing the pointer.  */
  boundary *= BITS_PER_UNIT;
  if (boundary < TYPE_ALIGN (type))
    {
      type = build_variant_type_copy (type);
      TYPE_ALIGN (type) = boundary;
    }

  /* Compute the rounded size of the type.  */
  type_size = size_in_bytes (type);
  rounded_size = round_up (type_size, align);

  /* Reduce rounded_size so it's sharable with the postqueue.  */
  gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);

  /* Get AP.  */
  addr = valist_tmp;
  if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
    {
      /* Small args are padded downward.  */
      t = fold_build2 (GT_EXPR, sizetype, rounded_size, size_int (align));
      t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
		       size_binop (MINUS_EXPR, rounded_size, type_size));
      t = fold_convert (TREE_TYPE (addr), t);
      addr = fold_build2 (PLUS_EXPR, TREE_TYPE (addr), addr, t);
    }

  /* Compute new value for AP.  */
  t = fold_convert (TREE_TYPE (valist), rounded_size);
  t = build2 (PLUS_EXPR, TREE_TYPE (valist), valist_tmp, t);
  t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
  gimplify_and_add (t, pre_p);

  addr = fold_convert (build_pointer_type (type), addr);

  /* Extra dereference for pass-by-reference arguments (see above).  */
  if (indirect)
    addr = build_va_arg_indirect_ref (addr);

  return build_va_arg_indirect_ref (addr);
}
4779
4780 /* Build an indirect-ref expression over the given TREE, which represents a
4781 piece of a va_arg() expansion. */
4782 tree
4783 build_va_arg_indirect_ref (tree addr)
4784 {
4785 addr = build_fold_indirect_ref (addr);
4786
4787 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
4788 mf_mark (addr);
4789
4790 return addr;
4791 }
4792
4793 /* Return a dummy expression of type TYPE in order to keep going after an
4794 error. */
4795
4796 static tree
4797 dummy_object (tree type)
4798 {
4799 tree t = build_int_cst (build_pointer_type (type), 0);
4800 return build1 (INDIRECT_REF, type, t);
4801 }
4802
4803 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
4804 builtin function, but a very special sort of operator. */
4805
enum gimplify_status
gimplify_va_arg_expr (tree *expr_p, tree *pre_p, tree *post_p)
{
  tree promoted_type, want_va_type, have_va_type;
  tree valist = TREE_OPERAND (*expr_p, 0);
  tree type = TREE_TYPE (*expr_p);
  tree t;

  /* Verify that valist is of the proper type.  */
  want_va_type = va_list_type_node;
  have_va_type = TREE_TYPE (valist);

  if (have_va_type == error_mark_node)
    return GS_ERROR;

  if (TREE_CODE (want_va_type) == ARRAY_TYPE)
    {
      /* If va_list is an array type, the argument may have decayed
	 to a pointer type, e.g. by being passed to another function.
	 In that case, unwrap both types so that we can compare the
	 underlying records.  */
      if (TREE_CODE (have_va_type) == ARRAY_TYPE
	  || POINTER_TYPE_P (have_va_type))
	{
	  want_va_type = TREE_TYPE (want_va_type);
	  have_va_type = TREE_TYPE (have_va_type);
	}
    }

  if (TYPE_MAIN_VARIANT (want_va_type) != TYPE_MAIN_VARIANT (have_va_type))
    {
      error ("first argument to %<va_arg%> not of type %<va_list%>");
      return GS_ERROR;
    }

  /* Generate a diagnostic for requesting data of a type that cannot
     be passed through `...' due to type promotion at the call site.  */
  else if ((promoted_type = lang_hooks.types.type_promotes_to (type))
	   != type)
    {
      /* Emit the "should pass promoted type" help text only once per
	 compilation.  */
      static bool gave_help;

      /* Unfortunately, this is merely undefined, rather than a constraint
	 violation, so we cannot make this an error.  If this call is never
	 executed, the program is still strictly conforming.  */
      warning (0, "%qT is promoted to %qT when passed through %<...%>",
	       type, promoted_type);
      if (! gave_help)
	{
	  gave_help = true;
	  warning (0, "(so you should pass %qT not %qT to %<va_arg%>)",
		   promoted_type, type);
	}

      /* We can, however, treat "undefined" any way we please.
	 Call abort to encourage the user to fix the program.  */
      inform ("if this code is reached, the program will abort");
      t = build_call_expr (implicit_built_in_decls[BUILT_IN_TRAP], 0);
      append_to_statement_list (t, pre_p);

      /* This is dead code, but go ahead and finish so that the
	 mode of the result comes out right.  */
      *expr_p = dummy_object (type);
      return GS_ALL_DONE;
    }
  else
    {
      /* Make it easier for the backends by protecting the valist argument
	 from multiple evaluations.  */
      if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
	{
	  /* For this case, the backends will be expecting a pointer to
	     TREE_TYPE (va_list_type_node), but it's possible we've
	     actually been given an array (an actual va_list_type_node).
	     So fix it.  */
	  if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
	    {
	      tree p1 = build_pointer_type (TREE_TYPE (va_list_type_node));
	      valist = build_fold_addr_expr_with_type (valist, p1);
	    }
	  gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
	}
      else
	gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);

      if (!targetm.gimplify_va_arg_expr)
	/* FIXME:Once most targets are converted we should merely
	   assert this is non-null.  */
	return GS_ALL_DONE;

      /* Delegate the actual argument fetch to the target hook.  */
      *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
      return GS_OK;
    }
}
4900
4901 /* Expand EXP, a call to __builtin_va_end. */
4902
4903 static rtx
4904 expand_builtin_va_end (tree exp)
4905 {
4906 tree valist = CALL_EXPR_ARG (exp, 0);
4907
4908 /* Evaluate for side effects, if needed. I hate macros that don't
4909 do that. */
4910 if (TREE_SIDE_EFFECTS (valist))
4911 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4912
4913 return const0_rtx;
4914 }
4915
4916 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4917 builtin rather than just as an assignment in stdarg.h because of the
4918 nastiness of array-type va_list types. */
4919
static rtx
expand_builtin_va_copy (tree exp)
{
  tree dst, src, t;

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  dst = stabilize_va_list (dst, 1);
  src = stabilize_va_list (src, 0);

  if (TREE_CODE (va_list_type_node) != ARRAY_TYPE)
    {
      /* Scalar va_list: a plain assignment suffices.  */
      t = build2 (MODIFY_EXPR, va_list_type_node, dst, src);
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }
  else
    {
      /* Array-type va_list: copy the whole object as a block.  */
      rtx dstb, srcb, size;

      /* Evaluate to pointers.  */
      dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
      srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
      size = expand_expr (TYPE_SIZE_UNIT (va_list_type_node), NULL_RTX,
			  VOIDmode, EXPAND_NORMAL);

      dstb = convert_memory_address (Pmode, dstb);
      srcb = convert_memory_address (Pmode, srcb);

      /* "Dereference" to BLKmode memories.  */
      dstb = gen_rtx_MEM (BLKmode, dstb);
      set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
      set_mem_align (dstb, TYPE_ALIGN (va_list_type_node));
      srcb = gen_rtx_MEM (BLKmode, srcb);
      set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
      set_mem_align (srcb, TYPE_ALIGN (va_list_type_node));

      /* Copy.  */
      emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
    }

  return const0_rtx;
}
4964
4965 /* Expand a call to one of the builtin functions __builtin_frame_address or
4966 __builtin_return_address. */
4967
4968 static rtx
4969 expand_builtin_frame_address (tree fndecl, tree exp)
4970 {
4971 /* The argument must be a nonnegative integer constant.
4972 It counts the number of frames to scan up the stack.
4973 The value is the return address saved in that frame. */
4974 if (call_expr_nargs (exp) == 0)
4975 /* Warning about missing arg was already issued. */
4976 return const0_rtx;
4977 else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
4978 {
4979 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4980 error ("invalid argument to %<__builtin_frame_address%>");
4981 else
4982 error ("invalid argument to %<__builtin_return_address%>");
4983 return const0_rtx;
4984 }
4985 else
4986 {
4987 rtx tem
4988 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
4989 tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
4990
4991 /* Some ports cannot access arbitrary stack frames. */
4992 if (tem == NULL)
4993 {
4994 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4995 warning (0, "unsupported argument to %<__builtin_frame_address%>");
4996 else
4997 warning (0, "unsupported argument to %<__builtin_return_address%>");
4998 return const0_rtx;
4999 }
5000
5001 /* For __builtin_frame_address, return what we've got. */
5002 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5003 return tem;
5004
5005 if (!REG_P (tem)
5006 && ! CONSTANT_P (tem))
5007 tem = copy_to_mode_reg (Pmode, tem);
5008 return tem;
5009 }
5010 }
5011
5012 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if
5013 we failed and the caller should emit a normal call, otherwise try to get
5014 the result in TARGET, if convenient. */
5015
5016 static rtx
5017 expand_builtin_alloca (tree exp, rtx target)
5018 {
5019 rtx op0;
5020 rtx result;
5021
5022 /* In -fmudflap-instrumented code, alloca() and __builtin_alloca()
5023 should always expand to function calls. These can be intercepted
5024 in libmudflap. */
5025 if (flag_mudflap)
5026 return NULL_RTX;
5027
5028 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5029 return NULL_RTX;
5030
5031 /* Compute the argument. */
5032 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5033
5034 /* Allocate the desired space. */
5035 result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
5036 result = convert_memory_address (ptr_mode, result);
5037
5038 return result;
5039 }
5040
/* Expand EXP, a call to a bswap builtin.  The mode to expand with is
   taken from the type of the call's single argument.  */
5043
5044 static rtx
5045 expand_builtin_bswap (tree exp, rtx target, rtx subtarget)
5046 {
5047 enum machine_mode mode;
5048 tree arg;
5049 rtx op0;
5050
5051 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5052 return NULL_RTX;
5053
5054 arg = CALL_EXPR_ARG (exp, 0);
5055 mode = TYPE_MODE (TREE_TYPE (arg));
5056 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5057
5058 target = expand_unop (mode, bswap_optab, op0, target, 1);
5059
5060 gcc_assert (target);
5061
5062 return convert_to_mode (mode, target, 0);
5063 }
5064
5065 /* Expand a call to a unary builtin in EXP.
5066 Return NULL_RTX if a normal call should be emitted rather than expanding the
5067 function in-line. If convenient, the result should be placed in TARGET.
5068 SUBTARGET may be used as the target for computing one of EXP's operands. */
5069
5070 static rtx
5071 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
5072 rtx subtarget, optab op_optab)
5073 {
5074 rtx op0;
5075
5076 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5077 return NULL_RTX;
5078
5079 /* Compute the argument. */
5080 op0 = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
5081 VOIDmode, EXPAND_NORMAL);
5082 /* Compute op, into TARGET if possible.
5083 Set TARGET to wherever the result comes back. */
5084 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5085 op_optab, op0, target, 1);
5086 gcc_assert (target);
5087
5088 return convert_to_mode (target_mode, target, 0);
5089 }
5090
5091 /* If the string passed to fputs is a constant and is one character
5092 long, we attempt to transform this call into __builtin_fputc(). */
5093
5094 static rtx
5095 expand_builtin_fputs (tree exp, rtx target, bool unlocked)
5096 {
5097 /* Verify the arguments in the original call. */
5098 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5099 {
5100 tree result = fold_builtin_fputs (CALL_EXPR_ARG (exp, 0),
5101 CALL_EXPR_ARG (exp, 1),
5102 (target == const0_rtx),
5103 unlocked, NULL_TREE);
5104 if (result)
5105 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
5106 }
5107 return NULL_RTX;
5108 }
5109
5110 /* Expand a call to __builtin_expect. We just return our argument
5111 as the builtin_expect semantic should've been already executed by
5112 tree branch prediction pass. */
5113
5114 static rtx
5115 expand_builtin_expect (tree exp, rtx target)
5116 {
5117 tree arg, c;
5118
5119 if (call_expr_nargs (exp) < 2)
5120 return const0_rtx;
5121 arg = CALL_EXPR_ARG (exp, 0);
5122 c = CALL_EXPR_ARG (exp, 1);
5123
5124 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5125 /* When guessing was done, the hints should be already stripped away. */
5126 gcc_assert (!flag_guess_branch_prob);
5127 return target;
5128 }
5129
/* Emit code to abort execution: the target's trap instruction when it
   has one, otherwise a library call to abort.  */
void
expand_builtin_trap (void)
{
#ifdef HAVE_trap
  if (HAVE_trap)
    emit_insn (gen_trap ());
  else
#endif
  emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
  /* Control does not continue past the trap.  */
  emit_barrier ();
}
5141
5142 /* Expand EXP, a call to fabs, fabsf or fabsl.
5143 Return NULL_RTX if a normal call should be emitted rather than expanding
5144 the function inline. If convenient, the result should be placed
5145 in TARGET. SUBTARGET may be used as the target for computing
5146 the operand. */
5147
5148 static rtx
5149 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5150 {
5151 enum machine_mode mode;
5152 tree arg;
5153 rtx op0;
5154
5155 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5156 return NULL_RTX;
5157
5158 arg = CALL_EXPR_ARG (exp, 0);
5159 mode = TYPE_MODE (TREE_TYPE (arg));
5160 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5161 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5162 }
5163
/* Expand EXP, a call to copysign, copysignf, or copysignl.
   Return NULL if a normal call should be emitted rather than expanding the
   function inline.  If convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing the operand.  */
5168
5169 static rtx
5170 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5171 {
5172 rtx op0, op1;
5173 tree arg;
5174
5175 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5176 return NULL_RTX;
5177
5178 arg = CALL_EXPR_ARG (exp, 0);
5179 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5180
5181 arg = CALL_EXPR_ARG (exp, 1);
5182 op1 = expand_normal (arg);
5183
5184 return expand_copysign (op0, op1, target);
5185 }
5186
5187 /* Create a new constant string literal and return a char* pointer to it.
5188 The STRING_CST value is the LEN characters at STR. */
5189 tree
5190 build_string_literal (int len, const char *str)
5191 {
5192 tree t, elem, index, type;
5193
5194 t = build_string (len, str);
5195 elem = build_type_variant (char_type_node, 1, 0);
5196 index = build_index_type (build_int_cst (NULL_TREE, len - 1));
5197 type = build_array_type (elem, index);
5198 TREE_TYPE (t) = type;
5199 TREE_CONSTANT (t) = 1;
5200 TREE_INVARIANT (t) = 1;
5201 TREE_READONLY (t) = 1;
5202 TREE_STATIC (t) = 1;
5203
5204 type = build_pointer_type (type);
5205 t = build1 (ADDR_EXPR, type, t);
5206
5207 type = build_pointer_type (elem);
5208 t = build1 (NOP_EXPR, type, t);
5209 return t;
5210 }
5211
5212 /* Expand EXP, a call to printf or printf_unlocked.
5213 Return NULL_RTX if a normal call should be emitted rather than transforming
5214 the function inline. If convenient, the result should be placed in
5215 TARGET with mode MODE. UNLOCKED indicates this is a printf_unlocked
5216 call. */
static rtx
expand_builtin_printf (tree exp, rtx target, enum machine_mode mode,
		       bool unlocked)
{
  /* If we're using an unlocked function, assume the other unlocked
     functions exist explicitly.  */
  tree const fn_putchar = unlocked ? built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED]
    : implicit_built_in_decls[BUILT_IN_PUTCHAR];
  tree const fn_puts = unlocked ? built_in_decls[BUILT_IN_PUTS_UNLOCKED]
    : implicit_built_in_decls[BUILT_IN_PUTS];
  const char *fmt_str;
  tree fn = 0;
  tree fmt, arg;
  int nargs = call_expr_nargs (exp);

  /* If the return value is used, don't do the transformation.  */
  if (target != const0_rtx)
    return NULL_RTX;

  /* Verify the required arguments in the original call.  */
  if (nargs == 0)
    return NULL_RTX;
  fmt = CALL_EXPR_ARG (exp, 0);
  if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
    return NULL_RTX;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return NULL_RTX;

  if (!init_target_chars ())
    return NULL_RTX;

  /* If the format specifier was "%s\n", call __builtin_puts(arg).  */
  if (strcmp (fmt_str, target_percent_s_newline) == 0)
    {
      if ((nargs != 2)
	  || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 1))))
	return NULL_RTX;
      if (fn_puts)
	fn = build_call_expr (fn_puts, 1, CALL_EXPR_ARG (exp, 1));
    }
  /* If the format specifier was "%c", call __builtin_putchar(arg).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if ((nargs != 2)
	  || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1))) != INTEGER_TYPE)
	return NULL_RTX;
      if (fn_putchar)
	fn = build_call_expr (fn_putchar, 1, CALL_EXPR_ARG (exp, 1));
    }
  else
    {
      /* We can't handle anything else with % args or %% ... yet.  */
      if (strchr (fmt_str, target_percent))
	return NULL_RTX;

      if (nargs > 1)
	return NULL_RTX;

      /* If the format specifier was "", printf does nothing.  */
      if (fmt_str[0] == '\0')
	return const0_rtx;
      /* If the format specifier has length of 1, call putchar.  */
      if (fmt_str[1] == '\0')
	{
	  /* Given printf("c"), (where c is any one character,)
	     convert "c"[0] to an int and pass that to the replacement
	     function.  */
	  arg = build_int_cst (NULL_TREE, fmt_str[0]);
	  if (fn_putchar)
	    fn = build_call_expr (fn_putchar, 1, arg);
	}
      else
	{
	  /* If the format specifier was "string\n", call puts("string").  */
	  size_t len = strlen (fmt_str);
	  if ((unsigned char)fmt_str[len - 1] == target_newline)
	    {
	      /* Create a NUL-terminated string that's one char shorter
		 than the original, stripping off the trailing '\n'.  */
	      char *newstr = alloca (len);
	      memcpy (newstr, fmt_str, len - 1);
	      newstr[len - 1] = 0;
	      /* LEN still counts the original length, so the literal
		 includes the replacement NUL terminator.  */
	      arg = build_string_literal (len, newstr);
	      if (fn_puts)
		fn = build_call_expr (fn_puts, 1, arg);
	    }
	  else
	    /* We'd like to arrange to call fputs(string,stdout) here,
	       but we need stdout and don't have a way to get it yet.  */
	    return NULL_RTX;
	}
    }

  /* No suitable replacement was available.  */
  if (!fn)
    return NULL_RTX;
  /* Propagate the tail-call flag to the replacement call.  */
  if (TREE_CODE (fn) == CALL_EXPR)
    CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
  return expand_expr (fn, target, mode, EXPAND_NORMAL);
}
5319
5320 /* Expand EXP, a call to fprintf or fprintf_unlocked.
5321 Return NULL_RTX if a normal call should be emitted rather than transforming
5322 the function inline. If convenient, the result should be placed in
5323 TARGET with mode MODE. UNLOCKED indicates this is a fprintf_unlocked
5324 call. */
static rtx
expand_builtin_fprintf (tree exp, rtx target, enum machine_mode mode,
			bool unlocked)
{
  /* If we're using an unlocked function, assume the other unlocked
     functions exist explicitly.  */
  tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
    : implicit_built_in_decls[BUILT_IN_FPUTC];
  tree const fn_fputs = unlocked ? built_in_decls[BUILT_IN_FPUTS_UNLOCKED]
    : implicit_built_in_decls[BUILT_IN_FPUTS];
  const char *fmt_str;
  tree fn = 0;
  tree fmt, fp, arg;
  int nargs = call_expr_nargs (exp);

  /* If the return value is used, don't do the transformation.  */
  if (target != const0_rtx)
    return NULL_RTX;

  /* Verify the required arguments in the original call.  */
  if (nargs < 2)
    return NULL_RTX;
  fp = CALL_EXPR_ARG (exp, 0);
  if (! POINTER_TYPE_P (TREE_TYPE (fp)))
    return NULL_RTX;
  fmt = CALL_EXPR_ARG (exp, 1);
  if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
    return NULL_RTX;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return NULL_RTX;

  if (!init_target_chars ())
    return NULL_RTX;

  /* If the format specifier was "%s", call __builtin_fputs(arg,fp).  */
  if (strcmp (fmt_str, target_percent_s) == 0)
    {
      if ((nargs != 3)
	  || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 2))))
	return NULL_RTX;
      arg = CALL_EXPR_ARG (exp, 2);
      if (fn_fputs)
	fn = build_call_expr (fn_fputs, 2, arg, fp);
    }
  /* If the format specifier was "%c", call __builtin_fputc(arg,fp).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if ((nargs != 3)
	  || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2))) != INTEGER_TYPE)
	return NULL_RTX;
      arg = CALL_EXPR_ARG (exp, 2);
      if (fn_fputc)
	fn = build_call_expr (fn_fputc, 2, arg, fp);
    }
  else
    {
      /* We can't handle anything else with % args or %% ... yet.  */
      if (strchr (fmt_str, target_percent))
	return NULL_RTX;

      if (nargs > 2)
	return NULL_RTX;

      /* If the format specifier was "", fprintf does nothing.  */
      if (fmt_str[0] == '\0')
	{
	  /* Evaluate and ignore FILE* argument for side-effects.  */
	  expand_expr (fp, const0_rtx, VOIDmode, EXPAND_NORMAL);
	  return const0_rtx;
	}

      /* When "string" doesn't contain %, replace all cases of
	 fprintf(stream,string) with fputs(string,stream).  The fputs
	 builtin will take care of special cases like length == 1.  */
      if (fn_fputs)
	fn = build_call_expr (fn_fputs, 2, fmt, fp);
    }

  /* No suitable replacement was available.  */
  if (!fn)
    return NULL_RTX;
  /* Propagate the tail-call flag to the replacement call.  */
  if (TREE_CODE (fn) == CALL_EXPR)
    CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
  return expand_expr (fn, target, mode, EXPAND_NORMAL);
}
5412
5413 /* Expand a call EXP to sprintf. Return NULL_RTX if
5414 a normal call should be emitted rather than expanding the function
5415 inline. If convenient, the result should be placed in TARGET with
5416 mode MODE. */
5417
5418 static rtx
5419 expand_builtin_sprintf (tree exp, rtx target, enum machine_mode mode)
5420 {
5421 tree dest, fmt;
5422 const char *fmt_str;
5423 int nargs = call_expr_nargs (exp);
5424
5425 /* Verify the required arguments in the original call. */
5426 if (nargs < 2)
5427 return NULL_RTX;
5428 dest = CALL_EXPR_ARG (exp, 0);
5429 if (! POINTER_TYPE_P (TREE_TYPE (dest)))
5430 return NULL_RTX;
5431 fmt = CALL_EXPR_ARG (exp, 0);
5432 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5433 return NULL_RTX;
5434
5435 /* Check whether the format is a literal string constant. */
5436 fmt_str = c_getstr (fmt);
5437 if (fmt_str == NULL)
5438 return NULL_RTX;
5439
5440 if (!init_target_chars ())
5441 return NULL_RTX;
5442
5443 /* If the format doesn't contain % args or %%, use strcpy. */
5444 if (strchr (fmt_str, target_percent) == 0)
5445 {
5446 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5447 tree exp;
5448
5449 if ((nargs > 2) || ! fn)
5450 return NULL_RTX;
5451 expand_expr (build_call_expr (fn, 2, dest, fmt),
5452 const0_rtx, VOIDmode, EXPAND_NORMAL);
5453 if (target == const0_rtx)
5454 return const0_rtx;
5455 exp = build_int_cst (NULL_TREE, strlen (fmt_str));
5456 return expand_expr (exp, target, mode, EXPAND_NORMAL);
5457 }
5458 /* If the format is "%s", use strcpy if the result isn't used. */
5459 else if (strcmp (fmt_str, target_percent_s) == 0)
5460 {
5461 tree fn, arg, len;
5462 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5463
5464 if (! fn)
5465 return NULL_RTX;
5466 if (nargs != 3)
5467 return NULL_RTX;
5468 arg = CALL_EXPR_ARG (exp, 2);
5469 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
5470 return NULL_RTX;
5471
5472 if (target != const0_rtx)
5473 {
5474 len = c_strlen (arg, 1);
5475 if (! len || TREE_CODE (len) != INTEGER_CST)
5476 return NULL_RTX;
5477 }
5478 else
5479 len = NULL_TREE;
5480
5481 expand_expr (build_call_expr (fn, 2, dest, arg),
5482 const0_rtx, VOIDmode, EXPAND_NORMAL);
5483
5484 if (target == const0_rtx)
5485 return const0_rtx;
5486 return expand_expr (len, target, mode, EXPAND_NORMAL);
5487 }
5488
5489 return NULL_RTX;
5490 }
5491
5492 /* Expand a call to either the entry or exit function profiler. */
5493
5494 static rtx
5495 expand_builtin_profile_func (bool exitp)
5496 {
5497 rtx this, which;
5498
5499 this = DECL_RTL (current_function_decl);
5500 gcc_assert (MEM_P (this));
5501 this = XEXP (this, 0);
5502
5503 if (exitp)
5504 which = profile_function_exit_libfunc;
5505 else
5506 which = profile_function_entry_libfunc;
5507
5508 emit_library_call (which, LCT_NORMAL, VOIDmode, 2, this, Pmode,
5509 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
5510 0),
5511 Pmode);
5512
5513 return const0_rtx;
5514 }
5515
5516 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5517
5518 static rtx
5519 round_trampoline_addr (rtx tramp)
5520 {
5521 rtx temp, addend, mask;
5522
5523 /* If we don't need too much alignment, we'll have been guaranteed
5524 proper alignment by get_trampoline_type. */
5525 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5526 return tramp;
5527
5528 /* Round address up to desired boundary. */
5529 temp = gen_reg_rtx (Pmode);
5530 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5531 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5532
5533 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5534 temp, 0, OPTAB_LIB_WIDEN);
5535 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5536 temp, 0, OPTAB_LIB_WIDEN);
5537
5538 return tramp;
5539 }
5540
/* Expand a call to __builtin_init_trampoline (tramp, func, chain).  */
static rtx
expand_builtin_init_trampoline (tree exp)
{
  tree t_tramp, t_func, t_chain;
  rtx r_tramp, r_func, r_chain;
#ifdef TRAMPOLINE_TEMPLATE
  rtx blktramp;
#endif

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
			 POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_tramp = CALL_EXPR_ARG (exp, 0);
  t_func = CALL_EXPR_ARG (exp, 1);
  t_chain = CALL_EXPR_ARG (exp, 2);

  r_tramp = expand_normal (t_tramp);
  r_func = expand_normal (t_func);
  r_chain = expand_normal (t_chain);

  /* Generate insns to initialize the trampoline.  */
  r_tramp = round_trampoline_addr (r_tramp);
#ifdef TRAMPOLINE_TEMPLATE
  /* First copy the target's canned trampoline code into place.  */
  blktramp = gen_rtx_MEM (BLKmode, r_tramp);
  set_mem_align (blktramp, TRAMPOLINE_ALIGNMENT);
  emit_block_move (blktramp, assemble_trampoline_template (),
		   GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
#endif
  trampolines_created = 1;
  /* Then let the target patch in the function and static chain.  */
  INITIALIZE_TRAMPOLINE (r_tramp, r_func, r_chain);

  return const0_rtx;
}
5575
/* Expand a call to __builtin_adjust_trampoline: round the trampoline
   address and give the target a chance to adjust it (e.g. set mode
   bits) before it is used as a function pointer.  */
static rtx
expand_builtin_adjust_trampoline (tree exp)
{
  rtx tramp;

  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
  tramp = round_trampoline_addr (tramp);
#ifdef TRAMPOLINE_ADJUST_ADDRESS
  TRAMPOLINE_ADJUST_ADDRESS (tramp);
#endif

  return tramp;
}
5592
5593 /* Expand a call to the built-in signbit, signbitf, signbitl, signbitd32,
5594 signbitd64, or signbitd128 function.
5595 Return NULL_RTX if a normal call should be emitted rather than expanding
5596 the function in-line. EXP is the expression that is a call to the builtin
5597 function; if convenient, the result should be placed in TARGET. */
5598
static rtx
expand_builtin_signbit (tree exp, rtx target)
{
  const struct real_format *fmt;
  enum machine_mode fmode, imode, rmode;
  HOST_WIDE_INT hi, lo;
  tree arg;
  int word, bitpos;
  rtx temp;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  fmode = TYPE_MODE (TREE_TYPE (arg));
  rmode = TYPE_MODE (TREE_TYPE (exp));
  fmt = REAL_MODE_FORMAT (fmode);

  /* For floating point formats without a sign bit, implement signbit
     as "ARG < 0.0".  */
  bitpos = fmt->signbit_ro;
  if (bitpos < 0)
    {
      /* But we can't do this if the format supports signed zero.  */
      if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
	return NULL_RTX;

      arg = fold_build2 (LT_EXPR, TREE_TYPE (exp), arg,
			 build_real (TREE_TYPE (arg), dconst0));
      return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
    }

  temp = expand_normal (arg);
  if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
    {
      /* The whole value fits in a word: reinterpret it as an integer.  */
      imode = int_mode_for_mode (fmode);
      if (imode == BLKmode)
	return NULL_RTX;
      temp = gen_lowpart (imode, temp);
    }
  else
    {
      /* Multi-word value: pick out the word that holds the sign bit.  */
      imode = word_mode;
      /* Handle targets with different FP word orders.  */
      if (FLOAT_WORDS_BIG_ENDIAN)
	word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
      else
	word = bitpos / BITS_PER_WORD;
      temp = operand_subword_force (temp, word, fmode);
      bitpos = bitpos % BITS_PER_WORD;
    }

  /* Force the intermediate word_mode (or narrower) result into a
     register.  This avoids attempting to create paradoxical SUBREGs
     of floating point modes below.  */
  temp = force_reg (imode, temp);

  /* If the bitpos is within the "result mode" lowpart, the operation
     can be implemented with a single bitwise AND.  Otherwise, we need
     a right shift and an AND.  */

  if (bitpos < GET_MODE_BITSIZE (rmode))
    {
      /* Build the double-word constant 1 << bitpos as HI:LO halves.  */
      if (bitpos < HOST_BITS_PER_WIDE_INT)
	{
	  hi = 0;
	  lo = (HOST_WIDE_INT) 1 << bitpos;
	}
      else
	{
	  hi = (HOST_WIDE_INT) 1 << (bitpos - HOST_BITS_PER_WIDE_INT);
	  lo = 0;
	}

      if (imode != rmode)
	temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp,
			   immed_double_const (lo, hi, rmode),
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }
  else
    {
      /* Perform a logical right shift to place the signbit in the least
	 significant bit, then truncate the result to the desired mode
	 and mask just this bit.  */
      temp = expand_shift (RSHIFT_EXPR, imode, temp,
			   build_int_cst (NULL_TREE, bitpos), NULL_RTX, 1);
      temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp, const1_rtx,
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }

  return temp;
}
5693
5694 /* Expand fork or exec calls. TARGET is the desired target of the
5695 call. EXP is the call. FN is the
5696 identificator of the actual function. IGNORE is nonzero if the
5697 value is to be ignored. */
5698
5699 static rtx
5700 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5701 {
5702 tree id, decl;
5703 tree call;
5704
5705 /* If we are not profiling, just call the function. */
5706 if (!profile_arc_flag)
5707 return NULL_RTX;
5708
5709 /* Otherwise call the wrapper. This should be equivalent for the rest of
5710 compiler, so the code does not diverge, and the wrapper may run the
5711 code necessary for keeping the profiling sane. */
5712
5713 switch (DECL_FUNCTION_CODE (fn))
5714 {
5715 case BUILT_IN_FORK:
5716 id = get_identifier ("__gcov_fork");
5717 break;
5718
5719 case BUILT_IN_EXECL:
5720 id = get_identifier ("__gcov_execl");
5721 break;
5722
5723 case BUILT_IN_EXECV:
5724 id = get_identifier ("__gcov_execv");
5725 break;
5726
5727 case BUILT_IN_EXECLP:
5728 id = get_identifier ("__gcov_execlp");
5729 break;
5730
5731 case BUILT_IN_EXECLE:
5732 id = get_identifier ("__gcov_execle");
5733 break;
5734
5735 case BUILT_IN_EXECVP:
5736 id = get_identifier ("__gcov_execvp");
5737 break;
5738
5739 case BUILT_IN_EXECVE:
5740 id = get_identifier ("__gcov_execve");
5741 break;
5742
5743 default:
5744 gcc_unreachable ();
5745 }
5746
5747 decl = build_decl (FUNCTION_DECL, id, TREE_TYPE (fn));
5748 DECL_EXTERNAL (decl) = 1;
5749 TREE_PUBLIC (decl) = 1;
5750 DECL_ARTIFICIAL (decl) = 1;
5751 TREE_NOTHROW (decl) = 1;
5752 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5753 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5754 call = rewrite_call_expr (exp, 0, decl, 0);
5755 return expand_call (call, target, ignore);
5756 }
5757
5758
5759 \f
5760 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5761 the pointer in these functions is void*, the tree optimizers may remove
5762 casts. The mode computed in expand_builtin isn't reliable either, due
5763 to __sync_bool_compare_and_swap.
5764
5765 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5766 group of builtins. This gives us log2 of the mode size. */
5767
5768 static inline enum machine_mode
5769 get_builtin_sync_mode (int fcode_diff)
5770 {
5771 /* The size is not negotiable, so ask not to get BLKmode in return
5772 if the target indicates that a smaller size would be better. */
5773 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5774 }
5775
5776 /* Expand the memory expression LOC and return the appropriate memory operand
5777 for the builtin_sync operations. */
5778
5779 static rtx
5780 get_builtin_sync_mem (tree loc, enum machine_mode mode)
5781 {
5782 rtx addr, mem;
5783
5784 addr = expand_expr (loc, NULL_RTX, Pmode, EXPAND_SUM);
5785
5786 /* Note that we explicitly do not want any alias information for this
5787 memory, so that we kill all other live memories. Otherwise we don't
5788 satisfy the full barrier semantics of the intrinsic. */
5789 mem = validize_mem (gen_rtx_MEM (mode, addr));
5790
5791 set_mem_align (mem, get_pointer_alignment (loc, BIGGEST_ALIGNMENT));
5792 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5793 MEM_VOLATILE_P (mem) = 1;
5794
5795 return mem;
5796 }
5797
5798 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5799 EXP is the CALL_EXPR. CODE is the rtx code
5800 that corresponds to the arithmetic or logical operation from the name;
5801 an exception here is that NOT actually means NAND. TARGET is an optional
5802 place for us to store the results; AFTER is true if this is the
5803 fetch_and_xxx form. IGNORE is true if we don't actually care about
5804 the result of the operation at all. */
5805
5806 static rtx
5807 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
5808 enum rtx_code code, bool after,
5809 rtx target, bool ignore)
5810 {
5811 rtx val, mem;
5812 enum machine_mode old_mode;
5813
5814 /* Expand the operands. */
5815 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5816
5817 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5818 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5819 of CONST_INTs, where we know the old_mode only from the call argument. */
5820 old_mode = GET_MODE (val);
5821 if (old_mode == VOIDmode)
5822 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5823 val = convert_modes (mode, old_mode, val, 1);
5824
5825 if (ignore)
5826 return expand_sync_operation (mem, val, code);
5827 else
5828 return expand_sync_fetch_operation (mem, val, code, after, target);
5829 }
5830
5831 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5832 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5833 true if this is the boolean form. TARGET is a place for us to store the
5834 results; this is NOT optional if IS_BOOL is true. */
5835
5836 static rtx
5837 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5838 bool is_bool, rtx target)
5839 {
5840 rtx old_val, new_val, mem;
5841 enum machine_mode old_mode;
5842
5843 /* Expand the operands. */
5844 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5845
5846
5847 old_val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX,
5848 mode, EXPAND_NORMAL);
5849 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5850 of CONST_INTs, where we know the old_mode only from the call argument. */
5851 old_mode = GET_MODE (old_val);
5852 if (old_mode == VOIDmode)
5853 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5854 old_val = convert_modes (mode, old_mode, old_val, 1);
5855
5856 new_val = expand_expr (CALL_EXPR_ARG (exp, 2), NULL_RTX,
5857 mode, EXPAND_NORMAL);
5858 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5859 of CONST_INTs, where we know the old_mode only from the call argument. */
5860 old_mode = GET_MODE (new_val);
5861 if (old_mode == VOIDmode)
5862 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2)));
5863 new_val = convert_modes (mode, old_mode, new_val, 1);
5864
5865 if (is_bool)
5866 return expand_bool_compare_and_swap (mem, old_val, new_val, target);
5867 else
5868 return expand_val_compare_and_swap (mem, old_val, new_val, target);
5869 }
5870
5871 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5872 general form is actually an atomic exchange, and some targets only
5873 support a reduced form with the second argument being a constant 1.
5874 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5875 the results. */
5876
5877 static rtx
5878 expand_builtin_lock_test_and_set (enum machine_mode mode, tree exp,
5879 rtx target)
5880 {
5881 rtx val, mem;
5882 enum machine_mode old_mode;
5883
5884 /* Expand the operands. */
5885 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5886 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5887 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5888 of CONST_INTs, where we know the old_mode only from the call argument. */
5889 old_mode = GET_MODE (val);
5890 if (old_mode == VOIDmode)
5891 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5892 val = convert_modes (mode, old_mode, val, 1);
5893
5894 return expand_sync_lock_test_and_set (mem, val, target);
5895 }
5896
5897 /* Expand the __sync_synchronize intrinsic. */
5898
5899 static void
5900 expand_builtin_synchronize (void)
5901 {
5902 tree x;
5903
5904 #ifdef HAVE_memory_barrier
5905 if (HAVE_memory_barrier)
5906 {
5907 emit_insn (gen_memory_barrier ());
5908 return;
5909 }
5910 #endif
5911
5912 /* If no explicit memory barrier instruction is available, create an
5913 empty asm stmt with a memory clobber. */
5914 x = build4 (ASM_EXPR, void_type_node, build_string (0, ""), NULL, NULL,
5915 tree_cons (NULL, build_string (6, "memory"), NULL));
5916 ASM_VOLATILE_P (x) = 1;
5917 expand_asm_expr (x);
5918 }
5919
5920 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5921
5922 static void
5923 expand_builtin_lock_release (enum machine_mode mode, tree exp)
5924 {
5925 enum insn_code icode;
5926 rtx mem, insn;
5927 rtx val = const0_rtx;
5928
5929 /* Expand the operands. */
5930 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5931
5932 /* If there is an explicit operation in the md file, use it. */
5933 icode = sync_lock_release[mode];
5934 if (icode != CODE_FOR_nothing)
5935 {
5936 if (!insn_data[icode].operand[1].predicate (val, mode))
5937 val = force_reg (mode, val);
5938
5939 insn = GEN_FCN (icode) (mem, val);
5940 if (insn)
5941 {
5942 emit_insn (insn);
5943 return;
5944 }
5945 }
5946
5947 /* Otherwise we can implement this operation by emitting a barrier
5948 followed by a store of zero. */
5949 expand_builtin_synchronize ();
5950 emit_move_insn (mem, val);
5951 }
5952 \f
5953 /* Expand an expression EXP that calls a built-in function,
5954 with result going to TARGET if that's convenient
5955 (and in mode MODE if that's convenient).
5956 SUBTARGET may be used as the target for computing one of EXP's operands.
5957 IGNORE is nonzero if the value is to be ignored. */
5958
5959 rtx
5960 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
5961 int ignore)
5962 {
5963 tree fndecl = get_callee_fndecl (exp);
5964 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5965 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5966
5967 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5968 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5969
5970 /* When not optimizing, generate calls to library functions for a certain
5971 set of builtins. */
5972 if (!optimize
5973 && !called_as_built_in (fndecl)
5974 && DECL_ASSEMBLER_NAME_SET_P (fndecl)
5975 && fcode != BUILT_IN_ALLOCA)
5976 return expand_call (exp, target, ignore);
5977
5978 /* The built-in function expanders test for target == const0_rtx
5979 to determine whether the function's result will be ignored. */
5980 if (ignore)
5981 target = const0_rtx;
5982
5983 /* If the result of a pure or const built-in function is ignored, and
5984 none of its arguments are volatile, we can avoid expanding the
5985 built-in call and just evaluate the arguments for side-effects. */
5986 if (target == const0_rtx
5987 && (DECL_IS_PURE (fndecl) || TREE_READONLY (fndecl)))
5988 {
5989 bool volatilep = false;
5990 tree arg;
5991 call_expr_arg_iterator iter;
5992
5993 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5994 if (TREE_THIS_VOLATILE (arg))
5995 {
5996 volatilep = true;
5997 break;
5998 }
5999
6000 if (! volatilep)
6001 {
6002 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6003 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
6004 return const0_rtx;
6005 }
6006 }
6007
6008 switch (fcode)
6009 {
6010 CASE_FLT_FN (BUILT_IN_FABS):
6011 target = expand_builtin_fabs (exp, target, subtarget);
6012 if (target)
6013 return target;
6014 break;
6015
6016 CASE_FLT_FN (BUILT_IN_COPYSIGN):
6017 target = expand_builtin_copysign (exp, target, subtarget);
6018 if (target)
6019 return target;
6020 break;
6021
6022 /* Just do a normal library call if we were unable to fold
6023 the values. */
6024 CASE_FLT_FN (BUILT_IN_CABS):
6025 break;
6026
6027 CASE_FLT_FN (BUILT_IN_EXP):
6028 CASE_FLT_FN (BUILT_IN_EXP10):
6029 CASE_FLT_FN (BUILT_IN_POW10):
6030 CASE_FLT_FN (BUILT_IN_EXP2):
6031 CASE_FLT_FN (BUILT_IN_EXPM1):
6032 CASE_FLT_FN (BUILT_IN_LOGB):
6033 CASE_FLT_FN (BUILT_IN_LOG):
6034 CASE_FLT_FN (BUILT_IN_LOG10):
6035 CASE_FLT_FN (BUILT_IN_LOG2):
6036 CASE_FLT_FN (BUILT_IN_LOG1P):
6037 CASE_FLT_FN (BUILT_IN_TAN):
6038 CASE_FLT_FN (BUILT_IN_ASIN):
6039 CASE_FLT_FN (BUILT_IN_ACOS):
6040 CASE_FLT_FN (BUILT_IN_ATAN):
6041 /* Treat these like sqrt only if unsafe math optimizations are allowed,
6042 because of possible accuracy problems. */
6043 if (! flag_unsafe_math_optimizations)
6044 break;
6045 CASE_FLT_FN (BUILT_IN_SQRT):
6046 CASE_FLT_FN (BUILT_IN_FLOOR):
6047 CASE_FLT_FN (BUILT_IN_CEIL):
6048 CASE_FLT_FN (BUILT_IN_TRUNC):
6049 CASE_FLT_FN (BUILT_IN_ROUND):
6050 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6051 CASE_FLT_FN (BUILT_IN_RINT):
6052 target = expand_builtin_mathfn (exp, target, subtarget);
6053 if (target)
6054 return target;
6055 break;
6056
6057 CASE_FLT_FN (BUILT_IN_ILOGB):
6058 if (! flag_unsafe_math_optimizations)
6059 break;
6060 CASE_FLT_FN (BUILT_IN_ISINF):
6061 target = expand_builtin_interclass_mathfn (exp, target, subtarget);
6062 if (target)
6063 return target;
6064 break;
6065
6066 CASE_FLT_FN (BUILT_IN_LCEIL):
6067 CASE_FLT_FN (BUILT_IN_LLCEIL):
6068 CASE_FLT_FN (BUILT_IN_LFLOOR):
6069 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6070 target = expand_builtin_int_roundingfn (exp, target, subtarget);
6071 if (target)
6072 return target;
6073 break;
6074
6075 CASE_FLT_FN (BUILT_IN_LRINT):
6076 CASE_FLT_FN (BUILT_IN_LLRINT):
6077 CASE_FLT_FN (BUILT_IN_LROUND):
6078 CASE_FLT_FN (BUILT_IN_LLROUND):
6079 target = expand_builtin_int_roundingfn_2 (exp, target, subtarget);
6080 if (target)
6081 return target;
6082 break;
6083
6084 CASE_FLT_FN (BUILT_IN_POW):
6085 target = expand_builtin_pow (exp, target, subtarget);
6086 if (target)
6087 return target;
6088 break;
6089
6090 CASE_FLT_FN (BUILT_IN_POWI):
6091 target = expand_builtin_powi (exp, target, subtarget);
6092 if (target)
6093 return target;
6094 break;
6095
6096 CASE_FLT_FN (BUILT_IN_ATAN2):
6097 CASE_FLT_FN (BUILT_IN_LDEXP):
6098 CASE_FLT_FN (BUILT_IN_SCALB):
6099 CASE_FLT_FN (BUILT_IN_SCALBN):
6100 CASE_FLT_FN (BUILT_IN_SCALBLN):
6101 if (! flag_unsafe_math_optimizations)
6102 break;
6103
6104 CASE_FLT_FN (BUILT_IN_FMOD):
6105 CASE_FLT_FN (BUILT_IN_REMAINDER):
6106 CASE_FLT_FN (BUILT_IN_DREM):
6107 target = expand_builtin_mathfn_2 (exp, target, subtarget);
6108 if (target)
6109 return target;
6110 break;
6111
6112 CASE_FLT_FN (BUILT_IN_CEXPI):
6113 target = expand_builtin_cexpi (exp, target, subtarget);
6114 gcc_assert (target);
6115 return target;
6116
6117 CASE_FLT_FN (BUILT_IN_SIN):
6118 CASE_FLT_FN (BUILT_IN_COS):
6119 if (! flag_unsafe_math_optimizations)
6120 break;
6121 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6122 if (target)
6123 return target;
6124 break;
6125
6126 CASE_FLT_FN (BUILT_IN_SINCOS):
6127 if (! flag_unsafe_math_optimizations)
6128 break;
6129 target = expand_builtin_sincos (exp);
6130 if (target)
6131 return target;
6132 break;
6133
6134 case BUILT_IN_APPLY_ARGS:
6135 return expand_builtin_apply_args ();
6136
6137 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6138 FUNCTION with a copy of the parameters described by
6139 ARGUMENTS, and ARGSIZE. It returns a block of memory
6140 allocated on the stack into which is stored all the registers
6141 that might possibly be used for returning the result of a
6142 function. ARGUMENTS is the value returned by
6143 __builtin_apply_args. ARGSIZE is the number of bytes of
6144 arguments that must be copied. ??? How should this value be
6145 computed? We'll also need a safe worst case value for varargs
6146 functions. */
6147 case BUILT_IN_APPLY:
6148 if (!validate_arglist (exp, POINTER_TYPE,
6149 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6150 && !validate_arglist (exp, REFERENCE_TYPE,
6151 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6152 return const0_rtx;
6153 else
6154 {
6155 rtx ops[3];
6156
6157 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6158 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6159 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6160
6161 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6162 }
6163
6164 /* __builtin_return (RESULT) causes the function to return the
6165 value described by RESULT. RESULT is address of the block of
6166 memory returned by __builtin_apply. */
6167 case BUILT_IN_RETURN:
6168 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6169 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6170 return const0_rtx;
6171
6172 case BUILT_IN_SAVEREGS:
6173 return expand_builtin_saveregs ();
6174
6175 case BUILT_IN_ARGS_INFO:
6176 return expand_builtin_args_info (exp);
6177
6178 /* Return the address of the first anonymous stack arg. */
6179 case BUILT_IN_NEXT_ARG:
6180 if (fold_builtin_next_arg (exp, false))
6181 return const0_rtx;
6182 return expand_builtin_next_arg ();
6183
6184 case BUILT_IN_CLASSIFY_TYPE:
6185 return expand_builtin_classify_type (exp);
6186
6187 case BUILT_IN_CONSTANT_P:
6188 return const0_rtx;
6189
6190 case BUILT_IN_FRAME_ADDRESS:
6191 case BUILT_IN_RETURN_ADDRESS:
6192 return expand_builtin_frame_address (fndecl, exp);
6193
6194 /* Returns the address of the area where the structure is returned.
6195 0 otherwise. */
6196 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6197 if (call_expr_nargs (exp) != 0
6198 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6199 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6200 return const0_rtx;
6201 else
6202 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6203
6204 case BUILT_IN_ALLOCA:
6205 target = expand_builtin_alloca (exp, target);
6206 if (target)
6207 return target;
6208 break;
6209
6210 case BUILT_IN_STACK_SAVE:
6211 return expand_stack_save ();
6212
6213 case BUILT_IN_STACK_RESTORE:
6214 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6215 return const0_rtx;
6216
6217 case BUILT_IN_BSWAP32:
6218 case BUILT_IN_BSWAP64:
6219 target = expand_builtin_bswap (exp, target, subtarget);
6220
6221 if (target)
6222 return target;
6223 break;
6224
6225 CASE_INT_FN (BUILT_IN_FFS):
6226 case BUILT_IN_FFSIMAX:
6227 target = expand_builtin_unop (target_mode, exp, target,
6228 subtarget, ffs_optab);
6229 if (target)
6230 return target;
6231 break;
6232
6233 CASE_INT_FN (BUILT_IN_CLZ):
6234 case BUILT_IN_CLZIMAX:
6235 target = expand_builtin_unop (target_mode, exp, target,
6236 subtarget, clz_optab);
6237 if (target)
6238 return target;
6239 break;
6240
6241 CASE_INT_FN (BUILT_IN_CTZ):
6242 case BUILT_IN_CTZIMAX:
6243 target = expand_builtin_unop (target_mode, exp, target,
6244 subtarget, ctz_optab);
6245 if (target)
6246 return target;
6247 break;
6248
6249 CASE_INT_FN (BUILT_IN_POPCOUNT):
6250 case BUILT_IN_POPCOUNTIMAX:
6251 target = expand_builtin_unop (target_mode, exp, target,
6252 subtarget, popcount_optab);
6253 if (target)
6254 return target;
6255 break;
6256
6257 CASE_INT_FN (BUILT_IN_PARITY):
6258 case BUILT_IN_PARITYIMAX:
6259 target = expand_builtin_unop (target_mode, exp, target,
6260 subtarget, parity_optab);
6261 if (target)
6262 return target;
6263 break;
6264
6265 case BUILT_IN_STRLEN:
6266 target = expand_builtin_strlen (exp, target, target_mode);
6267 if (target)
6268 return target;
6269 break;
6270
6271 case BUILT_IN_STRCPY:
6272 target = expand_builtin_strcpy (fndecl, exp, target, mode);
6273 if (target)
6274 return target;
6275 break;
6276
6277 case BUILT_IN_STRNCPY:
6278 target = expand_builtin_strncpy (exp, target, mode);
6279 if (target)
6280 return target;
6281 break;
6282
6283 case BUILT_IN_STPCPY:
6284 target = expand_builtin_stpcpy (exp, target, mode);
6285 if (target)
6286 return target;
6287 break;
6288
6289 case BUILT_IN_STRCAT:
6290 target = expand_builtin_strcat (fndecl, exp, target, mode);
6291 if (target)
6292 return target;
6293 break;
6294
6295 case BUILT_IN_STRNCAT:
6296 target = expand_builtin_strncat (exp, target, mode);
6297 if (target)
6298 return target;
6299 break;
6300
6301 case BUILT_IN_STRSPN:
6302 target = expand_builtin_strspn (exp, target, mode);
6303 if (target)
6304 return target;
6305 break;
6306
6307 case BUILT_IN_STRCSPN:
6308 target = expand_builtin_strcspn (exp, target, mode);
6309 if (target)
6310 return target;
6311 break;
6312
6313 case BUILT_IN_STRSTR:
6314 target = expand_builtin_strstr (exp, target, mode);
6315 if (target)
6316 return target;
6317 break;
6318
6319 case BUILT_IN_STRPBRK:
6320 target = expand_builtin_strpbrk (exp, target, mode);
6321 if (target)
6322 return target;
6323 break;
6324
6325 case BUILT_IN_INDEX:
6326 case BUILT_IN_STRCHR:
6327 target = expand_builtin_strchr (exp, target, mode);
6328 if (target)
6329 return target;
6330 break;
6331
6332 case BUILT_IN_RINDEX:
6333 case BUILT_IN_STRRCHR:
6334 target = expand_builtin_strrchr (exp, target, mode);
6335 if (target)
6336 return target;
6337 break;
6338
6339 case BUILT_IN_MEMCPY:
6340 target = expand_builtin_memcpy (exp, target, mode);
6341 if (target)
6342 return target;
6343 break;
6344
6345 case BUILT_IN_MEMPCPY:
6346 target = expand_builtin_mempcpy (exp, target, mode);
6347 if (target)
6348 return target;
6349 break;
6350
6351 case BUILT_IN_MEMMOVE:
6352 target = expand_builtin_memmove (exp, target, mode, ignore);
6353 if (target)
6354 return target;
6355 break;
6356
6357 case BUILT_IN_BCOPY:
6358 target = expand_builtin_bcopy (exp, ignore);
6359 if (target)
6360 return target;
6361 break;
6362
6363 case BUILT_IN_MEMSET:
6364 target = expand_builtin_memset (exp, target, mode);
6365 if (target)
6366 return target;
6367 break;
6368
6369 case BUILT_IN_BZERO:
6370 target = expand_builtin_bzero (exp);
6371 if (target)
6372 return target;
6373 break;
6374
6375 case BUILT_IN_STRCMP:
6376 target = expand_builtin_strcmp (exp, target, mode);
6377 if (target)
6378 return target;
6379 break;
6380
6381 case BUILT_IN_STRNCMP:
6382 target = expand_builtin_strncmp (exp, target, mode);
6383 if (target)
6384 return target;
6385 break;
6386
6387 case BUILT_IN_MEMCHR:
6388 target = expand_builtin_memchr (exp, target, mode);
6389 if (target)
6390 return target;
6391 break;
6392
6393 case BUILT_IN_BCMP:
6394 case BUILT_IN_MEMCMP:
6395 target = expand_builtin_memcmp (exp, target, mode);
6396 if (target)
6397 return target;
6398 break;
6399
6400 case BUILT_IN_SETJMP:
6401 /* This should have been lowered to the builtins below. */
6402 gcc_unreachable ();
6403
6404 case BUILT_IN_SETJMP_SETUP:
6405 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6406 and the receiver label. */
6407 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6408 {
6409 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6410 VOIDmode, EXPAND_NORMAL);
6411 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6412 rtx label_r = label_rtx (label);
6413
6414 /* This is copied from the handling of non-local gotos. */
6415 expand_builtin_setjmp_setup (buf_addr, label_r);
6416 nonlocal_goto_handler_labels
6417 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6418 nonlocal_goto_handler_labels);
6419 /* ??? Do not let expand_label treat us as such since we would
6420 not want to be both on the list of non-local labels and on
6421 the list of forced labels. */
6422 FORCED_LABEL (label) = 0;
6423 return const0_rtx;
6424 }
6425 break;
6426
6427 case BUILT_IN_SETJMP_DISPATCHER:
6428 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6429 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6430 {
6431 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6432 rtx label_r = label_rtx (label);
6433
6434 /* Remove the dispatcher label from the list of non-local labels
6435 since the receiver labels have been added to it above. */
6436 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6437 return const0_rtx;
6438 }
6439 break;
6440
6441 case BUILT_IN_SETJMP_RECEIVER:
6442 /* __builtin_setjmp_receiver is passed the receiver label. */
6443 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6444 {
6445 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6446 rtx label_r = label_rtx (label);
6447
6448 expand_builtin_setjmp_receiver (label_r);
6449 return const0_rtx;
6450 }
6451 break;
6452
6453 /* __builtin_longjmp is passed a pointer to an array of five words.
6454 It's similar to the C library longjmp function but works with
6455 __builtin_setjmp above. */
6456 case BUILT_IN_LONGJMP:
6457 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6458 {
6459 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6460 VOIDmode, EXPAND_NORMAL);
6461 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6462
6463 if (value != const1_rtx)
6464 {
6465 error ("%<__builtin_longjmp%> second argument must be 1");
6466 return const0_rtx;
6467 }
6468
6469 expand_builtin_longjmp (buf_addr, value);
6470 return const0_rtx;
6471 }
6472 break;
6473
6474 case BUILT_IN_NONLOCAL_GOTO:
6475 target = expand_builtin_nonlocal_goto (exp);
6476 if (target)
6477 return target;
6478 break;
6479
6480 /* This updates the setjmp buffer that is its argument with the value
6481 of the current stack pointer. */
6482 case BUILT_IN_UPDATE_SETJMP_BUF:
6483 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6484 {
6485 rtx buf_addr
6486 = expand_normal (CALL_EXPR_ARG (exp, 0));
6487
6488 expand_builtin_update_setjmp_buf (buf_addr);
6489 return const0_rtx;
6490 }
6491 break;
6492
6493 case BUILT_IN_TRAP:
6494 expand_builtin_trap ();
6495 return const0_rtx;
6496
6497 case BUILT_IN_PRINTF:
6498 target = expand_builtin_printf (exp, target, mode, false);
6499 if (target)
6500 return target;
6501 break;
6502
6503 case BUILT_IN_PRINTF_UNLOCKED:
6504 target = expand_builtin_printf (exp, target, mode, true);
6505 if (target)
6506 return target;
6507 break;
6508
6509 case BUILT_IN_FPUTS:
6510 target = expand_builtin_fputs (exp, target, false);
6511 if (target)
6512 return target;
6513 break;
6514 case BUILT_IN_FPUTS_UNLOCKED:
6515 target = expand_builtin_fputs (exp, target, true);
6516 if (target)
6517 return target;
6518 break;
6519
6520 case BUILT_IN_FPRINTF:
6521 target = expand_builtin_fprintf (exp, target, mode, false);
6522 if (target)
6523 return target;
6524 break;
6525
6526 case BUILT_IN_FPRINTF_UNLOCKED:
6527 target = expand_builtin_fprintf (exp, target, mode, true);
6528 if (target)
6529 return target;
6530 break;
6531
6532 case BUILT_IN_SPRINTF:
6533 target = expand_builtin_sprintf (exp, target, mode);
6534 if (target)
6535 return target;
6536 break;
6537
6538 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6539 case BUILT_IN_SIGNBITD32:
6540 case BUILT_IN_SIGNBITD64:
6541 case BUILT_IN_SIGNBITD128:
6542 target = expand_builtin_signbit (exp, target);
6543 if (target)
6544 return target;
6545 break;
6546
6547 /* Various hooks for the DWARF 2 __throw routine. */
6548 case BUILT_IN_UNWIND_INIT:
6549 expand_builtin_unwind_init ();
6550 return const0_rtx;
6551 case BUILT_IN_DWARF_CFA:
6552 return virtual_cfa_rtx;
6553 #ifdef DWARF2_UNWIND_INFO
6554 case BUILT_IN_DWARF_SP_COLUMN:
6555 return expand_builtin_dwarf_sp_column ();
6556 case BUILT_IN_INIT_DWARF_REG_SIZES:
6557 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6558 return const0_rtx;
6559 #endif
6560 case BUILT_IN_FROB_RETURN_ADDR:
6561 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6562 case BUILT_IN_EXTRACT_RETURN_ADDR:
6563 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6564 case BUILT_IN_EH_RETURN:
6565 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6566 CALL_EXPR_ARG (exp, 1));
6567 return const0_rtx;
6568 #ifdef EH_RETURN_DATA_REGNO
6569 case BUILT_IN_EH_RETURN_DATA_REGNO:
6570 return expand_builtin_eh_return_data_regno (exp);
6571 #endif
6572 case BUILT_IN_EXTEND_POINTER:
6573 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6574
6575 case BUILT_IN_VA_START:
6576 case BUILT_IN_STDARG_START:
6577 return expand_builtin_va_start (exp);
6578 case BUILT_IN_VA_END:
6579 return expand_builtin_va_end (exp);
6580 case BUILT_IN_VA_COPY:
6581 return expand_builtin_va_copy (exp);
6582 case BUILT_IN_EXPECT:
6583 return expand_builtin_expect (exp, target);
6584 case BUILT_IN_PREFETCH:
6585 expand_builtin_prefetch (exp);
6586 return const0_rtx;
6587
6588 case BUILT_IN_PROFILE_FUNC_ENTER:
6589 return expand_builtin_profile_func (false);
6590 case BUILT_IN_PROFILE_FUNC_EXIT:
6591 return expand_builtin_profile_func (true);
6592
6593 case BUILT_IN_INIT_TRAMPOLINE:
6594 return expand_builtin_init_trampoline (exp);
6595 case BUILT_IN_ADJUST_TRAMPOLINE:
6596 return expand_builtin_adjust_trampoline (exp);
6597
6598 case BUILT_IN_FORK:
6599 case BUILT_IN_EXECL:
6600 case BUILT_IN_EXECV:
6601 case BUILT_IN_EXECLP:
6602 case BUILT_IN_EXECLE:
6603 case BUILT_IN_EXECVP:
6604 case BUILT_IN_EXECVE:
6605 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6606 if (target)
6607 return target;
6608 break;
6609
6610 case BUILT_IN_FETCH_AND_ADD_1:
6611 case BUILT_IN_FETCH_AND_ADD_2:
6612 case BUILT_IN_FETCH_AND_ADD_4:
6613 case BUILT_IN_FETCH_AND_ADD_8:
6614 case BUILT_IN_FETCH_AND_ADD_16:
6615 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_ADD_1);
6616 target = expand_builtin_sync_operation (mode, exp, PLUS,
6617 false, target, ignore);
6618 if (target)
6619 return target;
6620 break;
6621
6622 case BUILT_IN_FETCH_AND_SUB_1:
6623 case BUILT_IN_FETCH_AND_SUB_2:
6624 case BUILT_IN_FETCH_AND_SUB_4:
6625 case BUILT_IN_FETCH_AND_SUB_8:
6626 case BUILT_IN_FETCH_AND_SUB_16:
6627 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_SUB_1);
6628 target = expand_builtin_sync_operation (mode, exp, MINUS,
6629 false, target, ignore);
6630 if (target)
6631 return target;
6632 break;
6633
6634 case BUILT_IN_FETCH_AND_OR_1:
6635 case BUILT_IN_FETCH_AND_OR_2:
6636 case BUILT_IN_FETCH_AND_OR_4:
6637 case BUILT_IN_FETCH_AND_OR_8:
6638 case BUILT_IN_FETCH_AND_OR_16:
6639 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_OR_1);
6640 target = expand_builtin_sync_operation (mode, exp, IOR,
6641 false, target, ignore);
6642 if (target)
6643 return target;
6644 break;
6645
6646 case BUILT_IN_FETCH_AND_AND_1:
6647 case BUILT_IN_FETCH_AND_AND_2:
6648 case BUILT_IN_FETCH_AND_AND_4:
6649 case BUILT_IN_FETCH_AND_AND_8:
6650 case BUILT_IN_FETCH_AND_AND_16:
6651 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_AND_1);
6652 target = expand_builtin_sync_operation (mode, exp, AND,
6653 false, target, ignore);
6654 if (target)
6655 return target;
6656 break;
6657
6658 case BUILT_IN_FETCH_AND_XOR_1:
6659 case BUILT_IN_FETCH_AND_XOR_2:
6660 case BUILT_IN_FETCH_AND_XOR_4:
6661 case BUILT_IN_FETCH_AND_XOR_8:
6662 case BUILT_IN_FETCH_AND_XOR_16:
6663 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_XOR_1);
6664 target = expand_builtin_sync_operation (mode, exp, XOR,
6665 false, target, ignore);
6666 if (target)
6667 return target;
6668 break;
6669
6670 case BUILT_IN_FETCH_AND_NAND_1:
6671 case BUILT_IN_FETCH_AND_NAND_2:
6672 case BUILT_IN_FETCH_AND_NAND_4:
6673 case BUILT_IN_FETCH_AND_NAND_8:
6674 case BUILT_IN_FETCH_AND_NAND_16:
6675 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_NAND_1);
6676 target = expand_builtin_sync_operation (mode, exp, NOT,
6677 false, target, ignore);
6678 if (target)
6679 return target;
6680 break;
6681
6682 case BUILT_IN_ADD_AND_FETCH_1:
6683 case BUILT_IN_ADD_AND_FETCH_2:
6684 case BUILT_IN_ADD_AND_FETCH_4:
6685 case BUILT_IN_ADD_AND_FETCH_8:
6686 case BUILT_IN_ADD_AND_FETCH_16:
6687 mode = get_builtin_sync_mode (fcode - BUILT_IN_ADD_AND_FETCH_1);
6688 target = expand_builtin_sync_operation (mode, exp, PLUS,
6689 true, target, ignore);
6690 if (target)
6691 return target;
6692 break;
6693
6694 case BUILT_IN_SUB_AND_FETCH_1:
6695 case BUILT_IN_SUB_AND_FETCH_2:
6696 case BUILT_IN_SUB_AND_FETCH_4:
6697 case BUILT_IN_SUB_AND_FETCH_8:
6698 case BUILT_IN_SUB_AND_FETCH_16:
6699 mode = get_builtin_sync_mode (fcode - BUILT_IN_SUB_AND_FETCH_1);
6700 target = expand_builtin_sync_operation (mode, exp, MINUS,
6701 true, target, ignore);
6702 if (target)
6703 return target;
6704 break;
6705
6706 case BUILT_IN_OR_AND_FETCH_1:
6707 case BUILT_IN_OR_AND_FETCH_2:
6708 case BUILT_IN_OR_AND_FETCH_4:
6709 case BUILT_IN_OR_AND_FETCH_8:
6710 case BUILT_IN_OR_AND_FETCH_16:
6711 mode = get_builtin_sync_mode (fcode - BUILT_IN_OR_AND_FETCH_1);
6712 target = expand_builtin_sync_operation (mode, exp, IOR,
6713 true, target, ignore);
6714 if (target)
6715 return target;
6716 break;
6717
6718 case BUILT_IN_AND_AND_FETCH_1:
6719 case BUILT_IN_AND_AND_FETCH_2:
6720 case BUILT_IN_AND_AND_FETCH_4:
6721 case BUILT_IN_AND_AND_FETCH_8:
6722 case BUILT_IN_AND_AND_FETCH_16:
6723 mode = get_builtin_sync_mode (fcode - BUILT_IN_AND_AND_FETCH_1);
6724 target = expand_builtin_sync_operation (mode, exp, AND,
6725 true, target, ignore);
6726 if (target)
6727 return target;
6728 break;
6729
6730 case BUILT_IN_XOR_AND_FETCH_1:
6731 case BUILT_IN_XOR_AND_FETCH_2:
6732 case BUILT_IN_XOR_AND_FETCH_4:
6733 case BUILT_IN_XOR_AND_FETCH_8:
6734 case BUILT_IN_XOR_AND_FETCH_16:
6735 mode = get_builtin_sync_mode (fcode - BUILT_IN_XOR_AND_FETCH_1);
6736 target = expand_builtin_sync_operation (mode, exp, XOR,
6737 true, target, ignore);
6738 if (target)
6739 return target;
6740 break;
6741
6742 case BUILT_IN_NAND_AND_FETCH_1:
6743 case BUILT_IN_NAND_AND_FETCH_2:
6744 case BUILT_IN_NAND_AND_FETCH_4:
6745 case BUILT_IN_NAND_AND_FETCH_8:
6746 case BUILT_IN_NAND_AND_FETCH_16:
6747 mode = get_builtin_sync_mode (fcode - BUILT_IN_NAND_AND_FETCH_1);
6748 target = expand_builtin_sync_operation (mode, exp, NOT,
6749 true, target, ignore);
6750 if (target)
6751 return target;
6752 break;
6753
6754 case BUILT_IN_BOOL_COMPARE_AND_SWAP_1:
6755 case BUILT_IN_BOOL_COMPARE_AND_SWAP_2:
6756 case BUILT_IN_BOOL_COMPARE_AND_SWAP_4:
6757 case BUILT_IN_BOOL_COMPARE_AND_SWAP_8:
6758 case BUILT_IN_BOOL_COMPARE_AND_SWAP_16:
6759 if (mode == VOIDmode)
6760 mode = TYPE_MODE (boolean_type_node);
6761 if (!target || !register_operand (target, mode))
6762 target = gen_reg_rtx (mode);
6763
6764 mode = get_builtin_sync_mode (fcode - BUILT_IN_BOOL_COMPARE_AND_SWAP_1);
6765 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6766 if (target)
6767 return target;
6768 break;
6769
6770 case BUILT_IN_VAL_COMPARE_AND_SWAP_1:
6771 case BUILT_IN_VAL_COMPARE_AND_SWAP_2:
6772 case BUILT_IN_VAL_COMPARE_AND_SWAP_4:
6773 case BUILT_IN_VAL_COMPARE_AND_SWAP_8:
6774 case BUILT_IN_VAL_COMPARE_AND_SWAP_16:
6775 mode = get_builtin_sync_mode (fcode - BUILT_IN_VAL_COMPARE_AND_SWAP_1);
6776 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6777 if (target)
6778 return target;
6779 break;
6780
6781 case BUILT_IN_LOCK_TEST_AND_SET_1:
6782 case BUILT_IN_LOCK_TEST_AND_SET_2:
6783 case BUILT_IN_LOCK_TEST_AND_SET_4:
6784 case BUILT_IN_LOCK_TEST_AND_SET_8:
6785 case BUILT_IN_LOCK_TEST_AND_SET_16:
6786 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_TEST_AND_SET_1);
6787 target = expand_builtin_lock_test_and_set (mode, exp, target);
6788 if (target)
6789 return target;
6790 break;
6791
6792 case BUILT_IN_LOCK_RELEASE_1:
6793 case BUILT_IN_LOCK_RELEASE_2:
6794 case BUILT_IN_LOCK_RELEASE_4:
6795 case BUILT_IN_LOCK_RELEASE_8:
6796 case BUILT_IN_LOCK_RELEASE_16:
6797 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_RELEASE_1);
6798 expand_builtin_lock_release (mode, exp);
6799 return const0_rtx;
6800
6801 case BUILT_IN_SYNCHRONIZE:
6802 expand_builtin_synchronize ();
6803 return const0_rtx;
6804
6805 case BUILT_IN_OBJECT_SIZE:
6806 return expand_builtin_object_size (exp);
6807
6808 case BUILT_IN_MEMCPY_CHK:
6809 case BUILT_IN_MEMPCPY_CHK:
6810 case BUILT_IN_MEMMOVE_CHK:
6811 case BUILT_IN_MEMSET_CHK:
6812 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6813 if (target)
6814 return target;
6815 break;
6816
6817 case BUILT_IN_STRCPY_CHK:
6818 case BUILT_IN_STPCPY_CHK:
6819 case BUILT_IN_STRNCPY_CHK:
6820 case BUILT_IN_STRCAT_CHK:
6821 case BUILT_IN_STRNCAT_CHK:
6822 case BUILT_IN_SNPRINTF_CHK:
6823 case BUILT_IN_VSNPRINTF_CHK:
6824 maybe_emit_chk_warning (exp, fcode);
6825 break;
6826
6827 case BUILT_IN_SPRINTF_CHK:
6828 case BUILT_IN_VSPRINTF_CHK:
6829 maybe_emit_sprintf_chk_warning (exp, fcode);
6830 break;
6831
6832 default: /* just do library call, if unknown builtin */
6833 break;
6834 }
6835
6836 /* The switch statement above can drop through to cause the function
6837 to be called normally. */
6838 return expand_call (exp, target, ignore);
6839 }
6840
6841 /* Determine whether a tree node represents a call to a built-in
6842 function. If the tree T is a call to a built-in function with
6843 the right number of arguments of the appropriate types, return
6844 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6845 Otherwise the return value is END_BUILTINS. */
6846
enum built_in_function
builtin_mathfn_code (tree t)
{
  tree fndecl, arg, parmlist;
  tree argtype, parmtype;
  call_expr_arg_iterator iter;

  /* Only a direct call (ADDR_EXPR of the callee) can name a built-in.  */
  if (TREE_CODE (t) != CALL_EXPR
      || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
    return END_BUILTINS;

  /* The callee must be a built-in FUNCTION_DECL; machine-dependent
     (BUILT_IN_MD) built-ins use a separate function-code namespace,
     so they are excluded here.  */
  fndecl = get_callee_fndecl (t);
  if (fndecl == NULL_TREE
      || TREE_CODE (fndecl) != FUNCTION_DECL
      || ! DECL_BUILT_IN (fndecl)
      || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return END_BUILTINS;

  /* Walk the formal parameter list and the actual arguments in
     lock-step, requiring each argument to be in the same broad type
     class as its parameter.  */
  parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  init_call_expr_arg_iterator (t, &iter);
  for (; parmlist; parmlist = TREE_CHAIN (parmlist))
    {
      /* If a function doesn't take a variable number of arguments,
	 the last element in the list will have type `void'.  */
      parmtype = TREE_VALUE (parmlist);
      if (VOID_TYPE_P (parmtype))
	{
	  /* Extra actual arguments past the prototype mean the call
	     is malformed for this built-in.  */
	  if (more_call_expr_args_p (&iter))
	    return END_BUILTINS;
	  return DECL_FUNCTION_CODE (fndecl);
	}

      /* Too few actual arguments.  */
      if (! more_call_expr_args_p (&iter))
	return END_BUILTINS;

      arg = next_call_expr_arg (&iter);
      argtype = TREE_TYPE (arg);

      /* Loose compatibility check: real, complex real, pointer and
	 integral classes each only match themselves.  */
      if (SCALAR_FLOAT_TYPE_P (parmtype))
	{
	  if (! SCALAR_FLOAT_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (COMPLEX_FLOAT_TYPE_P (parmtype))
	{
	  if (! COMPLEX_FLOAT_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (POINTER_TYPE_P (parmtype))
	{
	  if (! POINTER_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (INTEGRAL_TYPE_P (parmtype))
	{
	  if (! INTEGRAL_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else
	return END_BUILTINS;
    }

  /* Variable-length argument list.  */
  return DECL_FUNCTION_CODE (fndecl);
}
6912
6913 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
6914 evaluate to a constant. */
6915
static tree
fold_builtin_constant_p (tree arg)
{
  /* We return 1 for a numeric type that's known to be a constant
     value at compile-time or for an aggregate type that's a
     literal constant.  */
  STRIP_NOPS (arg);

  /* If we know this is a constant, emit the constant of one.  */
  if (CONSTANT_CLASS_P (arg)
      || (TREE_CODE (arg) == CONSTRUCTOR
	  && TREE_CONSTANT (arg)))
    return integer_one_node;
  /* The address of a string literal — possibly through a zero-index
     ARRAY_REF into it — is also a compile-time constant.  */
  if (TREE_CODE (arg) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (arg, 0);
      if (TREE_CODE (op) == STRING_CST
	  || (TREE_CODE (op) == ARRAY_REF
	      && integer_zerop (TREE_OPERAND (op, 1))
	      && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
	return integer_one_node;
    }

  /* If this expression has side effects, show we don't know it to be a
     constant.  Likewise if it's a pointer or aggregate type since in
     those case we only want literals, since those are only optimized
     when generating RTL, not later.
     And finally, if we are compiling an initializer, not code, we
     need to return a definite result now; there's not going to be any
     more optimization done.  */
  if (TREE_SIDE_EFFECTS (arg)
      || AGGREGATE_TYPE_P (TREE_TYPE (arg))
      || POINTER_TYPE_P (TREE_TYPE (arg))
      || cfun == 0
      || folding_initializer)
    return integer_zero_node;

  /* Otherwise defer the decision: later optimization may still prove
     ARG constant.  */
  return NULL_TREE;
}
6955
6956 /* Fold a call to __builtin_expect with argument ARG, if we expect that a
6957 comparison against the argument will fold to a constant. In practice,
6958 this means a true constant or the address of a non-weak symbol. */
6959
6960 static tree
6961 fold_builtin_expect (tree arg)
6962 {
6963 tree inner;
6964
6965 /* If the argument isn't invariant, then there's nothing we can do. */
6966 if (!TREE_INVARIANT (arg))
6967 return NULL_TREE;
6968
6969 /* If we're looking at an address of a weak decl, then do not fold. */
6970 inner = arg;
6971 STRIP_NOPS (inner);
6972 if (TREE_CODE (inner) == ADDR_EXPR)
6973 {
6974 do
6975 {
6976 inner = TREE_OPERAND (inner, 0);
6977 }
6978 while (TREE_CODE (inner) == COMPONENT_REF
6979 || TREE_CODE (inner) == ARRAY_REF);
6980 if (DECL_P (inner) && DECL_WEAK (inner))
6981 return NULL_TREE;
6982 }
6983
6984 /* Otherwise, ARG already has the proper type for the return value. */
6985 return arg;
6986 }
6987
6988 /* Fold a call to __builtin_classify_type with argument ARG. */
6989
6990 static tree
6991 fold_builtin_classify_type (tree arg)
6992 {
6993 if (arg == 0)
6994 return build_int_cst (NULL_TREE, no_type_class);
6995
6996 return build_int_cst (NULL_TREE, type_to_class (TREE_TYPE (arg)));
6997 }
6998
6999 /* Fold a call to __builtin_strlen with argument ARG. */
7000
7001 static tree
7002 fold_builtin_strlen (tree arg)
7003 {
7004 if (!validate_arg (arg, POINTER_TYPE))
7005 return NULL_TREE;
7006 else
7007 {
7008 tree len = c_strlen (arg, 0);
7009
7010 if (len)
7011 {
7012 /* Convert from the internal "sizetype" type to "size_t". */
7013 if (size_type_node)
7014 len = fold_convert (size_type_node, len);
7015 return len;
7016 }
7017
7018 return NULL_TREE;
7019 }
7020 }
7021
7022 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7023
7024 static tree
7025 fold_builtin_inf (tree type, int warn)
7026 {
7027 REAL_VALUE_TYPE real;
7028
7029 /* __builtin_inff is intended to be usable to define INFINITY on all
7030 targets. If an infinity is not available, INFINITY expands "to a
7031 positive constant of type float that overflows at translation
7032 time", footnote "In this case, using INFINITY will violate the
7033 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7034 Thus we pedwarn to ensure this constraint violation is
7035 diagnosed. */
7036 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7037 pedwarn ("target format does not support infinity");
7038
7039 real_inf (&real);
7040 return build_real (type, real);
7041 }
7042
7043 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7044
7045 static tree
7046 fold_builtin_nan (tree arg, tree type, int quiet)
7047 {
7048 REAL_VALUE_TYPE real;
7049 const char *str;
7050
7051 if (!validate_arg (arg, POINTER_TYPE))
7052 return NULL_TREE;
7053 str = c_getstr (arg);
7054 if (!str)
7055 return NULL_TREE;
7056
7057 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7058 return NULL_TREE;
7059
7060 return build_real (type, real);
7061 }
7062
7063 /* Return true if the floating point expression T has an integer value.
7064 We also allow +Inf, -Inf and NaN to be considered integer values. */
7065
static bool
integer_valued_real_p (tree t)
{
  switch (TREE_CODE (t))
    {
    /* A conversion from an integer type is integral by construction.  */
    case FLOAT_EXPR:
      return true;

    /* These pass the property straight through from their operand.  */
    case ABS_EXPR:
    case SAVE_EXPR:
    case NON_LVALUE_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 0));

    /* For these, the value is that of the second operand.  */
    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      return integer_valued_real_p (GENERIC_TREE_OPERAND (t, 1));

    /* Arithmetic on integer-valued operands stays integer valued
       (+Inf, -Inf and NaN count as integer values here, per the
       function comment above).  */
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 0))
	     && integer_valued_real_p (TREE_OPERAND (t, 1));

    /* A conditional is integer valued if both selectable arms are.  */
    case COND_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 1))
	     && integer_valued_real_p (TREE_OPERAND (t, 2));

    /* For a literal, ask the real-arithmetic machinery directly.  */
    case REAL_CST:
      return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));

    case NOP_EXPR:
      {
	/* A conversion from an integer type, or from a real value
	   already known to be integral, preserves the property.  */
	tree type = TREE_TYPE (TREE_OPERAND (t, 0));
	if (TREE_CODE (type) == INTEGER_TYPE)
	  return true;
	if (TREE_CODE (type) == REAL_TYPE)
	  return integer_valued_real_p (TREE_OPERAND (t, 0));
	break;
      }

    case CALL_EXPR:
      switch (builtin_mathfn_code (t))
	{
	/* Rounding built-ins produce integer values by definition.  */
	CASE_FLT_FN (BUILT_IN_CEIL):
	CASE_FLT_FN (BUILT_IN_FLOOR):
	CASE_FLT_FN (BUILT_IN_NEARBYINT):
	CASE_FLT_FN (BUILT_IN_RINT):
	CASE_FLT_FN (BUILT_IN_ROUND):
	CASE_FLT_FN (BUILT_IN_TRUNC):
	  return true;

	/* fmin/fmax of two integer values is an integer value.  */
	CASE_FLT_FN (BUILT_IN_FMIN):
	CASE_FLT_FN (BUILT_IN_FMAX):
	  return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
	    && integer_valued_real_p (CALL_EXPR_ARG (t, 1));

	default:
	  break;
	}
      break;

    default:
      break;
    }
  /* Conservative default: cannot prove the value is integral.  */
  return false;
}
7135
/* FNDECL is assumed to be a builtin where truncation can be propagated
   across (for instance floor((double)f) == (double)floorf (f)).
   Do the transformation for a call with argument ARG.  */
7139
static tree
fold_trunc_transparent_mathfn (tree fndecl, tree arg)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Integer rounding functions are idempotent.  */
  if (fcode == builtin_mathfn_code (arg))
    return arg;

  /* If argument is already integer valued, and we don't need to worry
     about setting errno, there's no need to perform rounding.  */
  if (! flag_errno_math && integer_valued_real_p (arg))
    return arg;

  if (optimize)
    {
      /* Narrow e.g. floor ((double) f) to (double) floorf (f) when a
	 built-in exists for the narrower type and the call's type is
	 strictly wider than the underlying value's type.  */
      tree arg0 = strip_float_extensions (arg);
      tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
      tree newtype = TREE_TYPE (arg0);
      tree decl;

      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
	  && (decl = mathfn_built_in (newtype, fcode)))
	return fold_convert (ftype,
			     build_call_expr (decl, 1,
					      fold_convert (newtype, arg0)));
    }
  return NULL_TREE;
}
7172
7173 /* FNDECL is assumed to be builtin which can narrow the FP type of
7174 the argument, for instance lround((double)f) -> lroundf (f).
7175 Do the transformation for a call with argument ARG. */
7176
static tree
fold_fixed_mathfn (tree fndecl, tree arg)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* If argument is already integer valued, and we don't need to worry
     about setting errno, there's no need to perform rounding.  */
  if (! flag_errno_math && integer_valued_real_p (arg))
    return fold_build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)), arg);

  if (optimize)
    {
      /* Narrow e.g. lround ((double) f) to lroundf (f) when a
	 built-in exists for the narrower argument type.  */
      tree ftype = TREE_TYPE (arg);
      tree arg0 = strip_float_extensions (arg);
      tree newtype = TREE_TYPE (arg0);
      tree decl;

      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
	  && (decl = mathfn_built_in (newtype, fcode)))
	return build_call_expr (decl, 1, fold_convert (newtype, arg0));
    }

  /* Canonicalize llround (x) to lround (x) on LP64 targets where
     sizeof (long long) == sizeof (long).  */
  if (TYPE_PRECISION (long_long_integer_type_node)
      == TYPE_PRECISION (long_integer_type_node))
    {
      tree newfn = NULL_TREE;
      switch (fcode)
	{
	CASE_FLT_FN (BUILT_IN_LLCEIL):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
	  break;

	CASE_FLT_FN (BUILT_IN_LLFLOOR):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
	  break;

	CASE_FLT_FN (BUILT_IN_LLROUND):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
	  break;

	CASE_FLT_FN (BUILT_IN_LLRINT):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
	  break;

	default:
	  break;
	}

      if (newfn)
	{
	  /* Convert the long result back to the original (long long)
	     return type of the builtin being folded.  */
	  tree newcall = build_call_expr(newfn, 1, arg);
	  return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), newcall);
	}
    }

  return NULL_TREE;
}
7239
7240 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7241 return type. Return NULL_TREE if no simplification can be made. */
7242
static tree
fold_builtin_cabs (tree arg, tree type, tree fndecl)
{
  tree res;

  /* The argument must be complex with real-typed components.  */
  if (TREE_CODE (TREE_TYPE (arg)) != COMPLEX_TYPE
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
    return NULL_TREE;

  /* Calculate the result when the argument is a constant:
     cabs (x + yi) is hypot (x, y).  */
  if (TREE_CODE (arg) == COMPLEX_CST
      && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
			      type, mpfr_hypot)))
    return res;

  if (TREE_CODE (arg) == COMPLEX_EXPR)
    {
      tree real = TREE_OPERAND (arg, 0);
      tree imag = TREE_OPERAND (arg, 1);

      /* If either part is zero, cabs is fabs of the other.  */
      if (real_zerop (real))
	return fold_build1 (ABS_EXPR, type, imag);
      if (real_zerop (imag))
	return fold_build1 (ABS_EXPR, type, real);

      /* cabs(x+xi) -> fabs(x)*sqrt(2).  */
      if (flag_unsafe_math_optimizations
	  && operand_equal_p (real, imag, OEP_PURE_SAME))
	{
	  const REAL_VALUE_TYPE sqrt2_trunc
	    = real_value_truncate (TYPE_MODE (type), dconstsqrt2);
	  STRIP_NOPS (real);
	  return fold_build2 (MULT_EXPR, type,
			      fold_build1 (ABS_EXPR, type, real),
			      build_real (type, sqrt2_trunc));
	}
    }

  /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z).  */
  if (TREE_CODE (arg) == NEGATE_EXPR
      || TREE_CODE (arg) == CONJ_EXPR)
    return build_call_expr (fndecl, 1, TREE_OPERAND (arg, 0));

  /* Expand cabs (z) as sqrt (re*re + im*im), but don't do this when
     optimizing for size — the expansion is larger than the call.  */
  if (flag_unsafe_math_optimizations
      && optimize && !optimize_size)
    {
      tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);

      if (sqrtfn != NULL_TREE)
	{
	  tree rpart, ipart, result;

	  /* Wrap ARG and each extracted part in save_exprs so every
	     subexpression is evaluated only once.  */
	  arg = builtin_save_expr (arg);

	  rpart = fold_build1 (REALPART_EXPR, type, arg);
	  ipart = fold_build1 (IMAGPART_EXPR, type, arg);

	  rpart = builtin_save_expr (rpart);
	  ipart = builtin_save_expr (ipart);

	  result = fold_build2 (PLUS_EXPR, type,
				fold_build2 (MULT_EXPR, type,
					     rpart, rpart),
				fold_build2 (MULT_EXPR, type,
					     ipart, ipart));

	  return build_call_expr (sqrtfn, 1, result);
	}
    }

  return NULL_TREE;
}
7317
7318 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7319 Return NULL_TREE if no simplification can be made. */
7320
static tree
fold_builtin_sqrt (tree arg, tree type)
{

  enum built_in_function fcode;
  tree res;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
    return res;

  /* Optimize sqrt(expN(x)) = expN(x*0.5).  */
  fcode = builtin_mathfn_code (arg);
  if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
    {
      tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
      arg = fold_build2 (MULT_EXPR, type,
			 CALL_EXPR_ARG (arg, 0),
			 build_real (type, dconsthalf));
      return build_call_expr (expfn, 1, arg);
    }

  /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)).  */
  if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
    {
      tree powfn = mathfn_built_in (type, BUILT_IN_POW);

      if (powfn)
	{
	  tree arg0 = CALL_EXPR_ARG (arg, 0);
	  tree tree_root;
	  /* The inner root was either sqrt or cbrt.  */
	  REAL_VALUE_TYPE dconstroot =
	    BUILTIN_SQRT_P (fcode) ? dconsthalf : dconstthird;

	  /* Adjust for the outer root: decrementing the binary
	     exponent halves the inner root's exponent value.  */
	  SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
	  dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
	  tree_root = build_real (type, dconstroot);
	  return build_call_expr (powfn, 2, arg0, tree_root);
	}
    }

  /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5).  */
  if (flag_unsafe_math_optimizations
      && (fcode == BUILT_IN_POW
	  || fcode == BUILT_IN_POWF
	  || fcode == BUILT_IN_POWL))
    {
      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
      tree arg0 = CALL_EXPR_ARG (arg, 0);
      tree arg1 = CALL_EXPR_ARG (arg, 1);
      tree narg1;
      /* Force a nonnegative base, since only then is the rewrite
	 of the exponent valid for pow.  */
      if (!tree_expr_nonnegative_p (arg0))
	arg0 = build1 (ABS_EXPR, type, arg0);
      narg1 = fold_build2 (MULT_EXPR, type, arg1,
			   build_real (type, dconsthalf));
      return build_call_expr (powfn, 2, arg0, narg1);
    }

  return NULL_TREE;
}
7386
7387 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7388 Return NULL_TREE if no simplification can be made. */
7389
static tree
fold_builtin_cbrt (tree arg, tree type)
{
  const enum built_in_function fcode = builtin_mathfn_code (arg);
  tree res;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
    return res;

  if (flag_unsafe_math_optimizations)
    {
      /* Optimize cbrt(expN(x)) -> expN(x/3).  */
      if (BUILTIN_EXPONENT_P (fcode))
	{
	  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
	  const REAL_VALUE_TYPE third_trunc =
	    real_value_truncate (TYPE_MODE (type), dconstthird);
	  arg = fold_build2 (MULT_EXPR, type,
			     CALL_EXPR_ARG (arg, 0),
			     build_real (type, third_trunc));
	  return build_call_expr (expfn, 1, arg);
	}

      /* Optimize cbrt(sqrt(x)) -> pow(x,1/6).  */
      if (BUILTIN_SQRT_P (fcode))
	{
	  tree powfn = mathfn_built_in (type, BUILT_IN_POW);

	  if (powfn)
	    {
	      tree arg0 = CALL_EXPR_ARG (arg, 0);
	      tree tree_root;
	      /* Start from 1/3 and halve it (by decrementing the
		 binary exponent) to obtain 1/6.  */
	      REAL_VALUE_TYPE dconstroot = dconstthird;

	      SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
	      dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
	      tree_root = build_real (type, dconstroot);
	      return build_call_expr (powfn, 2, arg0, tree_root);
	    }
	}

      /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative.  */
      if (BUILTIN_CBRT_P (fcode))
	{
	  tree arg0 = CALL_EXPR_ARG (arg, 0);
	  if (tree_expr_nonnegative_p (arg0))
	    {
	      tree powfn = mathfn_built_in (type, BUILT_IN_POW);

	      if (powfn)
		{
		  tree tree_root;
		  REAL_VALUE_TYPE dconstroot;

		  /* 1/9 == (1/3) * (1/3).  */
		  real_arithmetic (&dconstroot, MULT_EXPR, &dconstthird, &dconstthird);
		  dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
		  tree_root = build_real (type, dconstroot);
		  return build_call_expr (powfn, 2, arg0, tree_root);
		}
	    }
	}

      /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative.  */
      if (fcode == BUILT_IN_POW
	  || fcode == BUILT_IN_POWF
	  || fcode == BUILT_IN_POWL)
	{
	  tree arg00 = CALL_EXPR_ARG (arg, 0);
	  tree arg01 = CALL_EXPR_ARG (arg, 1);
	  if (tree_expr_nonnegative_p (arg00))
	    {
	      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
	      const REAL_VALUE_TYPE dconstroot
		= real_value_truncate (TYPE_MODE (type), dconstthird);
	      tree narg01 = fold_build2 (MULT_EXPR, type, arg01,
					 build_real (type, dconstroot));
	      return build_call_expr (powfn, 2, arg00, narg01);
	    }
	}
    }
  return NULL_TREE;
}
7476
7477 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7478 TYPE is the type of the return value. Return NULL_TREE if no
7479 simplification can be made. */
7480
7481 static tree
7482 fold_builtin_cos (tree arg, tree type, tree fndecl)
7483 {
7484 tree res, narg;
7485
7486 if (!validate_arg (arg, REAL_TYPE))
7487 return NULL_TREE;
7488
7489 /* Calculate the result when the argument is a constant. */
7490 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7491 return res;
7492
7493 /* Optimize cos(-x) into cos (x). */
7494 if ((narg = fold_strip_sign_ops (arg)))
7495 return build_call_expr (fndecl, 1, narg);
7496
7497 return NULL_TREE;
7498 }
7499
7500 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7501 Return NULL_TREE if no simplification can be made. */
7502
7503 static tree
7504 fold_builtin_cosh (tree arg, tree type, tree fndecl)
7505 {
7506 if (validate_arg (arg, REAL_TYPE))
7507 {
7508 tree res, narg;
7509
7510 /* Calculate the result when the argument is a constant. */
7511 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7512 return res;
7513
7514 /* Optimize cosh(-x) into cosh (x). */
7515 if ((narg = fold_strip_sign_ops (arg)))
7516 return build_call_expr (fndecl, 1, narg);
7517 }
7518
7519 return NULL_TREE;
7520 }
7521
7522 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7523 Return NULL_TREE if no simplification can be made. */
7524
7525 static tree
7526 fold_builtin_tan (tree arg, tree type)
7527 {
7528 enum built_in_function fcode;
7529 tree res;
7530
7531 if (!validate_arg (arg, REAL_TYPE))
7532 return NULL_TREE;
7533
7534 /* Calculate the result when the argument is a constant. */
7535 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7536 return res;
7537
7538 /* Optimize tan(atan(x)) = x. */
7539 fcode = builtin_mathfn_code (arg);
7540 if (flag_unsafe_math_optimizations
7541 && (fcode == BUILT_IN_ATAN
7542 || fcode == BUILT_IN_ATANF
7543 || fcode == BUILT_IN_ATANL))
7544 return CALL_EXPR_ARG (arg, 0);
7545
7546 return NULL_TREE;
7547 }
7548
7549 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7550 NULL_TREE if no simplification can be made. */
7551
static tree
fold_builtin_sincos (tree arg0, tree arg1, tree arg2)
{
  tree type;
  tree res, fn, call;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE))
    return NULL_TREE;

  type = TREE_TYPE (arg0);

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
    return res;

  /* Canonicalize sincos to cexpi.  */
  if (!TARGET_C99_FUNCTIONS)
    return NULL_TREE;
  fn = mathfn_built_in (type, BUILT_IN_CEXPI);
  if (!fn)
    return NULL_TREE;

  /* Evaluate cexpi (arg0) exactly once ...  */
  call = build_call_expr (fn, 1, arg0);
  call = builtin_save_expr (call);

  /* ... then store the imaginary part through *arg1 and the real
     part through *arg2.  */
  return build2 (COMPOUND_EXPR, type,
		 build2 (MODIFY_EXPR, void_type_node,
			 build_fold_indirect_ref (arg1),
			 build1 (IMAGPART_EXPR, type, call)),
		 build2 (MODIFY_EXPR, void_type_node,
			 build_fold_indirect_ref (arg2),
			 build1 (REALPART_EXPR, type, call)));
}
7587
7588 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7589 NULL_TREE if no simplification can be made. */
7590
static tree
fold_builtin_cexp (tree arg0, tree type)
{
  tree rtype;
  tree realp, imagp, ifn;

  if (!validate_arg (arg0, COMPLEX_TYPE))
    return NULL_TREE;

  /* RTYPE is the real component type of the complex argument.  */
  rtype = TREE_TYPE (TREE_TYPE (arg0));

  /* In case we can figure out the real part of arg0 and it is constant zero
     fold to cexpi.  */
  if (!TARGET_C99_FUNCTIONS)
    return NULL_TREE;
  ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
  if (!ifn)
    return NULL_TREE;

  /* cexp (0 + yi) == cexpi (y).  */
  if ((realp = fold_unary (REALPART_EXPR, rtype, arg0))
      && real_zerop (realp))
    {
      tree narg = fold_build1 (IMAGPART_EXPR, rtype, arg0);
      return build_call_expr (ifn, 1, narg);
    }

  /* In case we can easily decompose real and imaginary parts split cexp
     to exp (r) * cexpi (i).  */
  if (flag_unsafe_math_optimizations
      && realp)
    {
      tree rfn, rcall, icall;

      rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
      if (!rfn)
	return NULL_TREE;

      imagp = fold_unary (IMAGPART_EXPR, rtype, arg0);
      if (!imagp)
	return NULL_TREE;

      /* Wrap each call in a save_expr so it is evaluated only once,
	 then build exp(r)*cos(i) + exp(r)*sin(i)*I.  */
      icall = build_call_expr (ifn, 1, imagp);
      icall = builtin_save_expr (icall);
      rcall = build_call_expr (rfn, 1, realp);
      rcall = builtin_save_expr (rcall);
      return build2 (COMPLEX_EXPR, type,
		     build2 (MULT_EXPR, rtype,
			     rcall,
			     build1 (REALPART_EXPR, rtype, icall)),
		     build2 (MULT_EXPR, rtype,
			     rcall,
			     build1 (IMAGPART_EXPR, rtype, icall)));
    }

  return NULL_TREE;
}
7647
7648 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7649 Return NULL_TREE if no simplification can be made. */
7650
7651 static tree
7652 fold_builtin_trunc (tree fndecl, tree arg)
7653 {
7654 if (!validate_arg (arg, REAL_TYPE))
7655 return NULL_TREE;
7656
7657 /* Optimize trunc of constant value. */
7658 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7659 {
7660 REAL_VALUE_TYPE r, x;
7661 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7662
7663 x = TREE_REAL_CST (arg);
7664 real_trunc (&r, TYPE_MODE (type), &x);
7665 return build_real (type, r);
7666 }
7667
7668 return fold_trunc_transparent_mathfn (fndecl, arg);
7669 }
7670
7671 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7672 Return NULL_TREE if no simplification can be made. */
7673
7674 static tree
7675 fold_builtin_floor (tree fndecl, tree arg)
7676 {
7677 if (!validate_arg (arg, REAL_TYPE))
7678 return NULL_TREE;
7679
7680 /* Optimize floor of constant value. */
7681 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7682 {
7683 REAL_VALUE_TYPE x;
7684
7685 x = TREE_REAL_CST (arg);
7686 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7687 {
7688 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7689 REAL_VALUE_TYPE r;
7690
7691 real_floor (&r, TYPE_MODE (type), &x);
7692 return build_real (type, r);
7693 }
7694 }
7695
7696 /* Fold floor (x) where x is nonnegative to trunc (x). */
7697 if (tree_expr_nonnegative_p (arg))
7698 {
7699 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7700 if (truncfn)
7701 return build_call_expr (truncfn, 1, arg);
7702 }
7703
7704 return fold_trunc_transparent_mathfn (fndecl, arg);
7705 }
7706
7707 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7708 Return NULL_TREE if no simplification can be made. */
7709
7710 static tree
7711 fold_builtin_ceil (tree fndecl, tree arg)
7712 {
7713 if (!validate_arg (arg, REAL_TYPE))
7714 return NULL_TREE;
7715
7716 /* Optimize ceil of constant value. */
7717 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7718 {
7719 REAL_VALUE_TYPE x;
7720
7721 x = TREE_REAL_CST (arg);
7722 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7723 {
7724 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7725 REAL_VALUE_TYPE r;
7726
7727 real_ceil (&r, TYPE_MODE (type), &x);
7728 return build_real (type, r);
7729 }
7730 }
7731
7732 return fold_trunc_transparent_mathfn (fndecl, arg);
7733 }
7734
7735 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7736 Return NULL_TREE if no simplification can be made. */
7737
7738 static tree
7739 fold_builtin_round (tree fndecl, tree arg)
7740 {
7741 if (!validate_arg (arg, REAL_TYPE))
7742 return NULL_TREE;
7743
7744 /* Optimize round of constant value. */
7745 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7746 {
7747 REAL_VALUE_TYPE x;
7748
7749 x = TREE_REAL_CST (arg);
7750 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7751 {
7752 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7753 REAL_VALUE_TYPE r;
7754
7755 real_round (&r, TYPE_MODE (type), &x);
7756 return build_real (type, r);
7757 }
7758 }
7759
7760 return fold_trunc_transparent_mathfn (fndecl, arg);
7761 }
7762
7763 /* Fold function call to builtin lround, lroundf or lroundl (or the
7764 corresponding long long versions) and other rounding functions. ARG
7765 is the argument to the call. Return NULL_TREE if no simplification
7766 can be made. */
7767
static tree
fold_builtin_int_roundingfn (tree fndecl, tree arg)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Optimize lround of constant value.  */
  if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);

      /* NaN and Inf have no integer value; leave those to the library
	 call so the run-time behavior is preserved.  */
      if (! REAL_VALUE_ISNAN (x) && ! REAL_VALUE_ISINF (x))
	{
	  tree itype = TREE_TYPE (TREE_TYPE (fndecl));
	  tree ftype = TREE_TYPE (arg);
	  unsigned HOST_WIDE_INT lo2;
	  HOST_WIDE_INT hi, lo;
	  REAL_VALUE_TYPE r;

	  /* Round in the FP domain according to which builtin this is.  */
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    CASE_FLT_FN (BUILT_IN_LFLOOR):
	    CASE_FLT_FN (BUILT_IN_LLFLOOR):
	      real_floor (&r, TYPE_MODE (ftype), &x);
	      break;

	    CASE_FLT_FN (BUILT_IN_LCEIL):
	    CASE_FLT_FN (BUILT_IN_LLCEIL):
	      real_ceil (&r, TYPE_MODE (ftype), &x);
	      break;

	    CASE_FLT_FN (BUILT_IN_LROUND):
	    CASE_FLT_FN (BUILT_IN_LLROUND):
	      real_round (&r, TYPE_MODE (ftype), &x);
	      break;

	    default:
	      gcc_unreachable ();
	    }

	  /* Build the integer constant only when fit_double_type
	     returns zero, i.e. the rounded value is representable in
	     the integer return type.  */
	  REAL_VALUE_TO_INT (&lo, &hi, r);
	  if (!fit_double_type (lo, hi, &lo2, &hi, itype))
	    return build_int_cst_wide (itype, lo2, hi);
	}
    }

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x).  */
      if (tree_expr_nonnegative_p (arg))
	return fold_build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)),
			    arg);
      break;
    default:;
    }

  /* Fall back to the generic narrowing/canonicalization folds.  */
  return fold_fixed_mathfn (fndecl, arg);
}
7828
/* Fold function call to builtin ffs, clz, ctz, popcount and parity
   and their long and long long variants (i.e. ffsl and ffsll).  ARG is
   the argument to the call.  Return NULL_TREE if no simplification can
   be made.  */

static tree
fold_builtin_bitop (tree fndecl, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Optimize for constant argument.  */
  if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
    {
      HOST_WIDE_INT hi, width, result;
      unsigned HOST_WIDE_INT lo;
      tree type;

      type = TREE_TYPE (arg);
      width = TYPE_PRECISION (type);
      /* The constant is held as a (HI, LO) pair of host words.  */
      lo = TREE_INT_CST_LOW (arg);

      /* Clear all the bits that are beyond the type's precision.  */
      if (width > HOST_BITS_PER_WIDE_INT)
        {
          hi = TREE_INT_CST_HIGH (arg);
          if (width < 2 * HOST_BITS_PER_WIDE_INT)
            hi &= ~((HOST_WIDE_INT) (-1) >> (width - HOST_BITS_PER_WIDE_INT));
        }
      else
        {
          hi = 0;
          if (width < HOST_BITS_PER_WIDE_INT)
            lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
        }

      switch (DECL_FUNCTION_CODE (fndecl))
        {
        CASE_INT_FN (BUILT_IN_FFS):
          /* ffs: 1-based index of the least significant set bit,
             0 if no bit is set.  LO & -LO isolates the lowest bit.  */
          if (lo != 0)
            result = exact_log2 (lo & -lo) + 1;
          else if (hi != 0)
            result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi) + 1;
          else
            result = 0;
          break;

        CASE_INT_FN (BUILT_IN_CLZ):
          /* clz: number of leading zero bits within WIDTH.  If the value
             is zero, the target macro may supply a defined result (it
             stores it into RESULT); otherwise fold to WIDTH.  */
          if (hi != 0)
            result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
          else if (lo != 0)
            result = width - floor_log2 (lo) - 1;
          else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
            result = width;
          break;

        CASE_INT_FN (BUILT_IN_CTZ):
          /* ctz: number of trailing zero bits; same zero-value treatment
             via CTZ_DEFINED_VALUE_AT_ZERO as for clz above.  */
          if (lo != 0)
            result = exact_log2 (lo & -lo);
          else if (hi != 0)
            result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi);
          else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
            result = width;
          break;

        CASE_INT_FN (BUILT_IN_POPCOUNT):
          /* popcount: count set bits; X &= X-1 clears the lowest one.  */
          result = 0;
          while (lo)
            result++, lo &= lo - 1;
          while (hi)
            result++, hi &= hi - 1;
          break;

        CASE_INT_FN (BUILT_IN_PARITY):
          /* parity: popcount modulo 2.  */
          result = 0;
          while (lo)
            result++, lo &= lo - 1;
          while (hi)
            result++, hi &= hi - 1;
          result &= 1;
          break;

        default:
          gcc_unreachable ();
        }

      return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
    }

  return NULL_TREE;
}
7920
/* Fold function call to builtin_bswap and the long and long long
   variants.  Return NULL_TREE if no simplification can be made.  */
static tree
fold_builtin_bswap (tree fndecl, tree arg)
{
  if (! validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Optimize constant value.  */
  if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
    {
      HOST_WIDE_INT hi, width, r_hi = 0;
      unsigned HOST_WIDE_INT lo, r_lo = 0;
      tree type;

      /* The constant is a (HI, LO) pair of host words; build the
         byte-reversed result in (R_HI, R_LO).  */
      type = TREE_TYPE (arg);
      width = TYPE_PRECISION (type);
      lo = TREE_INT_CST_LOW (arg);
      hi = TREE_INT_CST_HIGH (arg);

      switch (DECL_FUNCTION_CODE (fndecl))
        {
        case BUILT_IN_BSWAP32:
        case BUILT_IN_BSWAP64:
          {
            int s;

            /* Move the byte at bit offset S to the mirrored offset D,
               reading/writing whichever host word holds that offset.  */
            for (s = 0; s < width; s += 8)
              {
                int d = width - s - 8;
                unsigned HOST_WIDE_INT byte;

                if (s < HOST_BITS_PER_WIDE_INT)
                  byte = (lo >> s) & 0xff;
                else
                  byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;

                if (d < HOST_BITS_PER_WIDE_INT)
                  r_lo |= byte << d;
                else
                  r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
              }
          }

          break;

        default:
          gcc_unreachable ();
        }

      /* A narrow result fits in the low word alone.  */
      if (width < HOST_BITS_PER_WIDE_INT)
        return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), r_lo);
      else
        return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl)), r_lo, r_hi);
    }

  return NULL_TREE;
}
7979
7980 /* Return true if EXPR is the real constant contained in VALUE. */
7981
7982 static bool
7983 real_dconstp (tree expr, const REAL_VALUE_TYPE *value)
7984 {
7985 STRIP_NOPS (expr);
7986
7987 return ((TREE_CODE (expr) == REAL_CST
7988 && !TREE_OVERFLOW (expr)
7989 && REAL_VALUES_EQUAL (TREE_REAL_CST (expr), *value))
7990 || (TREE_CODE (expr) == COMPLEX_CST
7991 && real_dconstp (TREE_REALPART (expr), value)
7992 && real_zerop (TREE_IMAGPART (expr))));
7993 }
7994
/* A subroutine of fold_builtin to fold the various logarithmic
   functions.  Return NULL_TREE if no simplification can me made.
   FUNC is the corresponding MPFR logarithm function, and also selects
   which of log/log2/log10 is being folded.  */

static tree
fold_builtin_logarithm (tree fndecl, tree arg,
                        int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
{
  if (validate_arg (arg, REAL_TYPE))
    {
      tree type = TREE_TYPE (TREE_TYPE (fndecl));
      tree res;
      const enum built_in_function fcode = builtin_mathfn_code (arg);

      /* Optimize log(e) = 1.0.  We're never passed an exact 'e',
         instead we'll look for 'e' truncated to MODE.  So only do
         this if flag_unsafe_math_optimizations is set.  */
      if (flag_unsafe_math_optimizations && func == mpfr_log)
        {
          const REAL_VALUE_TYPE e_truncated =
            real_value_truncate (TYPE_MODE (type), dconste);
          if (real_dconstp (arg, &e_truncated))
            return build_real (type, dconst1);
        }

      /* Calculate the result when the argument is a constant.  */
      if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
        return res;

      /* Special case, optimize logN(expN(x)) = x.  Each log flavor is
         matched only against its own exponential flavor.  */
      if (flag_unsafe_math_optimizations
          && ((func == mpfr_log
               && (fcode == BUILT_IN_EXP
                   || fcode == BUILT_IN_EXPF
                   || fcode == BUILT_IN_EXPL))
              || (func == mpfr_log2
                  && (fcode == BUILT_IN_EXP2
                      || fcode == BUILT_IN_EXP2F
                      || fcode == BUILT_IN_EXP2L))
              || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
        return fold_convert (type, CALL_EXPR_ARG (arg, 0));

      /* Optimize logN(func()) for various exponential functions.  We
         want to determine the value "x" and the power "exponent" in
         order to transform logN(x**exponent) into exponent*logN(x).  */
      if (flag_unsafe_math_optimizations)
        {
          tree exponent = 0, x = 0;

          switch (fcode)
            {
            CASE_FLT_FN (BUILT_IN_EXP):
              /* Prepare to do logN(exp(exponent) -> exponent*logN(e).  */
              x = build_real (type,
                              real_value_truncate (TYPE_MODE (type), dconste));
              exponent = CALL_EXPR_ARG (arg, 0);
              break;
            CASE_FLT_FN (BUILT_IN_EXP2):
              /* Prepare to do logN(exp2(exponent) -> exponent*logN(2).  */
              x = build_real (type, dconst2);
              exponent = CALL_EXPR_ARG (arg, 0);
              break;
            CASE_FLT_FN (BUILT_IN_EXP10):
            CASE_FLT_FN (BUILT_IN_POW10):
              /* Prepare to do logN(exp10(exponent) -> exponent*logN(10).  */
              x = build_real (type, dconst10);
              exponent = CALL_EXPR_ARG (arg, 0);
              break;
            CASE_FLT_FN (BUILT_IN_SQRT):
              /* Prepare to do logN(sqrt(x) -> 0.5*logN(x).  */
              x = CALL_EXPR_ARG (arg, 0);
              exponent = build_real (type, dconsthalf);
              break;
            CASE_FLT_FN (BUILT_IN_CBRT):
              /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x).  */
              x = CALL_EXPR_ARG (arg, 0);
              exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
                                                                dconstthird));
              break;
            CASE_FLT_FN (BUILT_IN_POW):
              /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x).  */
              x = CALL_EXPR_ARG (arg, 0);
              exponent = CALL_EXPR_ARG (arg, 1);
              break;
            default:
              break;
            }

          /* Now perform the optimization.  */
          if (x && exponent)
            {
              tree logfn = build_call_expr (fndecl, 1, x);
              return fold_build2 (MULT_EXPR, type, exponent, logfn);
            }
        }
    }

  return NULL_TREE;
}
8094
/* Fold a builtin function call to hypot, hypotf, or hypotl.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_hypot (tree fndecl, tree arg0, tree arg1, tree type)
{
  tree res, narg0, narg1;

  /* hypot takes two real arguments.  */
  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
    return res;

  /* If either argument to hypot has a negate or abs, strip that off.
     E.g. hypot(-x,fabs(y)) -> hypot(x,y).  Valid unconditionally since
     hypot depends only on the magnitudes of its operands.  */
  narg0 = fold_strip_sign_ops (arg0);
  narg1 = fold_strip_sign_ops (arg1);
  if (narg0 || narg1)
    {
      return build_call_expr (fndecl, 2, narg0 ? narg0 : arg0,
                              narg1 ? narg1 : arg1);
    }

  /* If either argument is zero, hypot is fabs of the other.  */
  if (real_zerop (arg0))
    return fold_build1 (ABS_EXPR, type, arg1);
  else if (real_zerop (arg1))
    return fold_build1 (ABS_EXPR, type, arg0);

  /* hypot(x,x) -> fabs(x)*sqrt(2).  Guarded by unsafe-math since the
     truncated sqrt(2) constant and the multiply can round differently
     from a correctly-rounded hypot.  */
  if (flag_unsafe_math_optimizations
      && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
    {
      const REAL_VALUE_TYPE sqrt2_trunc
        = real_value_truncate (TYPE_MODE (type), dconstsqrt2);
      return fold_build2 (MULT_EXPR, type,
                          fold_build1 (ABS_EXPR, type, arg0),
                          build_real (type, sqrt2_trunc));
    }

  return NULL_TREE;
}
8140
8141
/* Fold a builtin function call to pow, powf, or powl.  Return
   NULL_TREE if no simplification can be made.  */
static tree
fold_builtin_pow (tree fndecl, tree arg0, tree arg1, tree type)
{
  tree res;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
    return res;

  /* Optimize pow(1.0,y) = 1.0.  */
  if (real_onep (arg0))
    return omit_one_operand (type, build_real (type, dconst1), arg1);

  if (TREE_CODE (arg1) == REAL_CST
      && !TREE_OVERFLOW (arg1))
    {
      REAL_VALUE_TYPE cint;
      REAL_VALUE_TYPE c;
      HOST_WIDE_INT n;

      c = TREE_REAL_CST (arg1);

      /* Optimize pow(x,0.0) = 1.0.  */
      if (REAL_VALUES_EQUAL (c, dconst0))
        return omit_one_operand (type, build_real (type, dconst1),
                                 arg0);

      /* Optimize pow(x,1.0) = x.  */
      if (REAL_VALUES_EQUAL (c, dconst1))
        return arg0;

      /* Optimize pow(x,-1.0) = 1.0/x.  */
      if (REAL_VALUES_EQUAL (c, dconstm1))
        return fold_build2 (RDIV_EXPR, type,
                            build_real (type, dconst1), arg0);

      /* Optimize pow(x,0.5) = sqrt(x).  Unsafe-math only: sqrt differs
         from pow for negative zero and negative arguments.  */
      if (flag_unsafe_math_optimizations
          && REAL_VALUES_EQUAL (c, dconsthalf))
        {
          tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);

          if (sqrtfn != NULL_TREE)
            return build_call_expr (sqrtfn, 1, arg0);
        }

      /* Optimize pow(x,1.0/3.0) = cbrt(x).  The exponent is compared
         against 1/3 truncated to the type's mode.  */
      if (flag_unsafe_math_optimizations)
        {
          const REAL_VALUE_TYPE dconstroot
            = real_value_truncate (TYPE_MODE (type), dconstthird);

          if (REAL_VALUES_EQUAL (c, dconstroot))
            {
              tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
              if (cbrtfn != NULL_TREE)
                return build_call_expr (cbrtfn, 1, arg0);
            }
        }

      /* Check for an integer exponent: round-trip C through an integer
         and fold only if nothing was lost.  */
      n = real_to_integer (&c);
      real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
      if (real_identical (&c, &cint))
        {
          /* Attempt to evaluate pow at compile-time.  */
          if (TREE_CODE (arg0) == REAL_CST
              && !TREE_OVERFLOW (arg0))
            {
              REAL_VALUE_TYPE x;
              bool inexact;

              x = TREE_REAL_CST (arg0);
              inexact = real_powi (&x, TYPE_MODE (type), &x, n);
              /* An inexact result is only acceptable under unsafe math.  */
              if (flag_unsafe_math_optimizations || !inexact)
                return build_real (type, x);
            }

          /* Strip sign ops from even integer powers, since the sign of
             the base cannot affect the result then.  */
          if ((n & 1) == 0 && flag_unsafe_math_optimizations)
            {
              tree narg0 = fold_strip_sign_ops (arg0);
              if (narg0)
                return build_call_expr (fndecl, 2, narg0, arg1);
            }
        }
    }

  if (flag_unsafe_math_optimizations)
    {
      const enum built_in_function fcode = builtin_mathfn_code (arg0);

      /* Optimize pow(expN(x),y) = expN(x*y).  */
      if (BUILTIN_EXPONENT_P (fcode))
        {
          /* Reuse the original call's function decl for the new call.  */
          tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
          tree arg = CALL_EXPR_ARG (arg0, 0);
          arg = fold_build2 (MULT_EXPR, type, arg, arg1);
          return build_call_expr (expfn, 1, arg);
        }

      /* Optimize pow(sqrt(x),y) = pow(x,y*0.5).  */
      if (BUILTIN_SQRT_P (fcode))
        {
          tree narg0 = CALL_EXPR_ARG (arg0, 0);
          tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
                                    build_real (type, dconsthalf));
          return build_call_expr (fndecl, 2, narg0, narg1);
        }

      /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative.  */
      if (BUILTIN_CBRT_P (fcode))
        {
          tree arg = CALL_EXPR_ARG (arg0, 0);
          if (tree_expr_nonnegative_p (arg))
            {
              const REAL_VALUE_TYPE dconstroot
                = real_value_truncate (TYPE_MODE (type), dconstthird);
              tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
                                        build_real (type, dconstroot));
              return build_call_expr (fndecl, 2, arg, narg1);
            }
        }

      /* Optimize pow(pow(x,y),z) = pow(x,y*z).  */
      if (fcode == BUILT_IN_POW
          || fcode == BUILT_IN_POWF
          || fcode == BUILT_IN_POWL)
        {
          tree arg00 = CALL_EXPR_ARG (arg0, 0);
          tree arg01 = CALL_EXPR_ARG (arg0, 1);
          tree narg1 = fold_build2 (MULT_EXPR, type, arg01, arg1);
          return build_call_expr (fndecl, 2, arg00, narg1);
        }
    }

  return NULL_TREE;
}
8286
8287 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8288 Return NULL_TREE if no simplification can be made. */
8289 static tree
8290 fold_builtin_powi (tree fndecl ATTRIBUTE_UNUSED,
8291 tree arg0, tree arg1, tree type)
8292 {
8293 if (!validate_arg (arg0, REAL_TYPE)
8294 || !validate_arg (arg1, INTEGER_TYPE))
8295 return NULL_TREE;
8296
8297 /* Optimize pow(1.0,y) = 1.0. */
8298 if (real_onep (arg0))
8299 return omit_one_operand (type, build_real (type, dconst1), arg1);
8300
8301 if (host_integerp (arg1, 0))
8302 {
8303 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
8304
8305 /* Evaluate powi at compile-time. */
8306 if (TREE_CODE (arg0) == REAL_CST
8307 && !TREE_OVERFLOW (arg0))
8308 {
8309 REAL_VALUE_TYPE x;
8310 x = TREE_REAL_CST (arg0);
8311 real_powi (&x, TYPE_MODE (type), &x, c);
8312 return build_real (type, x);
8313 }
8314
8315 /* Optimize pow(x,0) = 1.0. */
8316 if (c == 0)
8317 return omit_one_operand (type, build_real (type, dconst1),
8318 arg0);
8319
8320 /* Optimize pow(x,1) = x. */
8321 if (c == 1)
8322 return arg0;
8323
8324 /* Optimize pow(x,-1) = 1.0/x. */
8325 if (c == -1)
8326 return fold_build2 (RDIV_EXPR, type,
8327 build_real (type, dconst1), arg0);
8328 }
8329
8330 return NULL_TREE;
8331 }
8332
8333 /* A subroutine of fold_builtin to fold the various exponent
8334 functions. Return NULL_TREE if no simplification can be made.
8335 FUNC is the corresponding MPFR exponent function. */
8336
8337 static tree
8338 fold_builtin_exponent (tree fndecl, tree arg,
8339 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8340 {
8341 if (validate_arg (arg, REAL_TYPE))
8342 {
8343 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8344 tree res;
8345
8346 /* Calculate the result when the argument is a constant. */
8347 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8348 return res;
8349
8350 /* Optimize expN(logN(x)) = x. */
8351 if (flag_unsafe_math_optimizations)
8352 {
8353 const enum built_in_function fcode = builtin_mathfn_code (arg);
8354
8355 if ((func == mpfr_exp
8356 && (fcode == BUILT_IN_LOG
8357 || fcode == BUILT_IN_LOGF
8358 || fcode == BUILT_IN_LOGL))
8359 || (func == mpfr_exp2
8360 && (fcode == BUILT_IN_LOG2
8361 || fcode == BUILT_IN_LOG2F
8362 || fcode == BUILT_IN_LOG2L))
8363 || (func == mpfr_exp10
8364 && (fcode == BUILT_IN_LOG10
8365 || fcode == BUILT_IN_LOG10F
8366 || fcode == BUILT_IN_LOG10L)))
8367 return fold_convert (type, CALL_EXPR_ARG (arg, 0));
8368 }
8369 }
8370
8371 return NULL_TREE;
8372 }
8373
8374 /* Return true if VAR is a VAR_DECL or a component thereof. */
8375
8376 static bool
8377 var_decl_component_p (tree var)
8378 {
8379 tree inner = var;
8380 while (handled_component_p (inner))
8381 inner = TREE_OPERAND (inner, 0);
8382 return SSA_VAR_P (inner);
8383 }
8384
/* Fold function call to builtin memset.  Return
   NULL_TREE if no simplification can be made.  The fold replaces the
   call with a single scalar store when DEST points at a whole
   integral/pointer variable of exactly LEN bytes.  */

static tree
fold_builtin_memset (tree dest, tree c, tree len, tree type, bool ignore)
{
  tree var, ret;
  unsigned HOST_WIDE_INT length, cval;

  if (! validate_arg (dest, POINTER_TYPE)
      || ! validate_arg (c, INTEGER_TYPE)
      || ! validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  if (! host_integerp (len, 1))
    return NULL_TREE;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    return omit_one_operand (type, dest, c);

  if (! host_integerp (c, 1) || TREE_SIDE_EFFECTS (dest))
    return NULL_TREE;

  /* DEST must be the address of a non-volatile variable (or component)
     of integral or pointer type.  */
  var = dest;
  STRIP_NOPS (var);
  if (TREE_CODE (var) != ADDR_EXPR)
    return NULL_TREE;

  var = TREE_OPERAND (var, 0);
  if (TREE_THIS_VOLATILE (var))
    return NULL_TREE;

  if (!INTEGRAL_TYPE_P (TREE_TYPE (var))
      && !POINTER_TYPE_P (TREE_TYPE (var)))
    return NULL_TREE;

  if (! var_decl_component_p (var))
    return NULL_TREE;

  /* The store must cover the variable exactly and DEST must be
     sufficiently aligned for it.  */
  length = tree_low_cst (len, 1);
  if (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (var))) != length
      || get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT
      < (int) length)
    return NULL_TREE;

  if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
    return NULL_TREE;

  if (integer_zerop (c))
    cval = 0;
  else
    {
      /* Only handle the common 8-bit-byte configuration.  */
      if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
        return NULL_TREE;

      /* Replicate the low byte of C across the whole word.  The final
         double shift (<< 31 << 1) fills the high 32 bits on a 64-bit
         HOST_WIDE_INT while avoiding an out-of-range shift by 32 when
         HOST_WIDE_INT is only 32 bits wide.  */
      cval = tree_low_cst (c, 1);
      cval &= 0xff;
      cval |= cval << 8;
      cval |= cval << 16;
      cval |= (cval << 31) << 1;
    }

  /* Emit VAR = CVAL; if the memset value is used, also keep DEST.  */
  ret = build_int_cst_type (TREE_TYPE (var), cval);
  ret = build2 (MODIFY_EXPR, TREE_TYPE (var), var, ret);
  if (ignore)
    return ret;

  return omit_one_operand (type, dest, ret);
}
8455
/* Fold function call to builtin bzero.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_bzero (tree dest, tree size, bool ignore)
{
  if (! validate_arg (dest, POINTER_TYPE)
      || ! validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;

  /* bzero returns void, so only fold when the result is ignored.  */
  if (!ignore)
    return NULL_TREE;

  /* New argument list transforming bzero(ptr x, int y) to
     memset(ptr x, int 0, size_t y).  This is done this way
     so that if it isn't expanded inline, we fall back to
     calling bzero instead of memset.  */

  return fold_builtin_memset (dest, integer_zero_node,
                              fold_convert (sizetype, size),
                              void_type_node, ignore);
}
8478
8479 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8480 NULL_TREE if no simplification can be made.
8481 If ENDP is 0, return DEST (like memcpy).
8482 If ENDP is 1, return DEST+LEN (like mempcpy).
8483 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8484 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8485 (memmove). */
8486
8487 static tree
8488 fold_builtin_memory_op (tree dest, tree src, tree len, tree type, bool ignore, int endp)
8489 {
8490 tree destvar, srcvar, expr;
8491
8492 if (! validate_arg (dest, POINTER_TYPE)
8493 || ! validate_arg (src, POINTER_TYPE)
8494 || ! validate_arg (len, INTEGER_TYPE))
8495 return NULL_TREE;
8496
8497 /* If the LEN parameter is zero, return DEST. */
8498 if (integer_zerop (len))
8499 return omit_one_operand (type, dest, src);
8500
8501 /* If SRC and DEST are the same (and not volatile), return
8502 DEST{,+LEN,+LEN-1}. */
8503 if (operand_equal_p (src, dest, 0))
8504 expr = len;
8505 else
8506 {
8507 tree srctype, desttype;
8508 if (endp == 3)
8509 {
8510 int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8511 int dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8512
8513 /* Both DEST and SRC must be pointer types.
8514 ??? This is what old code did. Is the testing for pointer types
8515 really mandatory?
8516
8517 If either SRC is readonly or length is 1, we can use memcpy. */
8518 if (dest_align && src_align
8519 && (readonly_data_expr (src)
8520 || (host_integerp (len, 1)
8521 && (MIN (src_align, dest_align) / BITS_PER_UNIT >=
8522 tree_low_cst (len, 1)))))
8523 {
8524 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8525 if (!fn)
8526 return NULL_TREE;
8527 return build_call_expr (fn, 3, dest, src, len);
8528 }
8529 return NULL_TREE;
8530 }
8531
8532 if (!host_integerp (len, 0))
8533 return NULL_TREE;
8534 /* FIXME:
8535 This logic lose for arguments like (type *)malloc (sizeof (type)),
8536 since we strip the casts of up to VOID return value from malloc.
8537 Perhaps we ought to inherit type from non-VOID argument here? */
8538 STRIP_NOPS (src);
8539 STRIP_NOPS (dest);
8540 srctype = TREE_TYPE (TREE_TYPE (src));
8541 desttype = TREE_TYPE (TREE_TYPE (dest));
8542 if (!srctype || !desttype
8543 || !TYPE_SIZE_UNIT (srctype)
8544 || !TYPE_SIZE_UNIT (desttype)
8545 || TREE_CODE (TYPE_SIZE_UNIT (srctype)) != INTEGER_CST
8546 || TREE_CODE (TYPE_SIZE_UNIT (desttype)) != INTEGER_CST
8547 || !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len)
8548 || !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8549 return NULL_TREE;
8550
8551 if (get_pointer_alignment (dest, BIGGEST_ALIGNMENT)
8552 < (int) TYPE_ALIGN (desttype)
8553 || (get_pointer_alignment (src, BIGGEST_ALIGNMENT)
8554 < (int) TYPE_ALIGN (srctype)))
8555 return NULL_TREE;
8556
8557 if (!ignore)
8558 dest = builtin_save_expr (dest);
8559
8560 srcvar = build_fold_indirect_ref (src);
8561 if (TREE_THIS_VOLATILE (srcvar))
8562 return NULL_TREE;
8563 if (!tree_int_cst_equal (lang_hooks.expr_size (srcvar), len))
8564 return NULL_TREE;
8565 /* With memcpy, it is possible to bypass aliasing rules, so without
8566 this check i. e. execute/20060930-2.c would be misoptimized, because
8567 it use conflicting alias set to hold argument for the memcpy call.
8568 This check is probably unnecesary with -fno-strict-aliasing.
8569 Similarly for destvar. See also PR29286. */
8570 if (!var_decl_component_p (srcvar)
8571 /* Accept: memcpy (*char_var, "test", 1); that simplify
8572 to char_var='t'; */
8573 || is_gimple_min_invariant (srcvar)
8574 || readonly_data_expr (src))
8575 return NULL_TREE;
8576
8577 destvar = build_fold_indirect_ref (dest);
8578 if (TREE_THIS_VOLATILE (destvar))
8579 return NULL_TREE;
8580 if (!tree_int_cst_equal (lang_hooks.expr_size (destvar), len))
8581 return NULL_TREE;
8582 if (!var_decl_component_p (destvar))
8583 return NULL_TREE;
8584
8585 if (srctype == desttype
8586 || (gimple_in_ssa_p (cfun)
8587 && tree_ssa_useless_type_conversion_1 (desttype, srctype)))
8588 expr = srcvar;
8589 else if ((INTEGRAL_TYPE_P (TREE_TYPE (srcvar))
8590 || POINTER_TYPE_P (TREE_TYPE (srcvar)))
8591 && (INTEGRAL_TYPE_P (TREE_TYPE (destvar))
8592 || POINTER_TYPE_P (TREE_TYPE (destvar))))
8593 expr = fold_convert (TREE_TYPE (destvar), srcvar);
8594 else
8595 expr = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (destvar), srcvar);
8596 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, expr);
8597 }
8598
8599 if (ignore)
8600 return expr;
8601
8602 if (endp == 0 || endp == 3)
8603 return omit_one_operand (type, dest, expr);
8604
8605 if (expr == len)
8606 expr = NULL_TREE;
8607
8608 if (endp == 2)
8609 len = fold_build2 (MINUS_EXPR, TREE_TYPE (len), len,
8610 ssize_int (1));
8611
8612 len = fold_convert (TREE_TYPE (dest), len);
8613 dest = fold_build2 (PLUS_EXPR, TREE_TYPE (dest), dest, len);
8614 dest = fold_convert (type, dest);
8615 if (expr)
8616 dest = omit_one_operand (type, dest, expr);
8617 return dest;
8618 }
8619
8620 /* Fold function call to builtin strcpy with arguments DEST and SRC.
8621 If LEN is not NULL, it represents the length of the string to be
8622 copied. Return NULL_TREE if no simplification can be made. */
8623
8624 tree
8625 fold_builtin_strcpy (tree fndecl, tree dest, tree src, tree len)
8626 {
8627 tree fn;
8628
8629 if (!validate_arg (dest, POINTER_TYPE)
8630 || !validate_arg (src, POINTER_TYPE))
8631 return NULL_TREE;
8632
8633 /* If SRC and DEST are the same (and not volatile), return DEST. */
8634 if (operand_equal_p (src, dest, 0))
8635 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);
8636
8637 if (optimize_size)
8638 return NULL_TREE;
8639
8640 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8641 if (!fn)
8642 return NULL_TREE;
8643
8644 if (!len)
8645 {
8646 len = c_strlen (src, 1);
8647 if (! len || TREE_SIDE_EFFECTS (len))
8648 return NULL_TREE;
8649 }
8650
8651 len = size_binop (PLUS_EXPR, len, ssize_int (1));
8652 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
8653 build_call_expr (fn, 3, dest, src, len));
8654 }
8655
/* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
   If SLEN is not NULL, it represents the length of the source string.
   Return NULL_TREE if no simplification can be made.  */

tree
fold_builtin_strncpy (tree fndecl, tree dest, tree src, tree len, tree slen)
{
  tree fn;

  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (src, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);

  /* We can't compare slen with len as constants below if len is not a
     constant.  */
  if (len == 0 || TREE_CODE (len) != INTEGER_CST)
    return NULL_TREE;

  /* Compute the source length if the caller didn't supply it.  */
  if (!slen)
    slen = c_strlen (src, 1);

  /* Now, we must be passed a constant src ptr parameter.  */
  if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
    return NULL_TREE;

  /* Account for the terminating NUL that memcpy must also copy.  */
  slen = size_binop (PLUS_EXPR, slen, ssize_int (1));

  /* We do not support simplification of this case, though we do
     support it when expanding trees into RTL.  */
  /* FIXME: generate a call to __builtin_memset.  */
  if (tree_int_cst_lt (slen, len))
    return NULL_TREE;

  /* OK transform into builtin memcpy.  */
  fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
  if (!fn)
    return NULL_TREE;
  return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
                       build_call_expr (fn, 3, dest, src, len));
}
8701
8702 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8703 arguments to the call, and TYPE is its return type.
8704 Return NULL_TREE if no simplification can be made. */
8705
8706 static tree
8707 fold_builtin_memchr (tree arg1, tree arg2, tree len, tree type)
8708 {
8709 if (!validate_arg (arg1, POINTER_TYPE)
8710 || !validate_arg (arg2, INTEGER_TYPE)
8711 || !validate_arg (len, INTEGER_TYPE))
8712 return NULL_TREE;
8713 else
8714 {
8715 const char *p1;
8716
8717 if (TREE_CODE (arg2) != INTEGER_CST
8718 || !host_integerp (len, 1))
8719 return NULL_TREE;
8720
8721 p1 = c_getstr (arg1);
8722 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
8723 {
8724 char c;
8725 const char *r;
8726 tree tem;
8727
8728 if (target_char_cast (arg2, &c))
8729 return NULL_TREE;
8730
8731 r = memchr (p1, c, tree_low_cst (len, 1));
8732
8733 if (r == NULL)
8734 return build_int_cst (TREE_TYPE (arg1), 0);
8735
8736 tem = fold_build2 (PLUS_EXPR, TREE_TYPE (arg1), arg1,
8737 build_int_cst (TREE_TYPE (arg1), r - p1));
8738 return fold_convert (type, tem);
8739 }
8740 return NULL_TREE;
8741 }
8742 }
8743
/* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_memcmp (tree arg1, tree arg2, tree len)
{
  const char *p1, *p2;

  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return zero.  */
  if (integer_zerop (len))
    return omit_two_operands (integer_type_node, integer_zero_node,
                              arg1, arg2);

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return omit_one_operand (integer_type_node, integer_zero_node, len);

  /* NULL when the argument is not a string constant.  */
  p1 = c_getstr (arg1);
  p2 = c_getstr (arg2);

  /* If all arguments are constant, and the value of len is not greater
     than the lengths of arg1 and arg2, evaluate at compile-time.  */
  if (host_integerp (len, 1) && p1 && p2
      && compare_tree_int (len, strlen (p1) + 1) <= 0
      && compare_tree_int (len, strlen (p2) + 1) <= 0)
    {
      /* Normalize the host memcmp result to {-1, 0, 1}.  */
      const int r = memcmp (p1, p2, tree_low_cst (len, 1));

      if (r > 0)
        return integer_one_node;
      else if (r < 0)
        return integer_minus_one_node;
      else
        return integer_zero_node;
    }

  /* If len parameter is one, return an expression corresponding to
     (*(const unsigned char*)arg1 - (const unsigned char*)arg2).  */
  if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
        = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree ind1 = fold_convert (integer_type_node,
                                build1 (INDIRECT_REF, cst_uchar_node,
                                        fold_convert (cst_uchar_ptr_node,
                                                      arg1)));
      tree ind2 = fold_convert (integer_type_node,
                                build1 (INDIRECT_REF, cst_uchar_node,
                                        fold_convert (cst_uchar_ptr_node,
                                                      arg2)));
      return fold_build2 (MINUS_EXPR, integer_type_node, ind1, ind2);
    }

  return NULL_TREE;
}
8806
/* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_strcmp (tree arg1, tree arg2)
{
  const char *p1, *p2;

  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE))
    return NULL_TREE;

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return integer_zero_node;

  /* NULL when the argument is not a string constant.  */
  p1 = c_getstr (arg1);
  p2 = c_getstr (arg2);

  /* Both strings constant: evaluate at compile time, normalizing the
     host strcmp result to {-1, 0, 1}.  */
  if (p1 && p2)
    {
      const int i = strcmp (p1, p2);
      if (i < 0)
        return integer_minus_one_node;
      else if (i > 0)
        return integer_one_node;
      else
        return integer_zero_node;
    }

  /* If the second arg is "", return *(const unsigned char*)arg1.  */
  if (p2 && *p2 == '\0')
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
        = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      return fold_convert (integer_type_node,
                           build1 (INDIRECT_REF, cst_uchar_node,
                                   fold_convert (cst_uchar_ptr_node,
                                                 arg1)));
    }

  /* If the first arg is "", return -*(const unsigned char*)arg2.  */
  if (p1 && *p1 == '\0')
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
        = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree temp = fold_convert (integer_type_node,
                                build1 (INDIRECT_REF, cst_uchar_node,
                                        fold_convert (cst_uchar_ptr_node,
                                                      arg2)));
      return fold_build1 (NEGATE_EXPR, integer_type_node, temp);
    }

  return NULL_TREE;
}
8866
8867 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
8868 Return NULL_TREE if no simplification can be made. */
8869
static tree
fold_builtin_strncmp (tree arg1, tree arg2, tree len)
{
  const char *p1, *p2;

  /* Both string arguments must be pointers and LEN an integer.  */
  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return zero.  */
  if (integer_zerop (len))
    return omit_two_operands (integer_type_node, integer_zero_node,
			      arg1, arg2);

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return omit_one_operand (integer_type_node, integer_zero_node, len);

  /* c_getstr yields the constant C string an argument points at, or
     NULL when the argument is not a compile-time string constant.  */
  p1 = c_getstr (arg1);
  p2 = c_getstr (arg2);

  /* Both strings and the length are constants: do the comparison at
     compile time and fold to -1/0/1.  */
  if (host_integerp (len, 1) && p1 && p2)
    {
      const int i = strncmp (p1, p2, tree_low_cst (len, 1));
      if (i > 0)
	return integer_one_node;
      else if (i < 0)
	return integer_minus_one_node;
      else
	return integer_zero_node;
    }

  /* If the second arg is "", and the length is greater than zero,
     return *(const unsigned char*)arg1.  */
  if (p2 && *p2 == '\0'
      && TREE_CODE (len) == INTEGER_CST
      && tree_int_cst_sgn (len) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      return fold_convert (integer_type_node,
			   build1 (INDIRECT_REF, cst_uchar_node,
				   fold_convert (cst_uchar_ptr_node,
						 arg1)));
    }

  /* If the first arg is "", and the length is greater than zero,
     return -*(const unsigned char*)arg2.  */
  if (p1 && *p1 == '\0'
      && TREE_CODE (len) == INTEGER_CST
      && tree_int_cst_sgn (len) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree temp = fold_convert (integer_type_node,
				build1 (INDIRECT_REF, cst_uchar_node,
					fold_convert (cst_uchar_ptr_node,
						      arg2)));
      return fold_build1 (NEGATE_EXPR, integer_type_node, temp);
    }

  /* If len parameter is one, return an expression corresponding to
     (*(const unsigned char*)arg1 - (const unsigned char*)arg2).  */
  if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree ind1 = fold_convert (integer_type_node,
				build1 (INDIRECT_REF, cst_uchar_node,
					fold_convert (cst_uchar_ptr_node,
						      arg1)));
      tree ind2 = fold_convert (integer_type_node,
				build1 (INDIRECT_REF, cst_uchar_node,
					fold_convert (cst_uchar_ptr_node,
						      arg2)));
      return fold_build2 (MINUS_EXPR, integer_type_node, ind1, ind2);
    }

  return NULL_TREE;
}
8957
8958 /* Fold function call to builtin signbit, signbitf or signbitl with argument
8959 ARG. Return NULL_TREE if no simplification can be made. */
8960
8961 static tree
8962 fold_builtin_signbit (tree arg, tree type)
8963 {
8964 tree temp;
8965
8966 if (!validate_arg (arg, REAL_TYPE))
8967 return NULL_TREE;
8968
8969 /* If ARG is a compile-time constant, determine the result. */
8970 if (TREE_CODE (arg) == REAL_CST
8971 && !TREE_OVERFLOW (arg))
8972 {
8973 REAL_VALUE_TYPE c;
8974
8975 c = TREE_REAL_CST (arg);
8976 temp = REAL_VALUE_NEGATIVE (c) ? integer_one_node : integer_zero_node;
8977 return fold_convert (type, temp);
8978 }
8979
8980 /* If ARG is non-negative, the result is always zero. */
8981 if (tree_expr_nonnegative_p (arg))
8982 return omit_one_operand (type, integer_zero_node, arg);
8983
8984 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
8985 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
8986 return fold_build2 (LT_EXPR, type, arg,
8987 build_real (TREE_TYPE (arg), dconst0));
8988
8989 return NULL_TREE;
8990 }
8991
8992 /* Fold function call to builtin copysign, copysignf or copysignl with
8993 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
8994 be made. */
8995
static tree
fold_builtin_copysign (tree fndecl, tree arg1, tree arg2, tree type)
{
  tree tem;

  /* Both arguments must be floating-point values.  */
  if (!validate_arg (arg1, REAL_TYPE)
      || !validate_arg (arg2, REAL_TYPE))
    return NULL_TREE;

  /* copysign(X,X) is X.  */
  if (operand_equal_p (arg1, arg2, 0))
    return fold_convert (type, arg1);

  /* If ARG1 and ARG2 are compile-time constants, determine the result.  */
  if (TREE_CODE (arg1) == REAL_CST
      && TREE_CODE (arg2) == REAL_CST
      && !TREE_OVERFLOW (arg1)
      && !TREE_OVERFLOW (arg2))
    {
      REAL_VALUE_TYPE c1, c2;

      c1 = TREE_REAL_CST (arg1);
      c2 = TREE_REAL_CST (arg2);
      /* c1.sign := c2.sign.  */
      real_copysign (&c1, &c2);
      return build_real (type, c1);
    }

  /* copysign(X, Y) is fabs(X) when Y is always non-negative.
     Remember to evaluate Y for side-effects.  */
  if (tree_expr_nonnegative_p (arg2))
    return omit_one_operand (type,
			     fold_build1 (ABS_EXPR, type, arg1),
			     arg2);

  /* Strip sign changing operations for the first argument, since
     copysign will replace its sign anyway.  */
  tem = fold_strip_sign_ops (arg1);
  if (tem)
    return build_call_expr (fndecl, 2, tem, arg2);

  return NULL_TREE;
}
9038
9039 /* Fold a call to builtin isascii with argument ARG. */
9040
9041 static tree
9042 fold_builtin_isascii (tree arg)
9043 {
9044 if (!validate_arg (arg, INTEGER_TYPE))
9045 return NULL_TREE;
9046 else
9047 {
9048 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9049 arg = build2 (BIT_AND_EXPR, integer_type_node, arg,
9050 build_int_cst (NULL_TREE,
9051 ~ (unsigned HOST_WIDE_INT) 0x7f));
9052 return fold_build2 (EQ_EXPR, integer_type_node,
9053 arg, integer_zero_node);
9054 }
9055 }
9056
9057 /* Fold a call to builtin toascii with argument ARG. */
9058
9059 static tree
9060 fold_builtin_toascii (tree arg)
9061 {
9062 if (!validate_arg (arg, INTEGER_TYPE))
9063 return NULL_TREE;
9064
9065 /* Transform toascii(c) -> (c & 0x7f). */
9066 return fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9067 build_int_cst (NULL_TREE, 0x7f));
9068 }
9069
9070 /* Fold a call to builtin isdigit with argument ARG. */
9071
9072 static tree
9073 fold_builtin_isdigit (tree arg)
9074 {
9075 if (!validate_arg (arg, INTEGER_TYPE))
9076 return NULL_TREE;
9077 else
9078 {
9079 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9080 /* According to the C standard, isdigit is unaffected by locale.
9081 However, it definitely is affected by the target character set. */
9082 unsigned HOST_WIDE_INT target_digit0
9083 = lang_hooks.to_target_charset ('0');
9084
9085 if (target_digit0 == 0)
9086 return NULL_TREE;
9087
9088 arg = fold_convert (unsigned_type_node, arg);
9089 arg = build2 (MINUS_EXPR, unsigned_type_node, arg,
9090 build_int_cst (unsigned_type_node, target_digit0));
9091 return fold_build2 (LE_EXPR, integer_type_node, arg,
9092 build_int_cst (unsigned_type_node, 9));
9093 }
9094 }
9095
9096 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9097
9098 static tree
9099 fold_builtin_fabs (tree arg, tree type)
9100 {
9101 if (!validate_arg (arg, REAL_TYPE))
9102 return NULL_TREE;
9103
9104 arg = fold_convert (type, arg);
9105 if (TREE_CODE (arg) == REAL_CST)
9106 return fold_abs_const (arg, type);
9107 return fold_build1 (ABS_EXPR, type, arg);
9108 }
9109
9110 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9111
9112 static tree
9113 fold_builtin_abs (tree arg, tree type)
9114 {
9115 if (!validate_arg (arg, INTEGER_TYPE))
9116 return NULL_TREE;
9117
9118 arg = fold_convert (type, arg);
9119 if (TREE_CODE (arg) == INTEGER_CST)
9120 return fold_abs_const (arg, type);
9121 return fold_build1 (ABS_EXPR, type, arg);
9122 }
9123
9124 /* Fold a call to builtin fmin or fmax. */
9125
static tree
fold_builtin_fmin_fmax (tree arg0, tree arg1, tree type, bool max)
{
  if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
    {
      /* Calculate the result when the argument is a constant.  */
      tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));

      if (res)
	return res;

      /* If either argument is NaN, return the other one.  Avoid the
	 transformation if we get (and honor) a signalling NaN.  Using
	 omit_one_operand() ensures we create a non-lvalue.  */
      if (TREE_CODE (arg0) == REAL_CST
	  && real_isnan (&TREE_REAL_CST (arg0))
	  && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	      || ! TREE_REAL_CST (arg0).signalling))
	return omit_one_operand (type, arg1, arg0);
      if (TREE_CODE (arg1) == REAL_CST
	  && real_isnan (&TREE_REAL_CST (arg1))
	  && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
	      || ! TREE_REAL_CST (arg1).signalling))
	return omit_one_operand (type, arg0, arg1);

      /* Transform fmin/fmax(x,x) -> x.  OEP_PURE_SAME also accepts two
	 calls to the same pure function with equal arguments.  */
      if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
	return omit_one_operand (type, arg0, arg1);

      /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR.  C99 requires these
	 functions to return the numeric arg if the other one is NaN.
	 These tree codes don't honor that, so only transform if
	 -ffinite-math-only is set.  C99 doesn't require -0.0 to be
	 handled, so we don't have to worry about it either.  */
      if (flag_finite_math_only)
	return fold_build2 ((max ? MAX_EXPR : MIN_EXPR), type,
			    fold_convert (type, arg0),
			    fold_convert (type, arg1));
    }
  return NULL_TREE;
}
9167
9168 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9169
9170 static tree
9171 fold_builtin_carg (tree arg, tree type)
9172 {
9173 if (validate_arg (arg, COMPLEX_TYPE))
9174 {
9175 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9176
9177 if (atan2_fn)
9178 {
9179 tree new_arg = builtin_save_expr (arg);
9180 tree r_arg = fold_build1 (REALPART_EXPR, type, new_arg);
9181 tree i_arg = fold_build1 (IMAGPART_EXPR, type, new_arg);
9182 return build_call_expr (atan2_fn, 2, i_arg, r_arg);
9183 }
9184 }
9185
9186 return NULL_TREE;
9187 }
9188
9189 /* Fold a call to builtin logb/ilogb. */
9190
static tree
fold_builtin_logb (tree arg, tree rettype)
{
  if (! validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg);

  /* Only a valid (non-overflowing) real constant can be folded here.  */
  if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);

      switch (value->cl)
	{
	case rvc_nan:
	case rvc_inf:
	  /* If arg is Inf or NaN and we're logb, return it.  (A REAL_TYPE
	     return type identifies logb; ilogb returns an integer.)  */
	  if (TREE_CODE (rettype) == REAL_TYPE)
	    return fold_convert (rettype, arg);
	  /* Fall through... */
	case rvc_zero:
	  /* Zero may set errno and/or raise an exception for logb, also
	     for ilogb we don't know FP_ILOGB0.  */
	  return NULL_TREE;
	case rvc_normal:
	  /* For normal numbers, proceed iff radix == 2.  In GCC,
	     normalized significands are in the range [0.5, 1.0).  We
	     want the exponent as if they were [1.0, 2.0) so get the
	     exponent and subtract 1.  */
	  if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
	    return fold_convert (rettype, build_int_cst (NULL_TREE,
							 REAL_EXP (value)-1));
	  break;
	}
    }

  return NULL_TREE;
}
9229
9230 /* Fold a call to builtin significand, if radix == 2. */
9231
static tree
fold_builtin_significand (tree arg, tree rettype)
{
  if (! validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg);

  /* Only a valid (non-overflowing) real constant can be folded here.  */
  if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);

      switch (value->cl)
	{
	case rvc_zero:
	case rvc_nan:
	case rvc_inf:
	  /* If arg is +-0, +-Inf or +-NaN, then return it.  */
	  return fold_convert (rettype, arg);
	case rvc_normal:
	  /* For normal numbers, proceed iff radix == 2.  */
	  if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
	    {
	      REAL_VALUE_TYPE result = *value;
	      /* In GCC, normalized significands are in the range [0.5,
		 1.0).  We want them to be [1.0, 2.0) so set the
		 exponent to 1.  */
	      SET_REAL_EXP (&result, 1);
	      return build_real (rettype, result);
	    }
	  break;
	}
    }

  return NULL_TREE;
}
9268
9269 /* Fold a call to builtin frexp, we can assume the base is 2. */
9270
static tree
fold_builtin_frexp (tree arg0, tree arg1, tree rettype)
{
  if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg0);

  /* Only a valid (non-overflowing) real constant can be folded here.  */
  if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
    return NULL_TREE;

  arg1 = build_fold_indirect_ref (arg1);

  /* Proceed if a valid pointer type was passed in.  The exponent out
     parameter must point to an int.  */
  if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
      tree frac, exp;

      switch (value->cl)
	{
	case rvc_zero:
	  /* For +-0, return (*exp = 0, +-0).  */
	  exp = integer_zero_node;
	  frac = arg0;
	  break;
	case rvc_nan:
	case rvc_inf:
	  /* For +-NaN or +-Inf, *exp is unspecified, return arg0.  */
	  return omit_one_operand (rettype, arg0, arg1);
	case rvc_normal:
	  {
	    /* Since the frexp function always expects base 2, and in
	       GCC normalized significands are already in the range
	       [0.5, 1.0), we have exactly what frexp wants.  */
	    REAL_VALUE_TYPE frac_rvt = *value;
	    SET_REAL_EXP (&frac_rvt, 0);
	    frac = build_real (rettype, frac_rvt);
	    exp = build_int_cst (NULL_TREE, REAL_EXP (value));
	  }
	  break;
	default:
	  gcc_unreachable ();
	}

      /* Create the COMPOUND_EXPR (*arg1 = trunc, frac).  */
      arg1 = fold_build2 (MODIFY_EXPR, rettype, arg1, exp);
      TREE_SIDE_EFFECTS (arg1) = 1;
      return fold_build2 (COMPOUND_EXPR, rettype, arg1, frac);
    }

  return NULL_TREE;
}
9324
9325 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9326 then we can assume the base is two. If it's false, then we have to
9327 check the mode of the TYPE parameter in certain cases. */
9328
static tree
fold_builtin_load_exponent (tree arg0, tree arg1, tree type, bool ldexp)
{
  if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
    {
      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0.  */
      if (real_zerop (arg0) || integer_zerop (arg1)
	  || (TREE_CODE (arg0) == REAL_CST
	      && (real_isnan (&TREE_REAL_CST (arg0))
		  || real_isinf (&TREE_REAL_CST (arg0)))))
	return omit_one_operand (type, arg0, arg1);

      /* If both arguments are constant, then try to evaluate it.
	 For scalbn/scalbln this is only valid when the radix is 2,
	 since those scale by the runtime radix.  */
      if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
	  && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
	  && host_integerp (arg1, 0))
	{
	  /* Bound the maximum adjustment to twice the range of the
	     mode's valid exponents.  Use abs to ensure the range is
	     positive as a sanity check.  */
	  const long max_exp_adj = 2 *
	    labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
		  - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);

	  /* Get the user-requested adjustment.  */
	  const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);

	  /* The requested adjustment must be inside this range.  This
	     is a preliminary cap to avoid things like overflow, we
	     may still fail to compute the result for other reasons.  */
	  if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
	    {
	      REAL_VALUE_TYPE initial_result;

	      real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);

	      /* Ensure we didn't overflow.  */
	      if (! real_isinf (&initial_result))
		{
		  const REAL_VALUE_TYPE trunc_result
		    = real_value_truncate (TYPE_MODE (type), initial_result);

		  /* Only proceed if the target mode can hold the
		     resulting value.  */
		  if (REAL_VALUES_EQUAL (initial_result, trunc_result))
		    return build_real (type, trunc_result);
		}
	    }
	}
    }

  return NULL_TREE;
}
9385
9386 /* Fold a call to builtin modf. */
9387
static tree
fold_builtin_modf (tree arg0, tree arg1, tree rettype)
{
  if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg0);

  /* Only a valid (non-overflowing) real constant can be folded here.  */
  if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
    return NULL_TREE;

  arg1 = build_fold_indirect_ref (arg1);

  /* Proceed if a valid pointer type was passed in.  The integral-part
     out parameter must have the same type as the return value.  */
  if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
      REAL_VALUE_TYPE trunc, frac;

      switch (value->cl)
	{
	case rvc_nan:
	case rvc_zero:
	  /* For +-NaN or +-0, return (*arg1 = arg0, arg0).  */
	  trunc = frac = *value;
	  break;
	case rvc_inf:
	  /* For +-Inf, return (*arg1 = arg0, +-0).  */
	  frac = dconst0;
	  frac.sign = value->sign;
	  trunc = *value;
	  break;
	case rvc_normal:
	  /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)).  */
	  real_trunc (&trunc, VOIDmode, value);
	  real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
	  /* If the original number was negative and already
	     integral, then the fractional part is -0.0.  */
	  if (value->sign && frac.cl == rvc_zero)
	    frac.sign = value->sign;
	  break;
	  /* No default: all real value classes are handled above.  */
	}

      /* Create the COMPOUND_EXPR (*arg1 = trunc, frac).  */
      arg1 = fold_build2 (MODIFY_EXPR, rettype, arg1,
			  build_real (rettype, trunc));
      TREE_SIDE_EFFECTS (arg1) = 1;
      return fold_build2 (COMPOUND_EXPR, rettype, arg1,
			  build_real (rettype, frac));
    }

  return NULL_TREE;
}
9441
9442 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
9443 ARG is the argument for the call. */
9444
static tree
fold_builtin_classify (tree fndecl, tree arg, int builtin_index)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  REAL_VALUE_TYPE r;

  /* These classification builtins require a floating-point argument;
     anything else is a hard user error, not a missed folding.  */
  if (!validate_arg (arg, REAL_TYPE))
    {
      error ("non-floating-point argument to function %qs",
	     IDENTIFIER_POINTER (DECL_NAME (fndecl)));
      return error_mark_node;
    }

  switch (builtin_index)
    {
    case BUILT_IN_ISINF:
      /* When the mode has no infinities, the answer is always 0.  */
      if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
	return omit_one_operand (type, integer_zero_node, arg);

      if (TREE_CODE (arg) == REAL_CST)
	{
	  r = TREE_REAL_CST (arg);
	  if (real_isinf (&r))
	    return real_compare (GT_EXPR, &r, &dconst0)
		   ? integer_one_node : integer_minus_one_node;
	  else
	    return integer_zero_node;
	}

      return NULL_TREE;

    case BUILT_IN_FINITE:
      /* Without NaNs or infinities every value is finite.  */
      if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
	  && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
	return omit_one_operand (type, integer_one_node, arg);

      if (TREE_CODE (arg) == REAL_CST)
	{
	  r = TREE_REAL_CST (arg);
	  return real_isinf (&r) || real_isnan (&r)
		 ? integer_zero_node : integer_one_node;
	}

      return NULL_TREE;

    case BUILT_IN_ISNAN:
      /* When the mode has no NaNs, the answer is always 0.  */
      if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
	return omit_one_operand (type, integer_zero_node, arg);

      if (TREE_CODE (arg) == REAL_CST)
	{
	  r = TREE_REAL_CST (arg);
	  return real_isnan (&r) ? integer_one_node : integer_zero_node;
	}

      /* isnan(x) is equivalent to x unordered with itself; save ARG
	 so it is evaluated only once.  */
      arg = builtin_save_expr (arg);
      return fold_build2 (UNORDERED_EXPR, type, arg, arg);

    default:
      gcc_unreachable ();
    }
}
9507
9508 /* Fold a call to an unordered comparison function such as
9509 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9510 being called and ARG0 and ARG1 are the arguments for the call.
9511 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9512 the opposite of the desired result. UNORDERED_CODE is used
9513 for modes that can hold NaNs and ORDERED_CODE is used for
9514 the rest. */
9515
static tree
fold_builtin_unordered_cmp (tree fndecl, tree arg0, tree arg1,
			    enum tree_code unordered_code,
			    enum tree_code ordered_code)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum tree_code code;
  tree type0, type1;
  enum tree_code code0, code1;
  tree cmp_type = NULL_TREE;

  type0 = TREE_TYPE (arg0);
  type1 = TREE_TYPE (arg1);

  code0 = TREE_CODE (type0);
  code1 = TREE_CODE (type1);

  /* Pick a common comparison type: at least one operand must be real,
     and an integer operand is converted to the other's real type.  */
  if (code0 == REAL_TYPE && code1 == REAL_TYPE)
    /* Choose the wider of two real types.  */
    cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
	       ? type0 : type1;
  else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
    cmp_type = type0;
  else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
    cmp_type = type1;
  else
    {
      error ("non-floating-point argument to function %qs",
	     IDENTIFIER_POINTER (DECL_NAME (fndecl)));
      return error_mark_node;
    }

  arg0 = fold_convert (cmp_type, arg0);
  arg1 = fold_convert (cmp_type, arg1);

  /* isunordered() itself is handled directly; without NaNs it is
     always false.  */
  if (unordered_code == UNORDERED_EXPR)
    {
      if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
	return omit_two_operands (type, integer_zero_node, arg0, arg1);
      return fold_build2 (UNORDERED_EXPR, type, arg0, arg1);
    }

  /* The comparison codes give the OPPOSITE of the desired result, so
     build the negation of the selected comparison.  */
  code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
						   : ordered_code;
  return fold_build1 (TRUTH_NOT_EXPR, type,
		      fold_build2 (code, type, arg0, arg1));
}
9563
9564 /* Fold a call to built-in function FNDECL with 0 arguments.
9565 IGNORE is true if the result of the function call is ignored. This
9566 function returns NULL_TREE if no simplification was possible. */
9567
9568 static tree
9569 fold_builtin_0 (tree fndecl, bool ignore ATTRIBUTE_UNUSED)
9570 {
9571 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9572 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9573 switch (fcode)
9574 {
9575 CASE_FLT_FN (BUILT_IN_INF):
9576 case BUILT_IN_INFD32:
9577 case BUILT_IN_INFD64:
9578 case BUILT_IN_INFD128:
9579 return fold_builtin_inf (type, true);
9580
9581 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9582 return fold_builtin_inf (type, false);
9583
9584 case BUILT_IN_CLASSIFY_TYPE:
9585 return fold_builtin_classify_type (NULL_TREE);
9586
9587 default:
9588 break;
9589 }
9590 return NULL_TREE;
9591 }
9592
9593 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9594 IGNORE is true if the result of the function call is ignored. This
9595 function returns NULL_TREE if no simplification was possible. */
9596
9597 static tree
9598 fold_builtin_1 (tree fndecl, tree arg0, bool ignore)
9599 {
9600 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9601 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9602 switch (fcode)
9603 {
9604
9605 case BUILT_IN_CONSTANT_P:
9606 {
9607 tree val = fold_builtin_constant_p (arg0);
9608
9609 /* Gimplification will pull the CALL_EXPR for the builtin out of
9610 an if condition. When not optimizing, we'll not CSE it back.
9611 To avoid link error types of regressions, return false now. */
9612 if (!val && !optimize)
9613 val = integer_zero_node;
9614
9615 return val;
9616 }
9617
9618 case BUILT_IN_CLASSIFY_TYPE:
9619 return fold_builtin_classify_type (arg0);
9620
9621 case BUILT_IN_STRLEN:
9622 return fold_builtin_strlen (arg0);
9623
9624 CASE_FLT_FN (BUILT_IN_FABS):
9625 return fold_builtin_fabs (arg0, type);
9626
9627 case BUILT_IN_ABS:
9628 case BUILT_IN_LABS:
9629 case BUILT_IN_LLABS:
9630 case BUILT_IN_IMAXABS:
9631 return fold_builtin_abs (arg0, type);
9632
9633 CASE_FLT_FN (BUILT_IN_CONJ):
9634 if (validate_arg (arg0, COMPLEX_TYPE))
9635 return fold_build1 (CONJ_EXPR, type, arg0);
9636 break;
9637
9638 CASE_FLT_FN (BUILT_IN_CREAL):
9639 if (validate_arg (arg0, COMPLEX_TYPE))
9640 return non_lvalue (fold_build1 (REALPART_EXPR, type, arg0));;
9641 break;
9642
9643 CASE_FLT_FN (BUILT_IN_CIMAG):
9644 if (validate_arg (arg0, COMPLEX_TYPE))
9645 return non_lvalue (fold_build1 (IMAGPART_EXPR, type, arg0));
9646 break;
9647
9648 CASE_FLT_FN (BUILT_IN_CCOS):
9649 CASE_FLT_FN (BUILT_IN_CCOSH):
9650 /* These functions are "even", i.e. f(x) == f(-x). */
9651 if (validate_arg (arg0, COMPLEX_TYPE))
9652 {
9653 tree narg = fold_strip_sign_ops (arg0);
9654 if (narg)
9655 return build_call_expr (fndecl, 1, narg);
9656 }
9657 break;
9658
9659 CASE_FLT_FN (BUILT_IN_CABS):
9660 return fold_builtin_cabs (arg0, type, fndecl);
9661
9662 CASE_FLT_FN (BUILT_IN_CARG):
9663 return fold_builtin_carg (arg0, type);
9664
9665 CASE_FLT_FN (BUILT_IN_SQRT):
9666 return fold_builtin_sqrt (arg0, type);
9667
9668 CASE_FLT_FN (BUILT_IN_CBRT):
9669 return fold_builtin_cbrt (arg0, type);
9670
9671 CASE_FLT_FN (BUILT_IN_ASIN):
9672 if (validate_arg (arg0, REAL_TYPE))
9673 return do_mpfr_arg1 (arg0, type, mpfr_asin,
9674 &dconstm1, &dconst1, true);
9675 break;
9676
9677 CASE_FLT_FN (BUILT_IN_ACOS):
9678 if (validate_arg (arg0, REAL_TYPE))
9679 return do_mpfr_arg1 (arg0, type, mpfr_acos,
9680 &dconstm1, &dconst1, true);
9681 break;
9682
9683 CASE_FLT_FN (BUILT_IN_ATAN):
9684 if (validate_arg (arg0, REAL_TYPE))
9685 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
9686 break;
9687
9688 CASE_FLT_FN (BUILT_IN_ASINH):
9689 if (validate_arg (arg0, REAL_TYPE))
9690 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
9691 break;
9692
9693 CASE_FLT_FN (BUILT_IN_ACOSH):
9694 if (validate_arg (arg0, REAL_TYPE))
9695 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
9696 &dconst1, NULL, true);
9697 break;
9698
9699 CASE_FLT_FN (BUILT_IN_ATANH):
9700 if (validate_arg (arg0, REAL_TYPE))
9701 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
9702 &dconstm1, &dconst1, false);
9703 break;
9704
9705 CASE_FLT_FN (BUILT_IN_SIN):
9706 if (validate_arg (arg0, REAL_TYPE))
9707 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
9708 break;
9709
9710 CASE_FLT_FN (BUILT_IN_COS):
9711 return fold_builtin_cos (arg0, type, fndecl);
9712 break;
9713
9714 CASE_FLT_FN (BUILT_IN_TAN):
9715 return fold_builtin_tan (arg0, type);
9716
9717 CASE_FLT_FN (BUILT_IN_CEXP):
9718 return fold_builtin_cexp (arg0, type);
9719
9720 CASE_FLT_FN (BUILT_IN_CEXPI):
9721 if (validate_arg (arg0, REAL_TYPE))
9722 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
9723 break;
9724
9725 CASE_FLT_FN (BUILT_IN_SINH):
9726 if (validate_arg (arg0, REAL_TYPE))
9727 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
9728 break;
9729
9730 CASE_FLT_FN (BUILT_IN_COSH):
9731 return fold_builtin_cosh (arg0, type, fndecl);
9732
9733 CASE_FLT_FN (BUILT_IN_TANH):
9734 if (validate_arg (arg0, REAL_TYPE))
9735 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
9736 break;
9737
9738 CASE_FLT_FN (BUILT_IN_ERF):
9739 if (validate_arg (arg0, REAL_TYPE))
9740 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
9741 break;
9742
9743 CASE_FLT_FN (BUILT_IN_ERFC):
9744 if (validate_arg (arg0, REAL_TYPE))
9745 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
9746 break;
9747
9748 CASE_FLT_FN (BUILT_IN_TGAMMA):
9749 if (validate_arg (arg0, REAL_TYPE))
9750 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
9751 break;
9752
9753 CASE_FLT_FN (BUILT_IN_EXP):
9754 return fold_builtin_exponent (fndecl, arg0, mpfr_exp);
9755
9756 CASE_FLT_FN (BUILT_IN_EXP2):
9757 return fold_builtin_exponent (fndecl, arg0, mpfr_exp2);
9758
9759 CASE_FLT_FN (BUILT_IN_EXP10):
9760 CASE_FLT_FN (BUILT_IN_POW10):
9761 return fold_builtin_exponent (fndecl, arg0, mpfr_exp10);
9762
9763 CASE_FLT_FN (BUILT_IN_EXPM1):
9764 if (validate_arg (arg0, REAL_TYPE))
9765 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
9766 break;
9767
9768 CASE_FLT_FN (BUILT_IN_LOG):
9769 return fold_builtin_logarithm (fndecl, arg0, mpfr_log);
9770
9771 CASE_FLT_FN (BUILT_IN_LOG2):
9772 return fold_builtin_logarithm (fndecl, arg0, mpfr_log2);
9773
9774 CASE_FLT_FN (BUILT_IN_LOG10):
9775 return fold_builtin_logarithm (fndecl, arg0, mpfr_log10);
9776
9777 CASE_FLT_FN (BUILT_IN_LOG1P):
9778 if (validate_arg (arg0, REAL_TYPE))
9779 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
9780 &dconstm1, NULL, false);
9781 break;
9782
9783 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
9784 CASE_FLT_FN (BUILT_IN_J0):
9785 if (validate_arg (arg0, REAL_TYPE))
9786 return do_mpfr_arg1 (arg0, type, mpfr_j0,
9787 NULL, NULL, 0);
9788 break;
9789
9790 CASE_FLT_FN (BUILT_IN_J1):
9791 if (validate_arg (arg0, REAL_TYPE))
9792 return do_mpfr_arg1 (arg0, type, mpfr_j1,
9793 NULL, NULL, 0);
9794 break;
9795
9796 CASE_FLT_FN (BUILT_IN_Y0):
9797 if (validate_arg (arg0, REAL_TYPE))
9798 return do_mpfr_arg1 (arg0, type, mpfr_y0,
9799 &dconst0, NULL, false);
9800 break;
9801
9802 CASE_FLT_FN (BUILT_IN_Y1):
9803 if (validate_arg (arg0, REAL_TYPE))
9804 return do_mpfr_arg1 (arg0, type, mpfr_y1,
9805 &dconst0, NULL, false);
9806 break;
9807 #endif
9808
9809 CASE_FLT_FN (BUILT_IN_NAN):
9810 case BUILT_IN_NAND32:
9811 case BUILT_IN_NAND64:
9812 case BUILT_IN_NAND128:
9813 return fold_builtin_nan (arg0, type, true);
9814
9815 CASE_FLT_FN (BUILT_IN_NANS):
9816 return fold_builtin_nan (arg0, type, false);
9817
9818 CASE_FLT_FN (BUILT_IN_FLOOR):
9819 return fold_builtin_floor (fndecl, arg0);
9820
9821 CASE_FLT_FN (BUILT_IN_CEIL):
9822 return fold_builtin_ceil (fndecl, arg0);
9823
9824 CASE_FLT_FN (BUILT_IN_TRUNC):
9825 return fold_builtin_trunc (fndecl, arg0);
9826
9827 CASE_FLT_FN (BUILT_IN_ROUND):
9828 return fold_builtin_round (fndecl, arg0);
9829
9830 CASE_FLT_FN (BUILT_IN_NEARBYINT):
9831 CASE_FLT_FN (BUILT_IN_RINT):
9832 return fold_trunc_transparent_mathfn (fndecl, arg0);
9833
9834 CASE_FLT_FN (BUILT_IN_LCEIL):
9835 CASE_FLT_FN (BUILT_IN_LLCEIL):
9836 CASE_FLT_FN (BUILT_IN_LFLOOR):
9837 CASE_FLT_FN (BUILT_IN_LLFLOOR):
9838 CASE_FLT_FN (BUILT_IN_LROUND):
9839 CASE_FLT_FN (BUILT_IN_LLROUND):
9840 return fold_builtin_int_roundingfn (fndecl, arg0);
9841
9842 CASE_FLT_FN (BUILT_IN_LRINT):
9843 CASE_FLT_FN (BUILT_IN_LLRINT):
9844 return fold_fixed_mathfn (fndecl, arg0);
9845
9846 case BUILT_IN_BSWAP32:
9847 case BUILT_IN_BSWAP64:
9848 return fold_builtin_bswap (fndecl, arg0);
9849
9850 CASE_INT_FN (BUILT_IN_FFS):
9851 CASE_INT_FN (BUILT_IN_CLZ):
9852 CASE_INT_FN (BUILT_IN_CTZ):
9853 CASE_INT_FN (BUILT_IN_POPCOUNT):
9854 CASE_INT_FN (BUILT_IN_PARITY):
9855 return fold_builtin_bitop (fndecl, arg0);
9856
9857 CASE_FLT_FN (BUILT_IN_SIGNBIT):
9858 return fold_builtin_signbit (arg0, type);
9859
9860 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
9861 return fold_builtin_significand (arg0, type);
9862
9863 CASE_FLT_FN (BUILT_IN_ILOGB):
9864 CASE_FLT_FN (BUILT_IN_LOGB):
9865 return fold_builtin_logb (arg0, type);
9866
9867 case BUILT_IN_ISASCII:
9868 return fold_builtin_isascii (arg0);
9869
9870 case BUILT_IN_TOASCII:
9871 return fold_builtin_toascii (arg0);
9872
9873 case BUILT_IN_ISDIGIT:
9874 return fold_builtin_isdigit (arg0);
9875
9876 CASE_FLT_FN (BUILT_IN_FINITE):
9877 case BUILT_IN_FINITED32:
9878 case BUILT_IN_FINITED64:
9879 case BUILT_IN_FINITED128:
9880 return fold_builtin_classify (fndecl, arg0, BUILT_IN_FINITE);
9881
9882 CASE_FLT_FN (BUILT_IN_ISINF):
9883 case BUILT_IN_ISINFD32:
9884 case BUILT_IN_ISINFD64:
9885 case BUILT_IN_ISINFD128:
9886 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISINF);
9887
9888 CASE_FLT_FN (BUILT_IN_ISNAN):
9889 case BUILT_IN_ISNAND32:
9890 case BUILT_IN_ISNAND64:
9891 case BUILT_IN_ISNAND128:
9892 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISNAN);
9893
9894 case BUILT_IN_PRINTF:
9895 case BUILT_IN_PRINTF_UNLOCKED:
9896 case BUILT_IN_VPRINTF:
9897 return fold_builtin_printf (fndecl, arg0, NULL_TREE, ignore, fcode);
9898
9899 default:
9900 break;
9901 }
9902
9903 return NULL_TREE;
9904
9905 }
9906
9907 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
9908 IGNORE is true if the result of the function call is ignored. This
9909 function returns NULL_TREE if no simplification was possible. */
9910
9911 static tree
9912 fold_builtin_2 (tree fndecl, tree arg0, tree arg1, bool ignore)
9913 {
9914 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9915 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9916
9917 switch (fcode)
9918 {
9919 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
9920 CASE_FLT_FN (BUILT_IN_JN):
9921 if (validate_arg (arg0, INTEGER_TYPE)
9922 && validate_arg (arg1, REAL_TYPE))
9923 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
9924 break;
9925
9926 CASE_FLT_FN (BUILT_IN_YN):
9927 if (validate_arg (arg0, INTEGER_TYPE)
9928 && validate_arg (arg1, REAL_TYPE))
9929 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
9930 &dconst0, false);
9931 break;
9932
9933 CASE_FLT_FN (BUILT_IN_DREM):
9934 CASE_FLT_FN (BUILT_IN_REMAINDER):
9935 if (validate_arg (arg0, REAL_TYPE)
9936 && validate_arg(arg1, REAL_TYPE))
9937 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
9938 break;
9939
9940 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
9941 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
9942 if (validate_arg (arg0, REAL_TYPE)
9943 && validate_arg(arg1, POINTER_TYPE))
9944 return do_mpfr_lgamma_r (arg0, arg1, type);
9945 break;
9946 #endif
9947
9948 CASE_FLT_FN (BUILT_IN_ATAN2):
9949 if (validate_arg (arg0, REAL_TYPE)
9950 && validate_arg(arg1, REAL_TYPE))
9951 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
9952 break;
9953
9954 CASE_FLT_FN (BUILT_IN_FDIM):
9955 if (validate_arg (arg0, REAL_TYPE)
9956 && validate_arg(arg1, REAL_TYPE))
9957 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
9958 break;
9959
9960 CASE_FLT_FN (BUILT_IN_HYPOT):
9961 return fold_builtin_hypot (fndecl, arg0, arg1, type);
9962
9963 CASE_FLT_FN (BUILT_IN_LDEXP):
9964 return fold_builtin_load_exponent (arg0, arg1, type, /*ldexp=*/true);
9965 CASE_FLT_FN (BUILT_IN_SCALBN):
9966 CASE_FLT_FN (BUILT_IN_SCALBLN):
9967 return fold_builtin_load_exponent (arg0, arg1, type, /*ldexp=*/false);
9968
9969 CASE_FLT_FN (BUILT_IN_FREXP):
9970 return fold_builtin_frexp (arg0, arg1, type);
9971
9972 CASE_FLT_FN (BUILT_IN_MODF):
9973 return fold_builtin_modf (arg0, arg1, type);
9974
9975 case BUILT_IN_BZERO:
9976 return fold_builtin_bzero (arg0, arg1, ignore);
9977
9978 case BUILT_IN_FPUTS:
9979 return fold_builtin_fputs (arg0, arg1, ignore, false, NULL_TREE);
9980
9981 case BUILT_IN_FPUTS_UNLOCKED:
9982 return fold_builtin_fputs (arg0, arg1, ignore, true, NULL_TREE);
9983
9984 case BUILT_IN_STRSTR:
9985 return fold_builtin_strstr (arg0, arg1, type);
9986
9987 case BUILT_IN_STRCAT:
9988 return fold_builtin_strcat (arg0, arg1);
9989
9990 case BUILT_IN_STRSPN:
9991 return fold_builtin_strspn (arg0, arg1);
9992
9993 case BUILT_IN_STRCSPN:
9994 return fold_builtin_strcspn (arg0, arg1);
9995
9996 case BUILT_IN_STRCHR:
9997 case BUILT_IN_INDEX:
9998 return fold_builtin_strchr (arg0, arg1, type);
9999
10000 case BUILT_IN_STRRCHR:
10001 case BUILT_IN_RINDEX:
10002 return fold_builtin_strrchr (arg0, arg1, type);
10003
10004 case BUILT_IN_STRCPY:
10005 return fold_builtin_strcpy (fndecl, arg0, arg1, NULL_TREE);
10006
10007 case BUILT_IN_STRCMP:
10008 return fold_builtin_strcmp (arg0, arg1);
10009
10010 case BUILT_IN_STRPBRK:
10011 return fold_builtin_strpbrk (arg0, arg1, type);
10012
10013 case BUILT_IN_EXPECT:
10014 return fold_builtin_expect (arg0);
10015
10016 CASE_FLT_FN (BUILT_IN_POW):
10017 return fold_builtin_pow (fndecl, arg0, arg1, type);
10018
10019 CASE_FLT_FN (BUILT_IN_POWI):
10020 return fold_builtin_powi (fndecl, arg0, arg1, type);
10021
10022 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10023 return fold_builtin_copysign (fndecl, arg0, arg1, type);
10024
10025 CASE_FLT_FN (BUILT_IN_FMIN):
10026 return fold_builtin_fmin_fmax (arg0, arg1, type, /*max=*/false);
10027
10028 CASE_FLT_FN (BUILT_IN_FMAX):
10029 return fold_builtin_fmin_fmax (arg0, arg1, type, /*max=*/true);
10030
10031 case BUILT_IN_ISGREATER:
10032 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNLE_EXPR, LE_EXPR);
10033 case BUILT_IN_ISGREATEREQUAL:
10034 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNLT_EXPR, LT_EXPR);
10035 case BUILT_IN_ISLESS:
10036 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNGE_EXPR, GE_EXPR);
10037 case BUILT_IN_ISLESSEQUAL:
10038 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNGT_EXPR, GT_EXPR);
10039 case BUILT_IN_ISLESSGREATER:
10040 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10041 case BUILT_IN_ISUNORDERED:
10042 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNORDERED_EXPR,
10043 NOP_EXPR);
10044
10045 /* We do the folding for va_start in the expander. */
10046 case BUILT_IN_VA_START:
10047 break;
10048
10049 case BUILT_IN_SPRINTF:
10050 return fold_builtin_sprintf (arg0, arg1, NULL_TREE, ignore);
10051
10052 case BUILT_IN_OBJECT_SIZE:
10053 return fold_builtin_object_size (arg0, arg1);
10054
10055 case BUILT_IN_PRINTF:
10056 case BUILT_IN_PRINTF_UNLOCKED:
10057 case BUILT_IN_VPRINTF:
10058 return fold_builtin_printf (fndecl, arg0, arg1, ignore, fcode);
10059
10060 case BUILT_IN_PRINTF_CHK:
10061 case BUILT_IN_VPRINTF_CHK:
10062 if (!validate_arg (arg0, INTEGER_TYPE)
10063 || TREE_SIDE_EFFECTS (arg0))
10064 return NULL_TREE;
10065 else
10066 return fold_builtin_printf (fndecl, arg1, NULL_TREE, ignore, fcode);
10067 break;
10068
10069 case BUILT_IN_FPRINTF:
10070 case BUILT_IN_FPRINTF_UNLOCKED:
10071 case BUILT_IN_VFPRINTF:
10072 return fold_builtin_fprintf (fndecl, arg0, arg1, NULL_TREE,
10073 ignore, fcode);
10074
10075 default:
10076 break;
10077 }
10078 return NULL_TREE;
10079 }
10080
10081 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10082 and ARG2. IGNORE is true if the result of the function call is ignored.
10083 This function returns NULL_TREE if no simplification was possible. */
10084
10085 static tree
10086 fold_builtin_3 (tree fndecl, tree arg0, tree arg1, tree arg2, bool ignore)
10087 {
10088 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10089 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10090 switch (fcode)
10091 {
10092
10093 CASE_FLT_FN (BUILT_IN_SINCOS):
10094 return fold_builtin_sincos (arg0, arg1, arg2);
10095
10096 CASE_FLT_FN (BUILT_IN_FMA):
10097 if (validate_arg (arg0, REAL_TYPE)
10098 && validate_arg(arg1, REAL_TYPE)
10099 && validate_arg(arg2, REAL_TYPE))
10100 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
10101 break;
10102
10103 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
10104 CASE_FLT_FN (BUILT_IN_REMQUO):
10105 if (validate_arg (arg0, REAL_TYPE)
10106 && validate_arg(arg1, REAL_TYPE)
10107 && validate_arg(arg2, POINTER_TYPE))
10108 return do_mpfr_remquo (arg0, arg1, arg2);
10109 break;
10110 #endif
10111
10112 case BUILT_IN_MEMSET:
10113 return fold_builtin_memset (arg0, arg1, arg2, type, ignore);
10114
10115 case BUILT_IN_BCOPY:
10116 return fold_builtin_memory_op (arg1, arg0, arg2, void_type_node, true, /*endp=*/3);
10117
10118 case BUILT_IN_MEMCPY:
10119 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/0);
10120
10121 case BUILT_IN_MEMPCPY:
10122 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/1);
10123
10124 case BUILT_IN_MEMMOVE:
10125 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/3);
10126
10127 case BUILT_IN_STRNCAT:
10128 return fold_builtin_strncat (arg0, arg1, arg2);
10129
10130 case BUILT_IN_STRNCPY:
10131 return fold_builtin_strncpy (fndecl, arg0, arg1, arg2, NULL_TREE);
10132
10133 case BUILT_IN_STRNCMP:
10134 return fold_builtin_strncmp (arg0, arg1, arg2);
10135
10136 case BUILT_IN_MEMCHR:
10137 return fold_builtin_memchr (arg0, arg1, arg2, type);
10138
10139 case BUILT_IN_BCMP:
10140 case BUILT_IN_MEMCMP:
10141 return fold_builtin_memcmp (arg0, arg1, arg2);;
10142
10143 case BUILT_IN_SPRINTF:
10144 return fold_builtin_sprintf (arg0, arg1, arg2, ignore);
10145
10146 case BUILT_IN_STRCPY_CHK:
10147 case BUILT_IN_STPCPY_CHK:
10148 return fold_builtin_stxcpy_chk (fndecl, arg0, arg1, arg2, NULL_TREE,
10149 ignore, fcode);
10150
10151 case BUILT_IN_STRCAT_CHK:
10152 return fold_builtin_strcat_chk (fndecl, arg0, arg1, arg2);
10153
10154 case BUILT_IN_PRINTF_CHK:
10155 case BUILT_IN_VPRINTF_CHK:
10156 if (!validate_arg (arg0, INTEGER_TYPE)
10157 || TREE_SIDE_EFFECTS (arg0))
10158 return NULL_TREE;
10159 else
10160 return fold_builtin_printf (fndecl, arg1, arg2, ignore, fcode);
10161 break;
10162
10163 case BUILT_IN_FPRINTF:
10164 case BUILT_IN_FPRINTF_UNLOCKED:
10165 case BUILT_IN_VFPRINTF:
10166 return fold_builtin_fprintf (fndecl, arg0, arg1, arg2, ignore, fcode);
10167
10168 case BUILT_IN_FPRINTF_CHK:
10169 case BUILT_IN_VFPRINTF_CHK:
10170 if (!validate_arg (arg1, INTEGER_TYPE)
10171 || TREE_SIDE_EFFECTS (arg1))
10172 return NULL_TREE;
10173 else
10174 return fold_builtin_fprintf (fndecl, arg0, arg2, NULL_TREE,
10175 ignore, fcode);
10176
10177 default:
10178 break;
10179 }
10180 return NULL_TREE;
10181 }
10182
10183 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10184 ARG2, and ARG3. IGNORE is true if the result of the function call is
10185 ignored. This function returns NULL_TREE if no simplification was
10186 possible. */
10187
static tree
fold_builtin_4 (tree fndecl, tree arg0, tree arg1, tree arg2, tree arg3,
		bool ignore)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  switch (fcode)
    {
    /* Fortified memory operations; the helper receives all four
       arguments plus the function code to pick the replacement.  */
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      return fold_builtin_memory_chk (fndecl, arg0, arg1, arg2, arg3,
				      NULL_TREE, ignore,
				      DECL_FUNCTION_CODE (fndecl));

    case BUILT_IN_STRNCPY_CHK:
      return fold_builtin_strncpy_chk (arg0, arg1, arg2, arg3, NULL_TREE);

    case BUILT_IN_STRNCAT_CHK:
      return fold_builtin_strncat_chk (fndecl, arg0, arg1, arg2, arg3);

    case BUILT_IN_FPRINTF_CHK:
    case BUILT_IN_VFPRINTF_CHK:
      /* ARG1 is the _chk argument that gets dropped from the folded
	 form; only fold when it is a side-effect-free integer.  */
      if (!validate_arg (arg1, INTEGER_TYPE)
	  || TREE_SIDE_EFFECTS (arg1))
	return NULL_TREE;
      else
	return fold_builtin_fprintf (fndecl, arg0, arg2, arg3,
				     ignore, fcode);
      break;

    default:
      break;
    }
  return NULL_TREE;
}
10225
10226 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10227 arguments, where NARGS <= 4. IGNORE is true if the result of the
10228 function call is ignored. This function returns NULL_TREE if no
10229 simplification was possible. Note that this only folds builtins with
10230 fixed argument patterns. Foldings that do varargs-to-varargs
10231 transformations, or that match calls with more than 4 arguments,
10232 need to be handled with fold_builtin_varargs instead. */
10233
10234 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10235
10236 static tree
10237 fold_builtin_n (tree fndecl, tree *args, int nargs, bool ignore)
10238 {
10239 tree ret = NULL_TREE;
10240 switch (nargs)
10241 {
10242 case 0:
10243 ret = fold_builtin_0 (fndecl, ignore);
10244 break;
10245 case 1:
10246 ret = fold_builtin_1 (fndecl, args[0], ignore);
10247 break;
10248 case 2:
10249 ret = fold_builtin_2 (fndecl, args[0], args[1], ignore);
10250 break;
10251 case 3:
10252 ret = fold_builtin_3 (fndecl, args[0], args[1], args[2], ignore);
10253 break;
10254 case 4:
10255 ret = fold_builtin_4 (fndecl, args[0], args[1], args[2], args[3],
10256 ignore);
10257 break;
10258 default:
10259 break;
10260 }
10261 if (ret)
10262 {
10263 ret = build1 (NOP_EXPR, GENERIC_TREE_TYPE (ret), ret);
10264 TREE_NO_WARNING (ret) = 1;
10265 return ret;
10266 }
10267 return NULL_TREE;
10268 }
10269
10270 /* Builtins with folding operations that operate on "..." arguments
10271 need special handling; we need to store the arguments in a convenient
10272 data structure before attempting any folding. Fortunately there are
10273 only a few builtins that fall into this category. FNDECL is the
10274 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
10275 result of the function call is ignored. */
10276
10277 static tree
10278 fold_builtin_varargs (tree fndecl, tree exp, bool ignore ATTRIBUTE_UNUSED)
10279 {
10280 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10281 tree ret = NULL_TREE;
10282
10283 switch (fcode)
10284 {
10285 case BUILT_IN_SPRINTF_CHK:
10286 case BUILT_IN_VSPRINTF_CHK:
10287 ret = fold_builtin_sprintf_chk (exp, fcode);
10288 break;
10289
10290 case BUILT_IN_SNPRINTF_CHK:
10291 case BUILT_IN_VSNPRINTF_CHK:
10292 ret = fold_builtin_snprintf_chk (exp, NULL_TREE, fcode);
10293
10294 default:
10295 break;
10296 }
10297 if (ret)
10298 {
10299 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10300 TREE_NO_WARNING (ret) = 1;
10301 return ret;
10302 }
10303 return NULL_TREE;
10304 }
10305
10306 /* A wrapper function for builtin folding that prevents warnings for
10307 "statement without effect" and the like, caused by removing the
10308 call node earlier than the warning is generated. */
10309
tree
fold_call_expr (tree exp, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = get_callee_fndecl (exp);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl))
    {
      /* Machine-dependent builtins are folded by the target hook.
	 FIXME: Don't use a list in this interface.  */
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	return targetm.fold_builtin (fndecl, CALL_EXPR_ARGS (exp), ignore);
      else
	{
	  /* Try the fixed-arity folders first, then fall back to the
	     varargs folder.  */
	  int nargs = call_expr_nargs (exp);
	  if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
	    {
	      tree *args = CALL_EXPR_ARGP (exp);
	      ret = fold_builtin_n (fndecl, args, nargs, ignore);
	    }
	  if (!ret)
	    ret = fold_builtin_varargs (fndecl, exp, ignore);
	  if (ret)
	    {
	      /* Propagate location information from original call to
		 expansion of builtin.  Otherwise things like
		 maybe_emit_chk_warning, that operate on the expansion
		 of a builtin, will use the wrong location information.  */
	      if (CAN_HAVE_LOCATION_P (exp) && EXPR_HAS_LOCATION (exp))
		{
		  /* The folders wrap results in a NOP_EXPR; set the
		     location on the wrapped expression.  */
		  tree realret = ret;
		  if (TREE_CODE (ret) == NOP_EXPR)
		    realret = TREE_OPERAND (ret, 0);
		  if (CAN_HAVE_LOCATION_P (realret)
		      && !EXPR_HAS_LOCATION (realret))
		    SET_EXPR_LOCATION (realret, EXPR_LOCATION (exp));
		}
	      return ret;
	    }
	}
    }
  return NULL_TREE;
}
10353
10354 /* Conveniently construct a function call expression. FNDECL names the
10355 function to be called and ARGLIST is a TREE_LIST of arguments. */
10356
10357 tree
10358 build_function_call_expr (tree fndecl, tree arglist)
10359 {
10360 tree fntype = TREE_TYPE (fndecl);
10361 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10362 int n = list_length (arglist);
10363 tree *argarray = (tree *) alloca (n * sizeof (tree));
10364 int i;
10365
10366 for (i = 0; i < n; i++, arglist = TREE_CHAIN (arglist))
10367 argarray[i] = TREE_VALUE (arglist);
10368 return fold_builtin_call_array (TREE_TYPE (fntype), fn, n, argarray);
10369 }
10370
10371 /* Conveniently construct a function call expression. FNDECL names the
10372 function to be called, N is the number of arguments, and the "..."
10373 parameters are the argument expressions. */
10374
10375 tree
10376 build_call_expr (tree fndecl, int n, ...)
10377 {
10378 va_list ap;
10379 tree fntype = TREE_TYPE (fndecl);
10380 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10381 tree *argarray = (tree *) alloca (n * sizeof (tree));
10382 int i;
10383
10384 va_start (ap, n);
10385 for (i = 0; i < n; i++)
10386 argarray[i] = va_arg (ap, tree);
10387 va_end (ap);
10388 return fold_builtin_call_array (TREE_TYPE (fntype), fn, n, argarray);
10389 }
10390
10391 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
10392 N arguments are passed in the array ARGARRAY. */
10393
tree
fold_builtin_call_array (tree type,
			 tree fn,
			 int n,
			 tree *argarray)
{
  tree ret = NULL_TREE;
  int i;
  tree exp;

  if (TREE_CODE (fn) == ADDR_EXPR)
    {
      tree fndecl = TREE_OPERAND (fn, 0);
      if (TREE_CODE (fndecl) == FUNCTION_DECL
	  && DECL_BUILT_IN (fndecl))
	{
	  /* Machine-dependent builtins take a TREE_LIST, so cons one
	     up from the array (in reverse to keep the order).  */
	  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	    {
	      tree arglist = NULL_TREE;
	      for (i = n - 1; i >= 0; i--)
		arglist = tree_cons (NULL_TREE, argarray[i], arglist);
	      ret = targetm.fold_builtin (fndecl, arglist, false);
	      if (ret)
		return ret;
	    }
	  else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
	    {
	      /* First try the transformations that don't require consing up
		 an exp.  */
	      ret = fold_builtin_n (fndecl, argarray, n, false);
	      if (ret)
		return ret;
	    }

	  /* If we got this far, we need to build an exp.  */
	  exp = build_call_array (type, fn, n, argarray);
	  ret = fold_builtin_varargs (fndecl, exp, false);
	  return ret ? ret : exp;
	}
    }

  /* Not a builtin (or not an ADDR_EXPR at all): just build the call.  */
  return build_call_array (type, fn, n, argarray);
}
10437
10438 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10439 along with N new arguments specified as the "..." parameters. SKIP
10440 is the number of arguments in EXP to be omitted. This function is used
10441 to do varargs-to-varargs transformations. */
10442
static tree
rewrite_call_expr (tree exp, int skip, tree fndecl, int n, ...)
{
  int oldnargs = call_expr_nargs (exp);
  int nargs = oldnargs - skip + n;
  tree fntype = TREE_TYPE (fndecl);
  tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
  tree *buffer;

  if (n > 0)
    {
      int i, j;
      va_list ap;

      /* Copy the new leading arguments, then the retained tail of the
	 original argument list, into a fresh buffer.  */
      buffer = alloca (nargs * sizeof (tree));
      va_start (ap, n);
      for (i = 0; i < n; i++)
	buffer[i] = va_arg (ap, tree);
      va_end (ap);
      for (j = skip; j < oldnargs; j++, i++)
	buffer[i] = CALL_EXPR_ARG (exp, j);
    }
  else
    /* No new arguments: point directly into EXP's argument array past
       the skipped entries — build_call_array copies, so this aliasing
       is safe.  */
    buffer = CALL_EXPR_ARGP (exp) + skip;

  return fold (build_call_array (TREE_TYPE (exp), fn, nargs, buffer));
}
10470
10471 /* Validate a single argument ARG against a tree code CODE representing
10472 a type. */
10473
10474 static bool
10475 validate_arg (tree arg, enum tree_code code)
10476 {
10477 if (!arg)
10478 return false;
10479 else if (code == POINTER_TYPE)
10480 return POINTER_TYPE_P (TREE_TYPE (arg));
10481 return code == TREE_CODE (TREE_TYPE (arg));
10482 }
10483
10484 /* This function validates the types of a function call argument list
10485 against a specified list of tree_codes. If the last specifier is a 0,
10486 that represents an ellipses, otherwise the last specifier must be a
10487 VOID_TYPE. */
10488
10489 bool
10490 validate_arglist (tree callexpr, ...)
10491 {
10492 enum tree_code code;
10493 bool res = 0;
10494 va_list ap;
10495 call_expr_arg_iterator iter;
10496 tree arg;
10497
10498 va_start (ap, callexpr);
10499 init_call_expr_arg_iterator (callexpr, &iter);
10500
10501 do
10502 {
10503 code = va_arg (ap, enum tree_code);
10504 switch (code)
10505 {
10506 case 0:
10507 /* This signifies an ellipses, any further arguments are all ok. */
10508 res = true;
10509 goto end;
10510 case VOID_TYPE:
10511 /* This signifies an endlink, if no arguments remain, return
10512 true, otherwise return false. */
10513 res = !more_call_expr_args_p (&iter);
10514 goto end;
10515 default:
10516 /* If no parameters remain or the parameter's code does not
10517 match the specified code, return false. Otherwise continue
10518 checking any remaining arguments. */
10519 arg = next_call_expr_arg (&iter);
10520 if (!validate_arg (arg, code))
10521 goto end;
10522 break;
10523 }
10524 }
10525 while (1);
10526
10527 /* We need gotos here since we can only have one VA_CLOSE in a
10528 function. */
10529 end: ;
10530 va_end (ap);
10531
10532 return res;
10533 }
10534
10535 /* Default target-specific builtin expander that does nothing. */
10536
rtx
default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
			rtx target ATTRIBUTE_UNUSED,
			rtx subtarget ATTRIBUTE_UNUSED,
			enum machine_mode mode ATTRIBUTE_UNUSED,
			int ignore ATTRIBUTE_UNUSED)
{
  /* Always decline, so the caller falls back to the generic call
     expansion path.  */
  return NULL_RTX;
}
10546
/* Returns true if EXP represents data that would potentially reside
   in a readonly section.  */
10549
10550 static bool
10551 readonly_data_expr (tree exp)
10552 {
10553 STRIP_NOPS (exp);
10554
10555 if (TREE_CODE (exp) != ADDR_EXPR)
10556 return false;
10557
10558 exp = get_base_address (TREE_OPERAND (exp, 0));
10559 if (!exp)
10560 return false;
10561
10562 /* Make sure we call decl_readonly_section only for trees it
10563 can handle (since it returns true for everything it doesn't
10564 understand). */
10565 if (TREE_CODE (exp) == STRING_CST
10566 || TREE_CODE (exp) == CONSTRUCTOR
10567 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
10568 return decl_readonly_section (exp, 0);
10569 else
10570 return false;
10571 }
10572
10573 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
10574 to the call, and TYPE is its return type.
10575
10576 Return NULL_TREE if no simplification was possible, otherwise return the
10577 simplified form of the call as a tree.
10578
10579 The simplified form may be a constant or other expression which
10580 computes the same value, but in a more efficient manner (including
10581 calls to other builtin functions).
10582
10583 The call may contain arguments which need to be evaluated, but
10584 which are not useful to determine the result of the call. In
10585 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10586 COMPOUND_EXPR will be an argument which must be evaluated.
10587 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10588 COMPOUND_EXPR in the chain will contain the tree for the simplified
10589 form of the builtin function call. */
10590
10591 static tree
10592 fold_builtin_strstr (tree s1, tree s2, tree type)
10593 {
10594 if (!validate_arg (s1, POINTER_TYPE)
10595 || !validate_arg (s2, POINTER_TYPE))
10596 return NULL_TREE;
10597 else
10598 {
10599 tree fn;
10600 const char *p1, *p2;
10601
10602 p2 = c_getstr (s2);
10603 if (p2 == NULL)
10604 return NULL_TREE;
10605
10606 p1 = c_getstr (s1);
10607 if (p1 != NULL)
10608 {
10609 const char *r = strstr (p1, p2);
10610 tree tem;
10611
10612 if (r == NULL)
10613 return build_int_cst (TREE_TYPE (s1), 0);
10614
10615 /* Return an offset into the constant string argument. */
10616 tem = fold_build2 (PLUS_EXPR, TREE_TYPE (s1),
10617 s1, build_int_cst (TREE_TYPE (s1), r - p1));
10618 return fold_convert (type, tem);
10619 }
10620
10621 /* The argument is const char *, and the result is char *, so we need
10622 a type conversion here to avoid a warning. */
10623 if (p2[0] == '\0')
10624 return fold_convert (type, s1);
10625
10626 if (p2[1] != '\0')
10627 return NULL_TREE;
10628
10629 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
10630 if (!fn)
10631 return NULL_TREE;
10632
10633 /* New argument list transforming strstr(s1, s2) to
10634 strchr(s1, s2[0]). */
10635 return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
10636 }
10637 }
10638
10639 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
10640 the call, and TYPE is its return type.
10641
10642 Return NULL_TREE if no simplification was possible, otherwise return the
10643 simplified form of the call as a tree.
10644
10645 The simplified form may be a constant or other expression which
10646 computes the same value, but in a more efficient manner (including
10647 calls to other builtin functions).
10648
10649 The call may contain arguments which need to be evaluated, but
10650 which are not useful to determine the result of the call. In
10651 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10652 COMPOUND_EXPR will be an argument which must be evaluated.
10653 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10654 COMPOUND_EXPR in the chain will contain the tree for the simplified
10655 form of the builtin function call. */
10656
10657 static tree
10658 fold_builtin_strchr (tree s1, tree s2, tree type)
10659 {
10660 if (!validate_arg (s1, POINTER_TYPE)
10661 || !validate_arg (s2, INTEGER_TYPE))
10662 return NULL_TREE;
10663 else
10664 {
10665 const char *p1;
10666
10667 if (TREE_CODE (s2) != INTEGER_CST)
10668 return NULL_TREE;
10669
10670 p1 = c_getstr (s1);
10671 if (p1 != NULL)
10672 {
10673 char c;
10674 const char *r;
10675 tree tem;
10676
10677 if (target_char_cast (s2, &c))
10678 return NULL_TREE;
10679
10680 r = strchr (p1, c);
10681
10682 if (r == NULL)
10683 return build_int_cst (TREE_TYPE (s1), 0);
10684
10685 /* Return an offset into the constant string argument. */
10686 tem = fold_build2 (PLUS_EXPR, TREE_TYPE (s1),
10687 s1, build_int_cst (TREE_TYPE (s1), r - p1));
10688 return fold_convert (type, tem);
10689 }
10690 return NULL_TREE;
10691 }
10692 }
10693
10694 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
10695 the call, and TYPE is its return type.
10696
10697 Return NULL_TREE if no simplification was possible, otherwise return the
10698 simplified form of the call as a tree.
10699
10700 The simplified form may be a constant or other expression which
10701 computes the same value, but in a more efficient manner (including
10702 calls to other builtin functions).
10703
10704 The call may contain arguments which need to be evaluated, but
10705 which are not useful to determine the result of the call. In
10706 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10707 COMPOUND_EXPR will be an argument which must be evaluated.
10708 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10709 COMPOUND_EXPR in the chain will contain the tree for the simplified
10710 form of the builtin function call. */
10711
10712 static tree
10713 fold_builtin_strrchr (tree s1, tree s2, tree type)
10714 {
10715 if (!validate_arg (s1, POINTER_TYPE)
10716 || !validate_arg (s2, INTEGER_TYPE))
10717 return NULL_TREE;
10718 else
10719 {
10720 tree fn;
10721 const char *p1;
10722
10723 if (TREE_CODE (s2) != INTEGER_CST)
10724 return NULL_TREE;
10725
10726 p1 = c_getstr (s1);
10727 if (p1 != NULL)
10728 {
10729 char c;
10730 const char *r;
10731 tree tem;
10732
10733 if (target_char_cast (s2, &c))
10734 return NULL_TREE;
10735
10736 r = strrchr (p1, c);
10737
10738 if (r == NULL)
10739 return build_int_cst (TREE_TYPE (s1), 0);
10740
10741 /* Return an offset into the constant string argument. */
10742 tem = fold_build2 (PLUS_EXPR, TREE_TYPE (s1),
10743 s1, build_int_cst (TREE_TYPE (s1), r - p1));
10744 return fold_convert (type, tem);
10745 }
10746
10747 if (! integer_zerop (s2))
10748 return NULL_TREE;
10749
10750 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
10751 if (!fn)
10752 return NULL_TREE;
10753
10754 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
10755 return build_call_expr (fn, 2, s1, s2);
10756 }
10757 }
10758
10759 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
10760 to the call, and TYPE is its return type.
10761
10762 Return NULL_TREE if no simplification was possible, otherwise return the
10763 simplified form of the call as a tree.
10764
10765 The simplified form may be a constant or other expression which
10766 computes the same value, but in a more efficient manner (including
10767 calls to other builtin functions).
10768
10769 The call may contain arguments which need to be evaluated, but
10770 which are not useful to determine the result of the call. In
10771 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10772 COMPOUND_EXPR will be an argument which must be evaluated.
10773 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10774 COMPOUND_EXPR in the chain will contain the tree for the simplified
10775 form of the builtin function call. */
10776
10777 static tree
10778 fold_builtin_strpbrk (tree s1, tree s2, tree type)
10779 {
10780 if (!validate_arg (s1, POINTER_TYPE)
10781 || !validate_arg (s2, POINTER_TYPE))
10782 return NULL_TREE;
10783 else
10784 {
10785 tree fn;
10786 const char *p1, *p2;
10787
10788 p2 = c_getstr (s2);
10789 if (p2 == NULL)
10790 return NULL_TREE;
10791
10792 p1 = c_getstr (s1);
10793 if (p1 != NULL)
10794 {
10795 const char *r = strpbrk (p1, p2);
10796 tree tem;
10797
10798 if (r == NULL)
10799 return build_int_cst (TREE_TYPE (s1), 0);
10800
10801 /* Return an offset into the constant string argument. */
10802 tem = fold_build2 (PLUS_EXPR, TREE_TYPE (s1),
10803 s1, build_int_cst (TREE_TYPE (s1), r - p1));
10804 return fold_convert (type, tem);
10805 }
10806
10807 if (p2[0] == '\0')
10808 /* strpbrk(x, "") == NULL.
10809 Evaluate and ignore s1 in case it had side-effects. */
10810 return omit_one_operand (TREE_TYPE (s1), integer_zero_node, s1);
10811
10812 if (p2[1] != '\0')
10813 return NULL_TREE; /* Really call strpbrk. */
10814
10815 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
10816 if (!fn)
10817 return NULL_TREE;
10818
10819 /* New argument list transforming strpbrk(s1, s2) to
10820 strchr(s1, s2[0]). */
10821 return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
10822 }
10823 }
10824
10825 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
10826 to the call.
10827
10828 Return NULL_TREE if no simplification was possible, otherwise return the
10829 simplified form of the call as a tree.
10830
10831 The simplified form may be a constant or other expression which
10832 computes the same value, but in a more efficient manner (including
10833 calls to other builtin functions).
10834
10835 The call may contain arguments which need to be evaluated, but
10836 which are not useful to determine the result of the call. In
10837 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10838 COMPOUND_EXPR will be an argument which must be evaluated.
10839 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10840 COMPOUND_EXPR in the chain will contain the tree for the simplified
10841 form of the builtin function call. */
10842
10843 static tree
10844 fold_builtin_strcat (tree dst, tree src)
10845 {
10846 if (!validate_arg (dst, POINTER_TYPE)
10847 || !validate_arg (src, POINTER_TYPE))
10848 return NULL_TREE;
10849 else
10850 {
10851 const char *p = c_getstr (src);
10852
10853 /* If the string length is zero, return the dst parameter. */
10854 if (p && *p == '\0')
10855 return dst;
10856
10857 return NULL_TREE;
10858 }
10859 }
10860
10861 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
10862 arguments to the call.
10863
10864 Return NULL_TREE if no simplification was possible, otherwise return the
10865 simplified form of the call as a tree.
10866
10867 The simplified form may be a constant or other expression which
10868 computes the same value, but in a more efficient manner (including
10869 calls to other builtin functions).
10870
10871 The call may contain arguments which need to be evaluated, but
10872 which are not useful to determine the result of the call. In
10873 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10874 COMPOUND_EXPR will be an argument which must be evaluated.
10875 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10876 COMPOUND_EXPR in the chain will contain the tree for the simplified
10877 form of the builtin function call. */
10878
10879 static tree
10880 fold_builtin_strncat (tree dst, tree src, tree len)
10881 {
10882 if (!validate_arg (dst, POINTER_TYPE)
10883 || !validate_arg (src, POINTER_TYPE)
10884 || !validate_arg (len, INTEGER_TYPE))
10885 return NULL_TREE;
10886 else
10887 {
10888 const char *p = c_getstr (src);
10889
10890 /* If the requested length is zero, or the src parameter string
10891 length is zero, return the dst parameter. */
10892 if (integer_zerop (len) || (p && *p == '\0'))
10893 return omit_two_operands (TREE_TYPE (dst), dst, src, len);
10894
10895 /* If the requested len is greater than or equal to the string
10896 length, call strcat. */
10897 if (TREE_CODE (len) == INTEGER_CST && p
10898 && compare_tree_int (len, strlen (p)) >= 0)
10899 {
10900 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
10901
10902 /* If the replacement _DECL isn't initialized, don't do the
10903 transformation. */
10904 if (!fn)
10905 return NULL_TREE;
10906
10907 return build_call_expr (fn, 2, dst, src);
10908 }
10909 return NULL_TREE;
10910 }
10911 }
10912
10913 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
10914 to the call.
10915
10916 Return NULL_TREE if no simplification was possible, otherwise return the
10917 simplified form of the call as a tree.
10918
10919 The simplified form may be a constant or other expression which
10920 computes the same value, but in a more efficient manner (including
10921 calls to other builtin functions).
10922
10923 The call may contain arguments which need to be evaluated, but
10924 which are not useful to determine the result of the call. In
10925 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10926 COMPOUND_EXPR will be an argument which must be evaluated.
10927 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10928 COMPOUND_EXPR in the chain will contain the tree for the simplified
10929 form of the builtin function call. */
10930
10931 static tree
10932 fold_builtin_strspn (tree s1, tree s2)
10933 {
10934 if (!validate_arg (s1, POINTER_TYPE)
10935 || !validate_arg (s2, POINTER_TYPE))
10936 return NULL_TREE;
10937 else
10938 {
10939 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
10940
10941 /* If both arguments are constants, evaluate at compile-time. */
10942 if (p1 && p2)
10943 {
10944 const size_t r = strspn (p1, p2);
10945 return size_int (r);
10946 }
10947
10948 /* If either argument is "", return NULL_TREE. */
10949 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
10950 /* Evaluate and ignore both arguments in case either one has
10951 side-effects. */
10952 return omit_two_operands (integer_type_node, integer_zero_node,
10953 s1, s2);
10954 return NULL_TREE;
10955 }
10956 }
10957
10958 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
10959 to the call.
10960
10961 Return NULL_TREE if no simplification was possible, otherwise return the
10962 simplified form of the call as a tree.
10963
10964 The simplified form may be a constant or other expression which
10965 computes the same value, but in a more efficient manner (including
10966 calls to other builtin functions).
10967
10968 The call may contain arguments which need to be evaluated, but
10969 which are not useful to determine the result of the call. In
10970 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10971 COMPOUND_EXPR will be an argument which must be evaluated.
10972 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10973 COMPOUND_EXPR in the chain will contain the tree for the simplified
10974 form of the builtin function call. */
10975
10976 static tree
10977 fold_builtin_strcspn (tree s1, tree s2)
10978 {
10979 if (!validate_arg (s1, POINTER_TYPE)
10980 || !validate_arg (s2, POINTER_TYPE))
10981 return NULL_TREE;
10982 else
10983 {
10984 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
10985
10986 /* If both arguments are constants, evaluate at compile-time. */
10987 if (p1 && p2)
10988 {
10989 const size_t r = strcspn (p1, p2);
10990 return size_int (r);
10991 }
10992
10993 /* If the first argument is "", return NULL_TREE. */
10994 if (p1 && *p1 == '\0')
10995 {
10996 /* Evaluate and ignore argument s2 in case it has
10997 side-effects. */
10998 return omit_one_operand (integer_type_node,
10999 integer_zero_node, s2);
11000 }
11001
11002 /* If the second argument is "", return __builtin_strlen(s1). */
11003 if (p2 && *p2 == '\0')
11004 {
11005 tree fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11006
11007 /* If the replacement _DECL isn't initialized, don't do the
11008 transformation. */
11009 if (!fn)
11010 return NULL_TREE;
11011
11012 return build_call_expr (fn, 1, s1);
11013 }
11014 return NULL_TREE;
11015 }
11016 }
11017
11018 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11019 to the call. IGNORE is true if the value returned
11020 by the builtin will be ignored. UNLOCKED is true is true if this
11021 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
11022 the known length of the string. Return NULL_TREE if no simplification
11023 was possible. */
11024
11025 tree
11026 fold_builtin_fputs (tree arg0, tree arg1, bool ignore, bool unlocked, tree len)
11027 {
11028 /* If we're using an unlocked function, assume the other unlocked
11029 functions exist explicitly. */
11030 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
11031 : implicit_built_in_decls[BUILT_IN_FPUTC];
11032 tree const fn_fwrite = unlocked ? built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
11033 : implicit_built_in_decls[BUILT_IN_FWRITE];
11034
11035 /* If the return value is used, don't do the transformation. */
11036 if (!ignore)
11037 return NULL_TREE;
11038
11039 /* Verify the arguments in the original call. */
11040 if (!validate_arg (arg0, POINTER_TYPE)
11041 || !validate_arg (arg1, POINTER_TYPE))
11042 return NULL_TREE;
11043
11044 if (! len)
11045 len = c_strlen (arg0, 0);
11046
11047 /* Get the length of the string passed to fputs. If the length
11048 can't be determined, punt. */
11049 if (!len
11050 || TREE_CODE (len) != INTEGER_CST)
11051 return NULL_TREE;
11052
11053 switch (compare_tree_int (len, 1))
11054 {
11055 case -1: /* length is 0, delete the call entirely . */
11056 return omit_one_operand (integer_type_node, integer_zero_node, arg1);;
11057
11058 case 0: /* length is 1, call fputc. */
11059 {
11060 const char *p = c_getstr (arg0);
11061
11062 if (p != NULL)
11063 {
11064 if (fn_fputc)
11065 return build_call_expr (fn_fputc, 2,
11066 build_int_cst (NULL_TREE, p[0]), arg1);
11067 else
11068 return NULL_TREE;
11069 }
11070 }
11071 /* FALLTHROUGH */
11072 case 1: /* length is greater than 1, call fwrite. */
11073 {
11074 /* If optimizing for size keep fputs. */
11075 if (optimize_size)
11076 return NULL_TREE;
11077 /* New argument list transforming fputs(string, stream) to
11078 fwrite(string, 1, len, stream). */
11079 if (fn_fwrite)
11080 return build_call_expr (fn_fwrite, 4, arg0, size_one_node, len, arg1);
11081 else
11082 return NULL_TREE;
11083 }
11084 default:
11085 gcc_unreachable ();
11086 }
11087 return NULL_TREE;
11088 }
11089
11090 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
11091 produced. False otherwise. This is done so that we don't output the error
11092 or warning twice or three times. */
11093 bool
11094 fold_builtin_next_arg (tree exp, bool va_start_p)
11095 {
11096 tree fntype = TREE_TYPE (current_function_decl);
11097 int nargs = call_expr_nargs (exp);
11098 tree arg;
11099
11100 if (TYPE_ARG_TYPES (fntype) == 0
11101 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
11102 == void_type_node))
11103 {
11104 error ("%<va_start%> used in function with fixed args");
11105 return true;
11106 }
11107
11108 if (va_start_p)
11109 {
11110 if (va_start_p && (nargs != 2))
11111 {
11112 error ("wrong number of arguments to function %<va_start%>");
11113 return true;
11114 }
11115 arg = CALL_EXPR_ARG (exp, 1);
11116 }
11117 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11118 when we checked the arguments and if needed issued a warning. */
11119 else
11120 {
11121 if (nargs == 0)
11122 {
11123 /* Evidently an out of date version of <stdarg.h>; can't validate
11124 va_start's second argument, but can still work as intended. */
11125 warning (0, "%<__builtin_next_arg%> called without an argument");
11126 return true;
11127 }
11128 else if (nargs > 1)
11129 {
11130 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11131 return true;
11132 }
11133 arg = CALL_EXPR_ARG (exp, 0);
11134 }
11135
11136 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11137 or __builtin_next_arg (0) the first time we see it, after checking
11138 the arguments and if needed issuing a warning. */
11139 if (!integer_zerop (arg))
11140 {
11141 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11142
11143 /* Strip off all nops for the sake of the comparison. This
11144 is not quite the same as STRIP_NOPS. It does more.
11145 We must also strip off INDIRECT_EXPR for C++ reference
11146 parameters. */
11147 while (TREE_CODE (arg) == NOP_EXPR
11148 || TREE_CODE (arg) == CONVERT_EXPR
11149 || TREE_CODE (arg) == NON_LVALUE_EXPR
11150 || TREE_CODE (arg) == INDIRECT_REF)
11151 arg = TREE_OPERAND (arg, 0);
11152 if (arg != last_parm)
11153 {
11154 /* FIXME: Sometimes with the tree optimizers we can get the
11155 not the last argument even though the user used the last
11156 argument. We just warn and set the arg to be the last
11157 argument so that we will get wrong-code because of
11158 it. */
11159 warning (0, "second parameter of %<va_start%> not last named argument");
11160 }
11161 /* We want to verify the second parameter just once before the tree
11162 optimizers are run and then avoid keeping it in the tree,
11163 as otherwise we could warn even for correct code like:
11164 void foo (int i, ...)
11165 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11166 if (va_start_p)
11167 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11168 else
11169 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11170 }
11171 return false;
11172 }
11173
11174
11175 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
11176 ORIG may be null if this is a 2-argument call. We don't attempt to
11177 simplify calls with more than 3 arguments.
11178
11179 Return NULL_TREE if no simplification was possible, otherwise return the
11180 simplified form of the call as a tree. If IGNORED is true, it means that
11181 the caller does not use the returned value of the function. */
11182
11183 static tree
11184 fold_builtin_sprintf (tree dest, tree fmt, tree orig, int ignored)
11185 {
11186 tree call, retval;
11187 const char *fmt_str = NULL;
11188
11189 /* Verify the required arguments in the original call. We deal with two
11190 types of sprintf() calls: 'sprintf (str, fmt)' and
11191 'sprintf (dest, "%s", orig)'. */
11192 if (!validate_arg (dest, POINTER_TYPE)
11193 || !validate_arg (fmt, POINTER_TYPE))
11194 return NULL_TREE;
11195 if (orig && !validate_arg (orig, POINTER_TYPE))
11196 return NULL_TREE;
11197
11198 /* Check whether the format is a literal string constant. */
11199 fmt_str = c_getstr (fmt);
11200 if (fmt_str == NULL)
11201 return NULL_TREE;
11202
11203 call = NULL_TREE;
11204 retval = NULL_TREE;
11205
11206 if (!init_target_chars ())
11207 return NULL_TREE;
11208
11209 /* If the format doesn't contain % args or %%, use strcpy. */
11210 if (strchr (fmt_str, target_percent) == NULL)
11211 {
11212 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11213
11214 if (!fn)
11215 return NULL_TREE;
11216
11217 /* Don't optimize sprintf (buf, "abc", ptr++). */
11218 if (orig)
11219 return NULL_TREE;
11220
11221 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
11222 'format' is known to contain no % formats. */
11223 call = build_call_expr (fn, 2, dest, fmt);
11224 if (!ignored)
11225 retval = build_int_cst (NULL_TREE, strlen (fmt_str));
11226 }
11227
11228 /* If the format is "%s", use strcpy if the result isn't used. */
11229 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
11230 {
11231 tree fn;
11232 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11233
11234 if (!fn)
11235 return NULL_TREE;
11236
11237 /* Don't crash on sprintf (str1, "%s"). */
11238 if (!orig)
11239 return NULL_TREE;
11240
11241 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
11242 if (!ignored)
11243 {
11244 retval = c_strlen (orig, 1);
11245 if (!retval || TREE_CODE (retval) != INTEGER_CST)
11246 return NULL_TREE;
11247 }
11248 call = build_call_expr (fn, 2, dest, orig);
11249 }
11250
11251 if (call && retval)
11252 {
11253 retval = fold_convert
11254 (TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
11255 retval);
11256 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
11257 }
11258 else
11259 return call;
11260 }
11261
11262 /* Expand a call EXP to __builtin_object_size. */
11263
11264 rtx
11265 expand_builtin_object_size (tree exp)
11266 {
11267 tree ost;
11268 int object_size_type;
11269 tree fndecl = get_callee_fndecl (exp);
11270 location_t locus = EXPR_LOCATION (exp);
11271
11272 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11273 {
11274 error ("%Hfirst argument of %D must be a pointer, second integer constant",
11275 &locus, fndecl);
11276 expand_builtin_trap ();
11277 return const0_rtx;
11278 }
11279
11280 ost = CALL_EXPR_ARG (exp, 1);
11281 STRIP_NOPS (ost);
11282
11283 if (TREE_CODE (ost) != INTEGER_CST
11284 || tree_int_cst_sgn (ost) < 0
11285 || compare_tree_int (ost, 3) > 0)
11286 {
11287 error ("%Hlast argument of %D is not integer constant between 0 and 3",
11288 &locus, fndecl);
11289 expand_builtin_trap ();
11290 return const0_rtx;
11291 }
11292
11293 object_size_type = tree_low_cst (ost, 0);
11294
11295 return object_size_type < 2 ? constm1_rtx : const0_rtx;
11296 }
11297
11298 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11299 FCODE is the BUILT_IN_* to use.
11300 Return NULL_RTX if we failed; the caller should emit a normal call,
11301 otherwise try to get the result in TARGET, if convenient (and in
11302 mode MODE if that's convenient). */
11303
static rtx
expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
			   enum built_in_function fcode)
{
  tree dest, src, len, size;

  /* For __memset_chk the second argument is the fill value (an
     integer); for the others it is the source pointer.  */
  if (!validate_arglist (exp,
			 POINTER_TYPE,
			 fcode == BUILT_IN_MEMSET_CHK
			 ? INTEGER_TYPE : POINTER_TYPE,
			 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dest = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);
  len = CALL_EXPR_ARG (exp, 2);
  size = CALL_EXPR_ARG (exp, 3);

  /* Without a compile-time constant object size nothing can be proved;
     emit the normal checking call.  */
  if (! host_integerp (size, 1))
    return NULL_RTX;

  if (host_integerp (len, 1) || integer_all_onesp (size))
    {
      tree fn;

      /* A constant length larger than the known object size always
	 overflows: warn and keep the checking call.  */
      if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
	{
	  location_t locus = EXPR_LOCATION (exp);
	  warning (0, "%Hcall to %D will always overflow destination buffer",
		   &locus, get_callee_fndecl (exp));
	  return NULL_RTX;
	}

      fn = NULL_TREE;
      /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
	 mem{cpy,pcpy,move,set} is available.  */
      switch (fcode)
	{
	case BUILT_IN_MEMCPY_CHK:
	  fn = built_in_decls[BUILT_IN_MEMCPY];
	  break;
	case BUILT_IN_MEMPCPY_CHK:
	  fn = built_in_decls[BUILT_IN_MEMPCPY];
	  break;
	case BUILT_IN_MEMMOVE_CHK:
	  fn = built_in_decls[BUILT_IN_MEMMOVE];
	  break;
	case BUILT_IN_MEMSET_CHK:
	  fn = built_in_decls[BUILT_IN_MEMSET];
	  break;
	default:
	  break;
	}

      if (! fn)
	return NULL_RTX;

      /* The access is provably in bounds: expand the plain, unchecked
	 variant instead, preserving the tail-call flag.  */
      fn = build_call_expr (fn, 3, dest, src, len);
      if (TREE_CODE (fn) == CALL_EXPR)
	CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_expr (fn, target, mode, EXPAND_NORMAL);
    }
  else if (fcode == BUILT_IN_MEMSET_CHK)
    return NULL_RTX;
  else
    {
      unsigned int dest_align
	= get_pointer_alignment (dest, BIGGEST_ALIGNMENT);

      /* If DEST is not a pointer type, call the normal function.  */
      if (dest_align == 0)
	return NULL_RTX;

      /* If SRC and DEST are the same (and not volatile), do nothing.  */
      if (operand_equal_p (src, dest, 0))
	{
	  tree expr;

	  if (fcode != BUILT_IN_MEMPCPY_CHK)
	    {
	      /* Evaluate and ignore LEN in case it has side-effects.  */
	      expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
	      return expand_expr (dest, target, mode, EXPAND_NORMAL);
	    }

	  /* __mempcpy_chk returns DEST + LEN, so compute that sum.  */
	  len = fold_convert (TREE_TYPE (dest), len);
	  expr = fold_build2 (PLUS_EXPR, TREE_TYPE (dest), dest, len);
	  return expand_expr (expr, target, mode, EXPAND_NORMAL);
	}

      /* __memmove_chk special case.  */
      if (fcode == BUILT_IN_MEMMOVE_CHK)
	{
	  unsigned int src_align
	    = get_pointer_alignment (src, BIGGEST_ALIGNMENT);

	  if (src_align == 0)
	    return NULL_RTX;

	  /* If src is categorized for a readonly section we can use
	     normal __memcpy_chk.  */
	  if (readonly_data_expr (src))
	    {
	      tree fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
	      if (!fn)
		return NULL_RTX;
	      fn = build_call_expr (fn, 4, dest, src, len, size);
	      if (TREE_CODE (fn) == CALL_EXPR)
		CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
	      return expand_expr (fn, target, mode, EXPAND_NORMAL);
	    }
	}
      return NULL_RTX;
    }
}
11419
11420 /* Emit warning if a buffer overflow is detected at compile time. */
11421
static void
maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
{
  int is_strlen = 0;
  tree len, size;
  location_t locus;

  /* Pick out the length-like argument and the object-size argument
     for each checked builtin; the positions differ per builtin.  */
  switch (fcode)
    {
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
    /* For __strcat_chk the warning will be emitted only if overflowing
       by at least strlen (dest) + 1 bytes.  */
    case BUILT_IN_STRCAT_CHK:
      len = CALL_EXPR_ARG (exp, 1);
      size = CALL_EXPR_ARG (exp, 2);
      is_strlen = 1;
      break;
    case BUILT_IN_STRNCAT_CHK:
    case BUILT_IN_STRNCPY_CHK:
      len = CALL_EXPR_ARG (exp, 2);
      size = CALL_EXPR_ARG (exp, 3);
      break;
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      len = CALL_EXPR_ARG (exp, 1);
      size = CALL_EXPR_ARG (exp, 3);
      break;
    default:
      gcc_unreachable ();
    }

  if (!len || !size)
    return;

  /* A non-constant size, or the (size_t) -1 "object size unknown"
     sentinel, proves nothing -- stay silent.  */
  if (! host_integerp (size, 1) || integer_all_onesp (size))
    return;

  if (is_strlen)
    {
      /* LEN here is the source string; warn only when its constant
	 length is >= SIZE (i.e. the copy cannot fit).  */
      len = c_strlen (len, 1);
      if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
	return;
    }
  else if (fcode == BUILT_IN_STRNCAT_CHK)
    {
      tree src = CALL_EXPR_ARG (exp, 1);
      /* Need a constant count >= SIZE before anything can overflow.  */
      if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
	return;
      src = c_strlen (src, 1);
      /* Unknown source length: the append only *might* overflow.  */
      if (! src || ! host_integerp (src, 1))
	{
	  locus = EXPR_LOCATION (exp);
	  warning (0, "%Hcall to %D might overflow destination buffer",
		   &locus, get_callee_fndecl (exp));
	  return;
	}
      /* A short constant source string fits; no warning.  */
      else if (tree_int_cst_lt (src, size))
	return;
    }
  else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
    return;

  locus = EXPR_LOCATION (exp);
  warning (0, "%Hcall to %D will always overflow destination buffer",
	   &locus, get_callee_fndecl (exp));
}
11489
11490 /* Emit warning if a buffer overflow is detected at compile time
11491 in __sprintf_chk/__vsprintf_chk calls. */
11492
static void
maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
{
  tree dest, size, len, fmt, flag;
  const char *fmt_str;
  int nargs = call_expr_nargs (exp);

  /* Verify the required arguments in the original call.  */

  if (nargs < 4)
    return;
  /* DEST and FLAG are fetched to mirror the __sprintf_chk argument
     layout; only SIZE and FMT are examined below.  */
  dest = CALL_EXPR_ARG (exp, 0);
  flag = CALL_EXPR_ARG (exp, 1);
  size = CALL_EXPR_ARG (exp, 2);
  fmt = CALL_EXPR_ARG (exp, 3);

  /* A non-constant size, or the (size_t) -1 "unknown object" sentinel,
     proves nothing.  */
  if (! host_integerp (size, 1) || integer_all_onesp (size))
    return;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return;

  if (!init_target_chars ())
    return;

  /* If the format doesn't contain % args or %%, we know its size.  */
  if (strchr (fmt_str, target_percent) == 0)
    len = build_int_cstu (size_type_node, strlen (fmt_str));
  /* If the format is "%s" and first ... argument is a string literal,
     we know it too.  */
  else if (fcode == BUILT_IN_SPRINTF_CHK
	   && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree arg;

      if (nargs < 5)
	return;
      arg = CALL_EXPR_ARG (exp, 4);
      if (! POINTER_TYPE_P (TREE_TYPE (arg)))
	return;

      len = c_strlen (arg, 1);
      if (!len || ! host_integerp (len, 1))
	return;
    }
  else
    return;

  /* Warn when the computed output length reaches or exceeds the
     known destination size (LEN excludes the terminating NUL).  */
  if (! tree_int_cst_lt (len, size))
    {
      location_t locus = EXPR_LOCATION (exp);
      warning (0, "%Hcall to %D will always overflow destination buffer",
	       &locus, get_callee_fndecl (exp));
    }
}
11550
11551 /* Fold a call to __builtin_object_size with arguments PTR and OST,
11552 if possible. */
11553
tree
fold_builtin_object_size (tree ptr, tree ost)
{
  tree ret = NULL_TREE;
  int object_size_type;

  if (!validate_arg (ptr, POINTER_TYPE)
      || !validate_arg (ost, INTEGER_TYPE))
    return NULL_TREE;

  /* OST may be wrapped in conversions; look through them.  */
  STRIP_NOPS (ost);

  /* The object-size type selector must be a constant 0, 1, 2 or 3.  */
  if (TREE_CODE (ost) != INTEGER_CST
      || tree_int_cst_sgn (ost) < 0
      || compare_tree_int (ost, 3) > 0)
    return NULL_TREE;

  object_size_type = tree_low_cst (ost, 0);

  /* __builtin_object_size doesn't evaluate side-effects in its arguments;
     if there are any side-effects, it returns (size_t) -1 for types 0 and 1
     and (size_t) 0 for types 2 and 3.  */
  if (TREE_SIDE_EFFECTS (ptr))
    return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);

  if (TREE_CODE (ptr) == ADDR_EXPR)
    ret = build_int_cstu (size_type_node,
			  compute_builtin_object_size (ptr, object_size_type));

  else if (TREE_CODE (ptr) == SSA_NAME)
    {
      unsigned HOST_WIDE_INT bytes;

      /* If object size is not known yet, delay folding until
	 later.  Maybe subsequent passes will help determining
	 it.  */
      bytes = compute_builtin_object_size (ptr, object_size_type);
      if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2
					     ? -1 : 0))
	ret = build_int_cstu (size_type_node, bytes);
    }

  /* Drop the result rather than fold to a constant that had to be
     truncated to fit the result type.  */
  if (ret)
    {
      unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (ret);
      HOST_WIDE_INT high = TREE_INT_CST_HIGH (ret);
      if (fit_double_type (low, high, &low, &high, TREE_TYPE (ret)))
	ret = NULL_TREE;
    }

  return ret;
}
11606
11607 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11608 DEST, SRC, LEN, and SIZE are the arguments to the call.
11609 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
11610 code of the builtin. If MAXLEN is not NULL, it is maximum length
11611 passed as third argument. */
11612
tree
fold_builtin_memory_chk (tree fndecl,
			 tree dest, tree src, tree len, tree size,
			 tree maxlen, bool ignore,
			 enum built_in_function fcode)
{
  tree fn;

  /* For __memset_chk the second argument is the fill value (an
     integer); for the others it is the source pointer.  */
  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (src,
			(fcode == BUILT_IN_MEMSET_CHK
			 ? INTEGER_TYPE : POINTER_TYPE))
      || !validate_arg (len, INTEGER_TYPE)
      || !validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;

  /* If SRC and DEST are the same (and not volatile), return DEST
     (resp. DEST+LEN for __mempcpy_chk).  */
  if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
    {
      if (fcode != BUILT_IN_MEMPCPY_CHK)
	return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
      else
	{
	  tree temp = fold_convert (TREE_TYPE (dest), len);
	  temp = fold_build2 (PLUS_EXPR, TREE_TYPE (dest), dest, temp);
	  return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), temp);
	}
    }

  /* Without a compile-time constant object size we can't prove the
     access is in bounds.  */
  if (! host_integerp (size, 1))
    return NULL_TREE;

  /* A SIZE of (size_t) -1 means the object size is unknown and the
     check is a no-op; otherwise prove LEN (or MAXLEN) <= SIZE.  */
  if (! integer_all_onesp (size))
    {
      if (! host_integerp (len, 1))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
	    {
	      if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
		{
		  /* (void) __mempcpy_chk () can be optimized into
		     (void) __memcpy_chk ().  */
		  fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
		  if (!fn)
		    return NULL_TREE;

		  return build_call_expr (fn, 4, dest, src, len, size);
		}
	      return NULL_TREE;
	    }
	}
      else
	maxlen = len;

      /* Keep the checking call when the buffer may be too small.  */
      if (tree_int_cst_lt (size, maxlen))
	return NULL_TREE;
    }

  fn = NULL_TREE;
  /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
     mem{cpy,pcpy,move,set} is available.  */
  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
      fn = built_in_decls[BUILT_IN_MEMCPY];
      break;
    case BUILT_IN_MEMPCPY_CHK:
      fn = built_in_decls[BUILT_IN_MEMPCPY];
      break;
    case BUILT_IN_MEMMOVE_CHK:
      fn = built_in_decls[BUILT_IN_MEMMOVE];
      break;
    case BUILT_IN_MEMSET_CHK:
      fn = built_in_decls[BUILT_IN_MEMSET];
      break;
    default:
      break;
    }

  if (!fn)
    return NULL_TREE;

  /* The access is provably safe: fold to the unchecked variant.  */
  return build_call_expr (fn, 3, dest, src, len);
}
11701
11702 /* Fold a call to the __st[rp]cpy_chk builtin.
11703 DEST, SRC, and SIZE are the arguments to the call.
11704 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
11705 code of the builtin. If MAXLEN is not NULL, it is maximum length of
11706 strings passed as second argument. */
11707
tree
fold_builtin_stxcpy_chk (tree fndecl, tree dest, tree src, tree size,
			 tree maxlen, bool ignore,
			 enum built_in_function fcode)
{
  tree len, fn;

  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (src, POINTER_TYPE)
      || !validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
    return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);

  /* Without a compile-time constant object size nothing can be
     proved.  */
  if (! host_integerp (size, 1))
    return NULL_TREE;

  /* A SIZE of (size_t) -1 means the object size is unknown and the
     check is a no-op; otherwise prove the copy fits.  */
  if (! integer_all_onesp (size))
    {
      len = c_strlen (src, 1);
      if (! len || ! host_integerp (len, 1))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
	    {
	      if (fcode == BUILT_IN_STPCPY_CHK)
		{
		  if (! ignore)
		    return NULL_TREE;

		  /* If return value of __stpcpy_chk is ignored,
		     optimize into __strcpy_chk.  */
		  fn = built_in_decls[BUILT_IN_STRCPY_CHK];
		  if (!fn)
		    return NULL_TREE;

		  return build_call_expr (fn, 3, dest, src, size);
		}

	      if (! len || TREE_SIDE_EFFECTS (len))
		return NULL_TREE;

	      /* If c_strlen returned something, but not a constant,
		 transform __strcpy_chk into __memcpy_chk.  */
	      fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
	      if (!fn)
		return NULL_TREE;

	      /* Copy LEN + 1 bytes to include the terminating NUL.  */
	      len = size_binop (PLUS_EXPR, len, ssize_int (1));
	      return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
				   build_call_expr (fn, 4,
						    dest, src, len, size));
	    }
	}
      else
	maxlen = len;

      /* The string (plus its NUL) fits only when MAXLEN < SIZE.  */
      if (! tree_int_cst_lt (maxlen, size))
	return NULL_TREE;
    }

  /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available.  */
  fn = built_in_decls[fcode == BUILT_IN_STPCPY_CHK
		      ? BUILT_IN_STPCPY : BUILT_IN_STRCPY];
  if (!fn)
    return NULL_TREE;

  return build_call_expr (fn, 2, dest, src);
}
11781
11782 /* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
11783 are the arguments to the call. If MAXLEN is not NULL, it is maximum
11784 length passed as third argument. */
11785
11786 tree
11787 fold_builtin_strncpy_chk (tree dest, tree src, tree len, tree size,
11788 tree maxlen)
11789 {
11790 tree fn;
11791
11792 if (!validate_arg (dest, POINTER_TYPE)
11793 || !validate_arg (src, POINTER_TYPE)
11794 || !validate_arg (len, INTEGER_TYPE)
11795 || !validate_arg (size, INTEGER_TYPE))
11796 return NULL_TREE;
11797
11798 if (! host_integerp (size, 1))
11799 return NULL_TREE;
11800
11801 if (! integer_all_onesp (size))
11802 {
11803 if (! host_integerp (len, 1))
11804 {
11805 /* If LEN is not constant, try MAXLEN too.
11806 For MAXLEN only allow optimizing into non-_ocs function
11807 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
11808 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
11809 return NULL_TREE;
11810 }
11811 else
11812 maxlen = len;
11813
11814 if (tree_int_cst_lt (size, maxlen))
11815 return NULL_TREE;
11816 }
11817
11818 /* If __builtin_strncpy_chk is used, assume strncpy is available. */
11819 fn = built_in_decls[BUILT_IN_STRNCPY];
11820 if (!fn)
11821 return NULL_TREE;
11822
11823 return build_call_expr (fn, 3, dest, src, len);
11824 }
11825
11826 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
11827 are the arguments to the call. */
11828
11829 static tree
11830 fold_builtin_strcat_chk (tree fndecl, tree dest, tree src, tree size)
11831 {
11832 tree fn;
11833 const char *p;
11834
11835 if (!validate_arg (dest, POINTER_TYPE)
11836 || !validate_arg (src, POINTER_TYPE)
11837 || !validate_arg (size, INTEGER_TYPE))
11838 return NULL_TREE;
11839
11840 p = c_getstr (src);
11841 /* If the SRC parameter is "", return DEST. */
11842 if (p && *p == '\0')
11843 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
11844
11845 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
11846 return NULL_TREE;
11847
11848 /* If __builtin_strcat_chk is used, assume strcat is available. */
11849 fn = built_in_decls[BUILT_IN_STRCAT];
11850 if (!fn)
11851 return NULL_TREE;
11852
11853 return build_call_expr (fn, 2, dest, src);
11854 }
11855
11856 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
11857 LEN, and SIZE. */
11858
11859 static tree
11860 fold_builtin_strncat_chk (tree fndecl,
11861 tree dest, tree src, tree len, tree size)
11862 {
11863 tree fn;
11864 const char *p;
11865
11866 if (!validate_arg (dest, POINTER_TYPE)
11867 || !validate_arg (src, POINTER_TYPE)
11868 || !validate_arg (size, INTEGER_TYPE)
11869 || !validate_arg (size, INTEGER_TYPE))
11870 return NULL_TREE;
11871
11872 p = c_getstr (src);
11873 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
11874 if (p && *p == '\0')
11875 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
11876 else if (integer_zerop (len))
11877 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
11878
11879 if (! host_integerp (size, 1))
11880 return NULL_TREE;
11881
11882 if (! integer_all_onesp (size))
11883 {
11884 tree src_len = c_strlen (src, 1);
11885 if (src_len
11886 && host_integerp (src_len, 1)
11887 && host_integerp (len, 1)
11888 && ! tree_int_cst_lt (len, src_len))
11889 {
11890 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
11891 fn = built_in_decls[BUILT_IN_STRCAT_CHK];
11892 if (!fn)
11893 return NULL_TREE;
11894
11895 return build_call_expr (fn, 3, dest, src, size);
11896 }
11897 return NULL_TREE;
11898 }
11899
11900 /* If __builtin_strncat_chk is used, assume strncat is available. */
11901 fn = built_in_decls[BUILT_IN_STRNCAT];
11902 if (!fn)
11903 return NULL_TREE;
11904
11905 return build_call_expr (fn, 3, dest, src, len);
11906 }
11907
/* Fold a call EXP to __{,v}sprintf_chk.  Return NULL_TREE if
   a normal call should be emitted rather than expanding the function
   inline.  FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK.  */

static tree
fold_builtin_sprintf_chk (tree exp, enum built_in_function fcode)
{
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;
  int nargs = call_expr_nargs (exp);

  /* Verify the required arguments in the original call; the argument
     order is (dest, flag, size, fmt, ...).  */
  if (nargs < 4)
    return NULL_TREE;
  dest = CALL_EXPR_ARG (exp, 0);
  if (!validate_arg (dest, POINTER_TYPE))
    return NULL_TREE;
  flag = CALL_EXPR_ARG (exp, 1);
  if (!validate_arg (flag, INTEGER_TYPE))
    return NULL_TREE;
  size = CALL_EXPR_ARG (exp, 2);
  if (!validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;
  fmt = CALL_EXPR_ARG (exp, 3);
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  /* SIZE (the object size of DEST) must be a compile-time constant.  */
  if (! host_integerp (size, 1))
    return NULL_TREE;

  /* LEN will hold the number of bytes written (excluding the NUL)
     when that can be determined from the format, else NULL_TREE.  */
  len = NULL_TREE;

  if (!init_target_chars ())
    return NULL_TREE;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str != NULL)
    {
      /* If the format doesn't contain % args or %%, we know the size.  */
      if (strchr (fmt_str, target_percent) == 0)
	{
	  /* For the non-va_list variant only trust the format length
	     when no variadic arguments were passed (nargs == 4).  */
	  if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
	    len = build_int_cstu (size_type_node, strlen (fmt_str));
	}
      /* If the format is "%s" and first ... argument is a string literal,
	 we know the size too.  */
      else if (fcode == BUILT_IN_SPRINTF_CHK
	       && strcmp (fmt_str, target_percent_s) == 0)
	{
	  tree arg;

	  if (nargs == 5)
	    {
	      arg = CALL_EXPR_ARG (exp, 4);
	      if (validate_arg (arg, POINTER_TYPE))
		{
		  len = c_strlen (arg, 1);
		  /* Discard LEN unless it is a usable host integer.  */
		  if (! len || ! host_integerp (len, 1))
		    len = NULL_TREE;
		}
	    }
	}
    }

  /* Unless SIZE is (size_t) -1 (unknown object size), require the
     computed length to be known and strictly smaller than SIZE.  */
  if (! integer_all_onesp (size))
    {
      if (! len || ! tree_int_cst_lt (len, size))
	return NULL_TREE;
    }

  /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      if (fmt_str == NULL)
	return NULL_TREE;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return NULL_TREE;
    }

  /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available.  */
  fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
		      ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
  if (!fn)
    return NULL_TREE;

  /* Rewrite, dropping the FLAG and SIZE arguments: skip the first 4
     args of EXP and call FN with (dest, fmt) plus any remaining args.  */
  return rewrite_call_expr (exp, 4, fn, 2, dest, fmt);
}
11998
11999 /* Fold a call EXP to {,v}snprintf. Return NULL_TREE if
12000 a normal call should be emitted rather than expanding the function
12001 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
12002 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
12003 passed as second argument. */
12004
12005 tree
12006 fold_builtin_snprintf_chk (tree exp, tree maxlen,
12007 enum built_in_function fcode)
12008 {
12009 tree dest, size, len, fn, fmt, flag;
12010 const char *fmt_str;
12011
12012 /* Verify the required arguments in the original call. */
12013 if (call_expr_nargs (exp) < 5)
12014 return NULL_TREE;
12015 dest = CALL_EXPR_ARG (exp, 0);
12016 if (!validate_arg (dest, POINTER_TYPE))
12017 return NULL_TREE;
12018 len = CALL_EXPR_ARG (exp, 1);
12019 if (!validate_arg (len, INTEGER_TYPE))
12020 return NULL_TREE;
12021 flag = CALL_EXPR_ARG (exp, 2);
12022 if (!validate_arg (flag, INTEGER_TYPE))
12023 return NULL_TREE;
12024 size = CALL_EXPR_ARG (exp, 3);
12025 if (!validate_arg (size, INTEGER_TYPE))
12026 return NULL_TREE;
12027 fmt = CALL_EXPR_ARG (exp, 4);
12028 if (!validate_arg (fmt, POINTER_TYPE))
12029 return NULL_TREE;
12030
12031 if (! host_integerp (size, 1))
12032 return NULL_TREE;
12033
12034 if (! integer_all_onesp (size))
12035 {
12036 if (! host_integerp (len, 1))
12037 {
12038 /* If LEN is not constant, try MAXLEN too.
12039 For MAXLEN only allow optimizing into non-_ocs function
12040 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12041 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12042 return NULL_TREE;
12043 }
12044 else
12045 maxlen = len;
12046
12047 if (tree_int_cst_lt (size, maxlen))
12048 return NULL_TREE;
12049 }
12050
12051 if (!init_target_chars ())
12052 return NULL_TREE;
12053
12054 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
12055 or if format doesn't contain % chars or is "%s". */
12056 if (! integer_zerop (flag))
12057 {
12058 fmt_str = c_getstr (fmt);
12059 if (fmt_str == NULL)
12060 return NULL_TREE;
12061 if (strchr (fmt_str, target_percent) != NULL
12062 && strcmp (fmt_str, target_percent_s))
12063 return NULL_TREE;
12064 }
12065
12066 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
12067 available. */
12068 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
12069 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
12070 if (!fn)
12071 return NULL_TREE;
12072
12073 return rewrite_call_expr (exp, 5, fn, 3, dest, len, fmt);
12074 }
12075
/* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
   FMT and ARG are the arguments to the call; we don't fold cases with
   more than 2 arguments, and ARG may be null if this is a 1-argument case.

   FNDECL is the declaration of the builtin being folded; its return
   type supplies the type of the result.  IGNORE is true when the call's
   return value is unused.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.  FCODE is the BUILT_IN_*
   code of the function to be simplified.  */

static tree
fold_builtin_printf (tree fndecl, tree fmt, tree arg, bool ignore,
		     enum built_in_function fcode)
{
  tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (! ignore)
    return NULL_TREE;

  /* Verify the required arguments in the original call.  */
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return NULL_TREE;

  if (fcode == BUILT_IN_PRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_putchar = built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED];
      fn_puts = built_in_decls[BUILT_IN_PUTS_UNLOCKED];
    }
  else
    {
      fn_putchar = implicit_built_in_decls[BUILT_IN_PUTCHAR];
      fn_puts = implicit_built_in_decls[BUILT_IN_PUTS];
    }

  if (!init_target_chars ())
    return NULL_TREE;

  /* Handle formats that are exactly "%s" or contain no % directives.  */
  if (strcmp (fmt_str, target_percent_s) == 0
      || strchr (fmt_str, target_percent) == NULL)
    {
      const char *str;

      if (strcmp (fmt_str, target_percent_s) == 0)
	{
	  /* printf ("%s", arg): the va_list variants carry their
	     arguments in a va_list, so ARG is not available there.  */
	  if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
	    return NULL_TREE;

	  if (!arg || !validate_arg (arg, POINTER_TYPE))
	    return NULL_TREE;

	  /* The "%s" argument itself must be a string literal.  */
	  str = c_getstr (arg);
	  if (str == NULL)
	    return NULL_TREE;
	}
      else
	{
	  /* The format specifier doesn't contain any '%' characters.  */
	  if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
	      && arg)
	    return NULL_TREE;
	  str = fmt_str;
	}

      /* If the string was "", printf does nothing.  */
      if (str[0] == '\0')
	return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);

      /* If the string has length of 1, call putchar.  */
      if (str[1] == '\0')
	{
	  /* Given printf("c"), (where c is any one character,)
	     convert "c"[0] to an int and pass that to the replacement
	     function.  */
	  newarg = build_int_cst (NULL_TREE, str[0]);
	  if (fn_putchar)
	    call = build_call_expr (fn_putchar, 1, newarg);
	}
      else
	{
	  /* If the string was "string\n", call puts("string").  */
	  size_t len = strlen (str);
	  if ((unsigned char)str[len - 1] == target_newline)
	    {
	      /* Create a NUL-terminated string that's one char shorter
		 than the original, stripping off the trailing '\n'.  */
	      char *newstr = alloca (len);
	      memcpy (newstr, str, len - 1);
	      newstr[len - 1] = 0;

	      newarg = build_string_literal (len, newstr);
	      if (fn_puts)
		call = build_call_expr (fn_puts, 1, newarg);
	    }
	  else
	    /* We'd like to arrange to call fputs(string,stdout) here,
	       but we need stdout and don't have a way to get it yet.  */
	    return NULL_TREE;
	}
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
    return NULL_TREE;

  /* If the format specifier was "%s\n", call __builtin_puts(arg).  */
  else if (strcmp (fmt_str, target_percent_s_newline) == 0)
    {
      if (!arg || !validate_arg (arg, POINTER_TYPE))
	return NULL_TREE;
      if (fn_puts)
	call = build_call_expr (fn_puts, 1, arg);
    }

  /* If the format specifier was "%c", call __builtin_putchar(arg).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg || !validate_arg (arg, INTEGER_TYPE))
	return NULL_TREE;
      if (fn_putchar)
	call = build_call_expr (fn_putchar, 1, arg);
    }

  /* CALL is still NULL_TREE when the replacement builtin declaration
     was unavailable; emit the original call in that case.  */
  if (!call)
    return NULL_TREE;

  return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), call);
}
12210
12211 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
12212 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
12213 more than 3 arguments, and ARG may be null in the 2-argument case.
12214
12215 Return NULL_TREE if no simplification was possible, otherwise return the
12216 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12217 code of the function to be simplified. */
12218
12219 static tree
12220 fold_builtin_fprintf (tree fndecl, tree fp, tree fmt, tree arg, bool ignore,
12221 enum built_in_function fcode)
12222 {
12223 tree fn_fputc, fn_fputs, call = NULL_TREE;
12224 const char *fmt_str = NULL;
12225
12226 /* If the return value is used, don't do the transformation. */
12227 if (! ignore)
12228 return NULL_TREE;
12229
12230 /* Verify the required arguments in the original call. */
12231 if (!validate_arg (fp, POINTER_TYPE))
12232 return NULL_TREE;
12233 if (!validate_arg (fmt, POINTER_TYPE))
12234 return NULL_TREE;
12235
12236 /* Check whether the format is a literal string constant. */
12237 fmt_str = c_getstr (fmt);
12238 if (fmt_str == NULL)
12239 return NULL_TREE;
12240
12241 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
12242 {
12243 /* If we're using an unlocked function, assume the other
12244 unlocked functions exist explicitly. */
12245 fn_fputc = built_in_decls[BUILT_IN_FPUTC_UNLOCKED];
12246 fn_fputs = built_in_decls[BUILT_IN_FPUTS_UNLOCKED];
12247 }
12248 else
12249 {
12250 fn_fputc = implicit_built_in_decls[BUILT_IN_FPUTC];
12251 fn_fputs = implicit_built_in_decls[BUILT_IN_FPUTS];
12252 }
12253
12254 if (!init_target_chars ())
12255 return NULL_TREE;
12256
12257 /* If the format doesn't contain % args or %%, use strcpy. */
12258 if (strchr (fmt_str, target_percent) == NULL)
12259 {
12260 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
12261 && arg)
12262 return NULL_TREE;
12263
12264 /* If the format specifier was "", fprintf does nothing. */
12265 if (fmt_str[0] == '\0')
12266 {
12267 /* If FP has side-effects, just wait until gimplification is
12268 done. */
12269 if (TREE_SIDE_EFFECTS (fp))
12270 return NULL_TREE;
12271
12272 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12273 }
12274
12275 /* When "string" doesn't contain %, replace all cases of
12276 fprintf (fp, string) with fputs (string, fp). The fputs
12277 builtin will take care of special cases like length == 1. */
12278 if (fn_fputs)
12279 call = build_call_expr (fn_fputs, 2, fmt, fp);
12280 }
12281
12282 /* The other optimizations can be done only on the non-va_list variants. */
12283 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
12284 return NULL_TREE;
12285
12286 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
12287 else if (strcmp (fmt_str, target_percent_s) == 0)
12288 {
12289 if (!arg || !validate_arg (arg, POINTER_TYPE))
12290 return NULL_TREE;
12291 if (fn_fputs)
12292 call = build_call_expr (fn_fputs, 2, arg, fp);
12293 }
12294
12295 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
12296 else if (strcmp (fmt_str, target_percent_c) == 0)
12297 {
12298 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12299 return NULL_TREE;
12300 if (fn_fputc)
12301 call = build_call_expr (fn_fputc, 2, arg, fp);
12302 }
12303
12304 if (!call)
12305 return NULL_TREE;
12306 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), call);
12307 }
12308
12309 /* Initialize format string characters in the target charset. */
12310
12311 static bool
12312 init_target_chars (void)
12313 {
12314 static bool init;
12315 if (!init)
12316 {
12317 target_newline = lang_hooks.to_target_charset ('\n');
12318 target_percent = lang_hooks.to_target_charset ('%');
12319 target_c = lang_hooks.to_target_charset ('c');
12320 target_s = lang_hooks.to_target_charset ('s');
12321 if (target_newline == 0 || target_percent == 0 || target_c == 0
12322 || target_s == 0)
12323 return false;
12324
12325 target_percent_c[0] = target_percent;
12326 target_percent_c[1] = target_c;
12327 target_percent_c[2] = '\0';
12328
12329 target_percent_s[0] = target_percent;
12330 target_percent_s[1] = target_s;
12331 target_percent_s[2] = '\0';
12332
12333 target_percent_s_newline[0] = target_percent;
12334 target_percent_s_newline[1] = target_s;
12335 target_percent_s_newline[2] = target_newline;
12336 target_percent_s_newline[3] = '\0';
12337
12338 init = true;
12339 }
12340 return true;
12341 }
12342
12343 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
12344 and no overflow/underflow occurred. INEXACT is true if M was not
12345 exactly calculated. TYPE is the tree type for the result. This
12346 function assumes that you cleared the MPFR flags and then
12347 calculated M to see if anything subsequently set a flag prior to
12348 entering this function. Return NULL_TREE if any checks fail. */
12349
12350 static tree
12351 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
12352 {
12353 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12354 overflow/underflow occurred. If -frounding-math, proceed iff the
12355 result of calling FUNC was exact. */
12356 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
12357 && (!flag_rounding_math || !inexact))
12358 {
12359 REAL_VALUE_TYPE rr;
12360
12361 real_from_mpfr (&rr, m, type, GMP_RNDN);
12362 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
12363 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12364 but the mpft_t is not, then we underflowed in the
12365 conversion. */
12366 if (!real_isnan (&rr) && !real_isinf (&rr)
12367 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
12368 {
12369 REAL_VALUE_TYPE rmode;
12370
12371 real_convert (&rmode, TYPE_MODE (type), &rr);
12372 /* Proceed iff the specified mode can hold the value. */
12373 if (real_identical (&rmode, &rr))
12374 return build_real (type, rmode);
12375 }
12376 }
12377 return NULL_TREE;
12378 }
12379
12380 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
12381 FUNC on it and return the resulting value as a tree with type TYPE.
12382 If MIN and/or MAX are not NULL, then the supplied ARG must be
12383 within those bounds. If INCLUSIVE is true, then MIN/MAX are
12384 acceptable values, otherwise they are not. The mpfr precision is
12385 set to the precision of TYPE. We assume that function FUNC returns
12386 zero if the result could be calculated exactly within the requested
12387 precision. */
12388
12389 static tree
12390 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
12391 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
12392 bool inclusive)
12393 {
12394 tree result = NULL_TREE;
12395
12396 STRIP_NOPS (arg);
12397
12398 /* To proceed, MPFR must exactly represent the target floating point
12399 format, which only happens when the target base equals two. */
12400 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12401 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
12402 {
12403 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12404
12405 if (!real_isnan (ra) && !real_isinf (ra)
12406 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
12407 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
12408 {
12409 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12410 int inexact;
12411 mpfr_t m;
12412
12413 mpfr_init2 (m, prec);
12414 mpfr_from_real (m, ra, GMP_RNDN);
12415 mpfr_clear_flags ();
12416 inexact = func (m, m, GMP_RNDN);
12417 result = do_mpfr_ckconv (m, type, inexact);
12418 mpfr_clear (m);
12419 }
12420 }
12421
12422 return result;
12423 }
12424
12425 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
12426 FUNC on it and return the resulting value as a tree with type TYPE.
12427 The mpfr precision is set to the precision of TYPE. We assume that
12428 function FUNC returns zero if the result could be calculated
12429 exactly within the requested precision. */
12430
12431 static tree
12432 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
12433 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12434 {
12435 tree result = NULL_TREE;
12436
12437 STRIP_NOPS (arg1);
12438 STRIP_NOPS (arg2);
12439
12440 /* To proceed, MPFR must exactly represent the target floating point
12441 format, which only happens when the target base equals two. */
12442 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12443 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12444 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12445 {
12446 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12447 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12448
12449 if (!real_isnan (ra1) && !real_isinf (ra1)
12450 && !real_isnan (ra2) && !real_isinf (ra2))
12451 {
12452 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12453 int inexact;
12454 mpfr_t m1, m2;
12455
12456 mpfr_inits2 (prec, m1, m2, NULL);
12457 mpfr_from_real (m1, ra1, GMP_RNDN);
12458 mpfr_from_real (m2, ra2, GMP_RNDN);
12459 mpfr_clear_flags ();
12460 inexact = func (m1, m1, m2, GMP_RNDN);
12461 result = do_mpfr_ckconv (m1, type, inexact);
12462 mpfr_clears (m1, m2, NULL);
12463 }
12464 }
12465
12466 return result;
12467 }
12468
12469 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
12470 FUNC on it and return the resulting value as a tree with type TYPE.
12471 The mpfr precision is set to the precision of TYPE. We assume that
12472 function FUNC returns zero if the result could be calculated
12473 exactly within the requested precision. */
12474
12475 static tree
12476 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
12477 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12478 {
12479 tree result = NULL_TREE;
12480
12481 STRIP_NOPS (arg1);
12482 STRIP_NOPS (arg2);
12483 STRIP_NOPS (arg3);
12484
12485 /* To proceed, MPFR must exactly represent the target floating point
12486 format, which only happens when the target base equals two. */
12487 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12488 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12489 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
12490 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
12491 {
12492 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12493 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12494 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
12495
12496 if (!real_isnan (ra1) && !real_isinf (ra1)
12497 && !real_isnan (ra2) && !real_isinf (ra2)
12498 && !real_isnan (ra3) && !real_isinf (ra3))
12499 {
12500 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12501 int inexact;
12502 mpfr_t m1, m2, m3;
12503
12504 mpfr_inits2 (prec, m1, m2, m3, NULL);
12505 mpfr_from_real (m1, ra1, GMP_RNDN);
12506 mpfr_from_real (m2, ra2, GMP_RNDN);
12507 mpfr_from_real (m3, ra3, GMP_RNDN);
12508 mpfr_clear_flags ();
12509 inexact = func (m1, m1, m2, m3, GMP_RNDN);
12510 result = do_mpfr_ckconv (m1, type, inexact);
12511 mpfr_clears (m1, m2, m3, NULL);
12512 }
12513 }
12514
12515 return result;
12516 }
12517
/* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
   the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
   If ARG_SINP and ARG_COSP are NULL then the result is returned
   as a complex value.
   The type is taken from the type of ARG and is used for setting the
   precision of the calculation and results.  */

static tree
do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
{
  tree const type = TREE_TYPE (arg);
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST
      && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);

      if (!real_isnan (ra) && !real_isinf (ra))
	{
	  const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
	  tree result_s, result_c;
	  int inexact;
	  mpfr_t m, ms, mc;

	  /* Compute sin and cos simultaneously; INEXACT covers both.  */
	  mpfr_inits2 (prec, m, ms, mc, NULL);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  mpfr_clear_flags ();
	  inexact = mpfr_sin_cos (ms, mc, m, GMP_RNDN);
	  result_s = do_mpfr_ckconv (ms, type, inexact);
	  result_c = do_mpfr_ckconv (mc, type, inexact);
	  mpfr_clears (m, ms, mc, NULL);
	  /* Both conversions must have succeeded.  */
	  if (result_s && result_c)
	    {
	      /* If we are to return in a complex value do so.  */
	      if (!arg_sinp && !arg_cosp)
		return build_complex (build_complex_type (type),
				      result_c, result_s);

	      /* Dereference the sin/cos pointer arguments.  */
	      arg_sinp = build_fold_indirect_ref (arg_sinp);
	      arg_cosp = build_fold_indirect_ref (arg_cosp);
	      /* Proceed if valid pointer type were passed in.  */
	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
		  && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
		{
		  /* Set the values.  Mark both assignments as having
		     side effects so they are not optimized away.  */
		  result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
					  result_s);
		  TREE_SIDE_EFFECTS (result_s) = 1;
		  result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
					  result_c);
		  TREE_SIDE_EFFECTS (result_c) = 1;
		  /* Combine the assignments into a compound expr.  */
		  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						    result_s, result_c));
		}
	    }
	}
    }
  return result;
}
12585
12586 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
/* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
   two-argument mpfr order N Bessel function FUNC on them and return
   the resulting value as a tree with type TYPE.  The mpfr precision
   is set to the precision of TYPE.  We assume that function FUNC
   returns zero if the result could be calculated exactly within the
   requested precision.  If MIN is not NULL, ARG2 must be greater than
   MIN (or >= MIN when INCLUSIVE is true).  */
static tree
do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
		  int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
		  const REAL_VALUE_TYPE *min, bool inclusive)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && host_integerp (arg1, 0)
      && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
    {
      const HOST_WIDE_INT n = tree_low_cst(arg1, 0);
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);

      /* The order N must fit in a long because that is what FUNC
	 (an mpfr_jn/mpfr_yn-style function) takes.  */
      if (n == (long)n
	  && !real_isnan (ra) && !real_isinf (ra)
	  && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
	{
	  const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
	  int inexact;
	  mpfr_t m;

	  mpfr_init2 (m, prec);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  mpfr_clear_flags ();
	  inexact = func (m, n, m, GMP_RNDN);
	  result = do_mpfr_ckconv (m, type, inexact);
	  mpfr_clear (m);
	}
    }

  return result;
}
12631
/* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
   the pointer *(ARG_QUO) and return the result.  The type is taken
   from the type of ARG0 and is used for setting the precision of the
   calculation and results.  */

static tree
do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
{
  tree const type = TREE_TYPE (arg0);
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
    {
      const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
      const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);

      if (!real_isnan (ra0) && !real_isinf (ra0)
	  && !real_isnan (ra1) && !real_isinf (ra1))
	{
	  const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
	  tree result_rem;
	  long integer_quo;
	  mpfr_t m0, m1;

	  mpfr_inits2 (prec, m0, m1, NULL);
	  mpfr_from_real (m0, ra0, GMP_RNDN);
	  mpfr_from_real (m1, ra1, GMP_RNDN);
	  mpfr_clear_flags ();
	  mpfr_remquo (m0, &integer_quo, m0, m1, GMP_RNDN);
	  /* Remquo is independent of the rounding mode, so pass
	     inexact=0 to do_mpfr_ckconv().  */
	  result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
	  mpfr_clears (m0, m1, NULL);
	  if (result_rem)
	    {
	      /* MPFR calculates quo in the host's long so it may
		 return more bits in quo than the target int can hold
		 if sizeof(host long) > sizeof(target int).  This can
		 happen even for native compilers in LP64 mode.  In
		 these cases, modulo the quo value with the largest
		 number that the target int can hold while leaving one
		 bit for the sign.  */
	      if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
		integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));

	      /* Dereference the quo pointer argument.  */
	      arg_quo = build_fold_indirect_ref (arg_quo);
	      /* Proceed iff a valid pointer type was passed in.  */
	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
		{
		  /* Set the value.  The assignment is marked as having
		     a side effect so it is not optimized away.  */
		  tree result_quo = fold_build2 (MODIFY_EXPR,
						 TREE_TYPE (arg_quo), arg_quo,
						 build_int_cst (NULL, integer_quo));
		  TREE_SIDE_EFFECTS (result_quo) = 1;
		  /* Combine the quo assignment with the rem.  */
		  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						    result_quo, result_rem));
		}
	    }
	}
    }
  return result;
}
12703
/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
   resulting value as a tree with type TYPE.  The mpfr precision is
   set to the precision of TYPE.  We assume that this mpfr function
   returns zero if the result could be calculated exactly within the
   requested precision.  In addition, the integer pointer represented
   by ARG_SG will be dereferenced and set to the appropriate signgam
   (-1,1) value.  */

static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
	 negative integer.  */
      if (!real_isnan (ra) && !real_isinf (ra)
	  && ra->cl != rvc_zero
	  && !(real_isneg(ra) && real_isinteger(ra, TYPE_MODE (type))))
	{
	  const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
	  int inexact, sg;
	  mpfr_t m;
	  tree result_lg;

	  /* mpfr_lgamma also reports the sign of gamma(x) in SG.  */
	  mpfr_init2 (m, prec);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  mpfr_clear_flags ();
	  inexact = mpfr_lgamma (m, &sg, m, GMP_RNDN);
	  result_lg = do_mpfr_ckconv (m, type, inexact);
	  mpfr_clear (m);
	  if (result_lg)
	    {
	      tree result_sg;

	      /* Dereference the arg_sg pointer argument.  */
	      arg_sg = build_fold_indirect_ref (arg_sg);
	      /* Assign the signgam value into *arg_sg.  The assignment
		 is marked as a side effect so it is not optimized away.  */
	      result_sg = fold_build2 (MODIFY_EXPR,
				       TREE_TYPE (arg_sg), arg_sg,
				       build_int_cst (NULL, sg));
	      TREE_SIDE_EFFECTS (result_sg) = 1;
	      /* Combine the signgam assignment with the lgamma result.  */
	      result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						result_sg, result_lg));
	    }
	}
    }

  return result;
}
12766 #endif