tree-flow.h (tree_ssa_useless_type_conversion_1): Rename to ...
[gcc.git] / gcc / builtins.c
1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
4 Free Software Foundation, Inc.
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
21 02110-1301, USA. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "machmode.h"
28 #include "real.h"
29 #include "rtl.h"
30 #include "tree.h"
31 #include "tree-gimple.h"
32 #include "flags.h"
33 #include "regs.h"
34 #include "hard-reg-set.h"
35 #include "except.h"
36 #include "function.h"
37 #include "insn-config.h"
38 #include "expr.h"
39 #include "optabs.h"
40 #include "libfuncs.h"
41 #include "recog.h"
42 #include "output.h"
43 #include "typeclass.h"
44 #include "toplev.h"
45 #include "predict.h"
46 #include "tm_p.h"
47 #include "target.h"
48 #include "langhooks.h"
49 #include "basic-block.h"
50 #include "tree-mudflap.h"
51 #include "tree-flow.h"
52 #include "value-prof.h"
53 #include "diagnostic.h"
54
#ifndef PAD_VARARGS_DOWN
/* Direction to pad varargs arguments when the target does not say
   otherwise: downward exactly when the target is big-endian.  */
#define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
#endif

/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[4]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

/* Expand builtins.def with DEF_BUILTIN stringizing each enumerator, so
   built_in_names[code] yields the textual name of enum value CODE.  */
#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN

/* Setup an array of _DECL trees, make sure each element is
   initialized to NULL_TREE.  */
tree built_in_decls[(int) END_BUILTINS];
/* Declarations used when constructing the builtin implicitly in the compiler.
   It may be NULL_TREE when this is invalid (for instance runtime is not
   required to implement the function call in all cases).  */
tree implicit_built_in_decls[(int) END_BUILTINS];
77
/* Forward declarations for the static helpers defined later in this
   file, loosely grouped by purpose.  */

/* String-constant and target-character utilities.  */
static const char *c_getstr (tree);
static rtx c_readstr (const char *, enum machine_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);

/* __builtin_apply / __builtin_return machinery.  */
static int apply_args_size (void);
static int apply_result_size (void);
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
static rtx result_vector (int, rtx);
#endif
static void expand_builtin_update_setjmp_buf (rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);

/* Expanders for math builtins.  */
static void expand_errno_check (tree, rtx);
static rtx expand_builtin_mathfn (tree, rtx, rtx);
static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx, rtx);

/* Varargs (stdarg/va_*) builtins.  */
static rtx expand_builtin_args_info (tree);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);

/* Expanders for string and memory builtins.  */
static rtx expand_builtin_memchr (tree, rtx, enum machine_mode);
static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
static rtx expand_builtin_strcmp (tree, rtx, enum machine_mode);
static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
static rtx expand_builtin_strcat (tree, tree, rtx, enum machine_mode);
static rtx expand_builtin_strncat (tree, rtx, enum machine_mode);
static rtx expand_builtin_strspn (tree, rtx, enum machine_mode);
static rtx expand_builtin_strcspn (tree, rtx, enum machine_mode);
static rtx expand_builtin_memcpy (tree, rtx, enum machine_mode);
static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, tree, rtx,
					enum machine_mode, int);
static rtx expand_builtin_memmove (tree, rtx, enum machine_mode, int);
static rtx expand_builtin_memmove_args (tree, tree, tree, tree, rtx,
					enum machine_mode, int);
static rtx expand_builtin_bcopy (tree, int);
static rtx expand_builtin_strcpy (tree, tree, rtx, enum machine_mode);
static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx, enum machine_mode);
static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
static rtx builtin_strncpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
static rtx expand_builtin_strncpy (tree, rtx, enum machine_mode);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
static rtx expand_builtin_strstr (tree, rtx, enum machine_mode);
static rtx expand_builtin_strpbrk (tree, rtx, enum machine_mode);
static rtx expand_builtin_strchr (tree, rtx, enum machine_mode);
static rtx expand_builtin_strrchr (tree, rtx, enum machine_mode);

/* Miscellaneous expanders.  */
static rtx expand_builtin_alloca (tree, rtx);
static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static rtx expand_builtin_fputs (tree, rtx, bool);
static rtx expand_builtin_printf (tree, rtx, enum machine_mode, bool);
static rtx expand_builtin_fprintf (tree, rtx, enum machine_mode, bool);
static rtx expand_builtin_sprintf (tree, rtx, enum machine_mode);
static tree stabilize_va_list (tree, int);
static rtx expand_builtin_expect (tree, rtx);

/* Tree-level folders for builtins.  */
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_expect (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (tree);
static tree fold_builtin_inf (tree, int);
static tree fold_builtin_nan (tree, tree, int);
static tree rewrite_call_expr (tree, int, tree, int, ...);
static bool validate_arg (tree, enum tree_code code);
static bool integer_valued_real_p (tree);
static tree fold_trunc_transparent_mathfn (tree, tree);
static bool readonly_data_expr (tree);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_sqrt (tree, tree);
static tree fold_builtin_cbrt (tree, tree);
static tree fold_builtin_pow (tree, tree, tree, tree);
static tree fold_builtin_powi (tree, tree, tree, tree);
static tree fold_builtin_cos (tree, tree, tree);
static tree fold_builtin_cosh (tree, tree, tree);
static tree fold_builtin_tan (tree, tree);
static tree fold_builtin_trunc (tree, tree);
static tree fold_builtin_floor (tree, tree);
static tree fold_builtin_ceil (tree, tree);
static tree fold_builtin_round (tree, tree);
static tree fold_builtin_int_roundingfn (tree, tree);
static tree fold_builtin_bitop (tree, tree);
static tree fold_builtin_memory_op (tree, tree, tree, tree, bool, int);
static tree fold_builtin_strchr (tree, tree, tree);
static tree fold_builtin_memchr (tree, tree, tree, tree);
static tree fold_builtin_memcmp (tree, tree, tree);
static tree fold_builtin_strcmp (tree, tree);
static tree fold_builtin_strncmp (tree, tree, tree);
static tree fold_builtin_signbit (tree, tree);
static tree fold_builtin_copysign (tree, tree, tree, tree);
static tree fold_builtin_isascii (tree);
static tree fold_builtin_toascii (tree);
static tree fold_builtin_isdigit (tree);
static tree fold_builtin_fabs (tree, tree);
static tree fold_builtin_abs (tree, tree);
static tree fold_builtin_unordered_cmp (tree, tree, tree, enum tree_code,
					enum tree_code);
/* Arity-dispatched folding entry points (0..4 args, then varargs).  */
static tree fold_builtin_n (tree, tree *, int, bool);
static tree fold_builtin_0 (tree, bool);
static tree fold_builtin_1 (tree, tree, bool);
static tree fold_builtin_2 (tree, tree, tree, bool);
static tree fold_builtin_3 (tree, tree, tree, tree, bool);
static tree fold_builtin_4 (tree, tree, tree, tree, tree, bool);
static tree fold_builtin_varargs (tree, tree, bool);

static tree fold_builtin_strpbrk (tree, tree, tree);
static tree fold_builtin_strstr (tree, tree, tree);
static tree fold_builtin_strrchr (tree, tree, tree);
static tree fold_builtin_strcat (tree, tree);
static tree fold_builtin_strncat (tree, tree, tree);
static tree fold_builtin_strspn (tree, tree);
static tree fold_builtin_strcspn (tree, tree);
static tree fold_builtin_sprintf (tree, tree, tree, int);

/* Object-size checking (_FORTIFY_SOURCE style __*_chk) support.  */
static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
				      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static tree fold_builtin_object_size (tree, tree);
static tree fold_builtin_strcat_chk (tree, tree, tree, tree);
static tree fold_builtin_strncat_chk (tree, tree, tree, tree, tree);
static tree fold_builtin_sprintf_chk (tree, enum built_in_function);
static tree fold_builtin_printf (tree, tree, tree, bool, enum built_in_function);
static tree fold_builtin_fprintf (tree, tree, tree, tree, bool,
				  enum built_in_function);
static bool init_target_chars (void);

/* Cached target representations of a few characters and format strings,
   filled in lazily by init_target_chars.  */
static unsigned HOST_WIDE_INT target_newline;
static unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
static char target_percent_c[3];
static char target_percent_s[3];
static char target_percent_s_newline[4];

/* MPFR-based constant folding of math builtins.  */
static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
			  const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_arg2 (tree, tree, tree,
			  int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_arg3 (tree, tree, tree, tree,
			  int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_sincos (tree, tree, tree);
#if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
static tree do_mpfr_bessel_n (tree, tree, tree,
			      int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
			      const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
#endif
242
243 /* Return true if NODE should be considered for inline expansion regardless
244 of the optimization level. This means whenever a function is invoked with
245 its "internal" name, which normally contains the prefix "__builtin". */
246
247 static bool called_as_built_in (tree node)
248 {
249 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
250 if (strncmp (name, "__builtin_", 10) == 0)
251 return true;
252 if (strncmp (name, "__sync_", 7) == 0)
253 return true;
254 return false;
255 }
256
/* Return the alignment in bits of EXP, a pointer valued expression.
   But don't return more than MAX_ALIGN no matter what.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

int
get_pointer_alignment (tree exp, unsigned int max_align)
{
  unsigned int align, inner;

  /* We rely on TER to compute accurate alignment information.  */
  if (!(optimize && flag_tree_ter))
    return 0;

  if (!POINTER_TYPE_P (TREE_TYPE (exp)))
    return 0;

  /* Start from the declared alignment of the pointed-to type, clamped
     to the caller's ceiling.  */
  align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
  align = MIN (align, max_align);

  /* Walk down through conversions, pointer arithmetic and address-of
     expressions, tightening or loosening ALIGN as we learn more.  */
  while (1)
    {
      switch (TREE_CODE (exp))
	{
	case NOP_EXPR:
	case CONVERT_EXPR:
	case NON_LVALUE_EXPR:
	  /* Look through pointer-to-pointer conversions; stop if the
	     operand is no longer a pointer.  */
	  exp = TREE_OPERAND (exp, 0);
	  if (! POINTER_TYPE_P (TREE_TYPE (exp)))
	    return align;

	  inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
	  align = MIN (inner, max_align);
	  break;

	case POINTER_PLUS_EXPR:
	  /* If sum of pointer + int, restrict our maximum alignment to that
	     imposed by the integer.  If not, we can't do any better than
	     ALIGN.  */
	  if (! host_integerp (TREE_OPERAND (exp, 1), 1))
	    return align;

	  /* Halve MAX_ALIGN until the constant offset is a multiple of
	     it, since the offset limits provable alignment.  */
	  while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
		  & (max_align / BITS_PER_UNIT - 1))
		 != 0)
	    max_align >>= 1;

	  exp = TREE_OPERAND (exp, 0);
	  break;

	case ADDR_EXPR:
	  /* See what we are pointing at and look at its alignment.  */
	  exp = TREE_OPERAND (exp, 0);
	  inner = max_align;
	  if (handled_component_p (exp))
	    {
	      HOST_WIDE_INT bitsize, bitpos;
	      tree offset;
	      enum machine_mode mode;
	      int unsignedp, volatilep;

	      exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
					 &mode, &unsignedp, &volatilep, true);
	      /* BITPOS & -BITPOS isolates the lowest set bit, i.e. the
		 largest power of two dividing the bit position.  */
	      if (bitpos)
		inner = MIN (inner, (unsigned) (bitpos & -bitpos));
	      if (offset && TREE_CODE (offset) == PLUS_EXPR
		  && host_integerp (TREE_OPERAND (offset, 1), 1))
	        {
		  /* Any overflow in calculating offset_bits won't change
		     the alignment.  */
		  unsigned offset_bits
		    = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
		       * BITS_PER_UNIT);

		  if (offset_bits)
		    inner = MIN (inner, (offset_bits & -offset_bits));
		  offset = TREE_OPERAND (offset, 0);
		}
	      if (offset && TREE_CODE (offset) == MULT_EXPR
		  && host_integerp (TREE_OPERAND (offset, 1), 1))
	        {
		  /* Any overflow in calculating offset_factor won't change
		     the alignment.  */
		  unsigned offset_factor
		    = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
		       * BITS_PER_UNIT);

		  if (offset_factor)
		    inner = MIN (inner, (offset_factor & -offset_factor));
		}
	      else if (offset)
		/* A variable offset of unknown factor can still only be
		   assumed byte-aligned.  */
		inner = MIN (inner, BITS_PER_UNIT);
	    }
	  if (TREE_CODE (exp) == FUNCTION_DECL)
	    align = FUNCTION_BOUNDARY;
	  else if (DECL_P (exp))
	    align = MIN (inner, DECL_ALIGN (exp));
#ifdef CONSTANT_ALIGNMENT
	  else if (CONSTANT_CLASS_P (exp))
	    align = MIN (inner, (unsigned)CONSTANT_ALIGNMENT (exp, align));
#endif
	  else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR
		   || TREE_CODE (exp) == INDIRECT_REF)
	    align = MIN (TYPE_ALIGN (TREE_TYPE (exp)), inner);
	  else
	    align = MIN (align, inner);
	  return MIN (align, max_align);

	default:
	  return align;
	}
    }
}
373
/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  tree offset_node;
  HOST_WIDE_INT offset;
  int max;
  const char *ptr;

  STRIP_NOPS (src);
  /* A conditional whose arms have equal constant length has that
     length; only take this shortcut when evaluating the condition's
     side effects is not required (see ONLY_VALUE above).  */
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  /* (e1, e2) has the length of e2, under the same side-effect proviso.  */
  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  src = string_constant (src, &offset_node);
  if (src == 0)
    return NULL_TREE;

  max = TREE_STRING_LENGTH (src) - 1;
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      int i;

      for (i = 0; i < max; i++)
	if (ptr[i] == 0)
	  return NULL_TREE;

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  This would perhaps not be valid if we were dealing
	 with named arrays in addition to literal string constants.  */

      return size_diffop (size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (offset_node == 0)
    offset = 0;
  else if (! host_integerp (offset_node, 0))
    offset = -1;
  else
    offset = tree_low_cst (offset_node, 0);

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      warning (0, "offset outside bounds of constant string");
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return ssize_int (strlen (ptr + offset));
}
467
468 /* Return a char pointer for a C string if it is a string constant
469 or sum of string constant and integer constant. */
470
471 static const char *
472 c_getstr (tree src)
473 {
474 tree offset_node;
475
476 src = string_constant (src, &offset_node);
477 if (src == 0)
478 return 0;
479
480 if (offset_node == 0)
481 return TREE_STRING_POINTER (src);
482 else if (!host_integerp (offset_node, 1)
483 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
484 return 0;
485
486 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
487 }
488
/* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, enum machine_mode mode)
{
  /* C holds the two host words of the result, low word first.  */
  HOST_WIDE_INT c[2];
  HOST_WIDE_INT ch;
  unsigned int i, j;

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);

  c[0] = 0;
  c[1] = 0;
  /* CH doubles as the "still inside the string" flag: once a NUL is
     read it stays zero, so the remaining bytes are padded with zeros.  */
  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      /* Map host byte index I to the target bit offset J, accounting
	 for target word and byte endianness.  */
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;
      gcc_assert (j <= 2 * HOST_BITS_PER_WIDE_INT);

      if (ch)
	ch = (unsigned char) str[i];
      c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }
  return immed_double_const (c[0], c[1], mode);
}
521
522 /* Cast a target constant CST to target CHAR and if that value fits into
523 host char type, return zero and put that value into variable pointed to by
524 P. */
525
526 static int
527 target_char_cast (tree cst, char *p)
528 {
529 unsigned HOST_WIDE_INT val, hostval;
530
531 if (!host_integerp (cst, 1)
532 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
533 return 1;
534
535 val = tree_low_cst (cst, 1);
536 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
537 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
538
539 hostval = val;
540 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
541 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
542
543 if (val != hostval)
544 return 1;
545
546 *p = hostval;
547 return 0;
548 }
549
550 /* Similar to save_expr, but assumes that arbitrary code is not executed
551 in between the multiple evaluations. In particular, we assume that a
552 non-addressable local variable will not be modified. */
553
554 static tree
555 builtin_save_expr (tree exp)
556 {
557 if (TREE_ADDRESSABLE (exp) == 0
558 && (TREE_CODE (exp) == PARM_DECL
559 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp))))
560 return exp;
561
562 return save_expr (exp);
563 }
564
/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;

#ifdef INITIAL_FRAME_ADDRESS_RTX
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
#else
  rtx tem;

  /* For a zero count with __builtin_return_address, we don't care what
     frame address we return, because target-specific definitions will
     override us.  Therefore frame pointer elimination is OK, and using
     the soft frame pointer is OK.

     For a nonzero count, or a zero count with __builtin_frame_address,
     we require a stable offset from the current frame pointer to the
     previous one, so we must use the hard frame pointer, and
     we must disable frame pointer elimination.  */
  if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    tem = frame_pointer_rtx;
  else
    {
      tem = hard_frame_pointer_rtx;

      /* Tell reload not to eliminate the frame pointer.  */
      current_function_accesses_prior_frames = 1;
    }
#endif

  /* Some machines need special handling before we can access
     arbitrary frames.  For example, on the SPARC, we must first flush
     all register windows to the stack.  */
#ifdef SETUP_FRAME_ADDRESSES
  if (count > 0)
    SETUP_FRAME_ADDRESSES ();
#endif

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
  if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;
#endif

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
#ifdef DYNAMIC_CHAIN_ADDRESS
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
#ifdef FRAME_ADDR_RTX
    return FRAME_ADDR_RTX (tem);
#else
    return tem;
#endif

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  /* By default the return address lives one word past the frame
     address.  */
  tem = memory_address (Pmode,
			plus_constant (tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
648
649 /* Alias set used for setjmp buffer. */
650 static HOST_WIDE_INT setjmp_alias_set = -1;
651
652 /* Construct the leading half of a __builtin_setjmp call. Control will
653 return to RECEIVER_LABEL. This is also called directly by the SJLJ
654 exception handling code. */
655
656 void
657 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
658 {
659 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
660 rtx stack_save;
661 rtx mem;
662
663 if (setjmp_alias_set == -1)
664 setjmp_alias_set = new_alias_set ();
665
666 buf_addr = convert_memory_address (Pmode, buf_addr);
667
668 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
669
670 /* We store the frame pointer and the address of receiver_label in
671 the buffer and use the rest of it for the stack save area, which
672 is machine-dependent. */
673
674 mem = gen_rtx_MEM (Pmode, buf_addr);
675 set_mem_alias_set (mem, setjmp_alias_set);
676 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
677
678 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
679 set_mem_alias_set (mem, setjmp_alias_set);
680
681 emit_move_insn (validize_mem (mem),
682 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
683
684 stack_save = gen_rtx_MEM (sa_mode,
685 plus_constant (buf_addr,
686 2 * GET_MODE_SIZE (Pmode)));
687 set_mem_alias_set (stack_save, setjmp_alias_set);
688 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
689
690 /* If there is further processing to do, do it. */
691 #ifdef HAVE_builtin_setjmp_setup
692 if (HAVE_builtin_setjmp_setup)
693 emit_insn (gen_builtin_setjmp_setup (buf_addr));
694 #endif
695
696 /* Tell optimize_save_area_alloca that extra work is going to
697 need to go on during alloca. */
698 current_function_calls_setjmp = 1;
699
700 /* We have a nonlocal label. */
701 current_function_has_nonlocal_label = 1;
702 }
703
/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
{
  /* Clobber the FP when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
#ifdef HAVE_nonlocal_goto
  if (! HAVE_nonlocal_goto)
#endif
    {
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
      /* This might change the hard frame pointer in ways that aren't
	 apparent to early optimization passes, so force a clobber.  */
      emit_insn (gen_rtx_CLOBBER (VOIDmode, hard_frame_pointer_rtx));
    }

#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (virtual_incoming_args_rtx,
			  copy_to_reg (get_arg_pointer_save_area (cfun)));
	}
    }
#endif

  /* Let the target emit its own receiver code if it has any; otherwise
     fall back to the generic nonlocal-goto receiver, if that exists.
     Note the ifdef'ed if/else chaining: with both patterns absent the
     trailing empty block is all that remains.  */
#ifdef HAVE_builtin_setjmp_receiver
  if (HAVE_builtin_setjmp_receiver)
    emit_insn (gen_builtin_setjmp_receiver (receiver_label));
  else
#endif
#ifdef HAVE_nonlocal_goto_receiver
    if (HAVE_nonlocal_goto_receiver)
      emit_insn (gen_nonlocal_goto_receiver ());
    else
#endif
      { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}
770
/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack, insn, last;
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We used to store value in static_chain_rtx, but that fails if pointers
     are smaller than integers.  We instead require that the user must pass
     a second argument of 1, because that is what builtin_setjmp will
     return.  This also makes EH slightly more efficient, since we are no
     longer copying around a value that we don't care about.  */
  gcc_assert (value == const1_rtx);

  /* Remember where the insn stream ends so we can verify below that we
     actually emitted a jump.  */
  last = get_last_insn ();
#ifdef HAVE_builtin_longjmp
  if (HAVE_builtin_longjmp)
    emit_insn (gen_builtin_longjmp (buf_addr));
  else
#endif
    {
      /* The buffer layout matches expand_builtin_setjmp_setup:
	 word 0 = frame pointer, word 1 = label, word 2.. = stack area.  */
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
#ifdef HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
      else
#endif
	{
	  /* Load the label before clobbering the frame pointer it may
	     have been addressed relative to.  */
	  lab = copy_to_reg (lab);

	  emit_insn (gen_rtx_CLOBBER (VOIDmode,
				      gen_rtx_MEM (BLKmode,
						   gen_rtx_SCRATCH (VOIDmode))));
	  emit_insn (gen_rtx_CLOBBER (VOIDmode,
				      gen_rtx_MEM (BLKmode,
						   hard_frame_pointer_rtx)));

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);

	  emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
	  emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO, const0_rtx,
					      REG_NOTES (insn));
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}
861
/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area (a two-word block holding the saved
   frame pointer and the saved stack pointer of the containing function).
   Returns const0_rtx; control does not normally return past the emitted
   indirect jump.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp, insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* First word of the save area is the frame pointer; the second is the
     stack pointer, in the target's nonlocal save-area mode.  */
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
                      plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));

  current_function_has_nonlocal_goto = 1;

#ifdef HAVE_nonlocal_goto
  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (HAVE_nonlocal_goto)
    emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
#endif
    {
      r_label = copy_to_reg (r_label);

      /* Clobber all of memory and the old frame pointer so nothing is
         assumed to survive across the jump.  */
      emit_insn (gen_rtx_CLOBBER (VOIDmode,
                                  gen_rtx_MEM (BLKmode,
                                               gen_rtx_SCRATCH (VOIDmode))));

      emit_insn (gen_rtx_CLOBBER (VOIDmode,
                                  gen_rtx_MEM (BLKmode,
                                               hard_frame_pointer_rtx)));

      /* Restore frame pointer for containing function.
         This sets the actual hard register used for the frame pointer
         to the location of the function's incoming static chain info.
         The non-local goto handler will then adjust it to contain the
         proper value and reload the argument pointer, if needed.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp, NULL_RTX);

      /* USE of hard_frame_pointer_rtx added for consistency;
         not clear if really needed.  */
      emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
      emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  Stop at a CALL_P insn, which means the jump was
     emitted by a target nonlocal_goto pattern ending in a call.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
        {
          REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO,
                                              const0_rtx, REG_NOTES (insn));
          break;
        }
      else if (CALL_P (insn))
        break;
    }

  return const0_rtx;
}
935
/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to correspond to the current
   stack pointer.  */

static void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  enum machine_mode sa_mode = Pmode;
  rtx stack_save;

  /* Prefer the mode from the target's save_stack_nonlocal pattern, but
     let STACK_SAVEAREA_MODE override it when defined (checked second,
     so it wins).  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    sa_mode = insn_data[(int) CODE_FOR_save_stack_nonlocal].operand[0].mode;
#endif
#ifdef STACK_SAVEAREA_MODE
  sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
#endif

  /* The stack pointer is kept in the third word of the setjmp buffer.  */
  stack_save
    = gen_rtx_MEM (sa_mode,
                   memory_address
                   (sa_mode,
                    plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));

#ifdef HAVE_setjmp
  if (HAVE_setjmp)
    emit_insn (gen_setjmp ());
#endif

  emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
}
969
/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = build_int_cst (NULL_TREE, 3);

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.
     A non-constant is an error but is recovered from by substituting
     zero so expansion can continue.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
               " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

#ifdef HAVE_prefetch
  if (HAVE_prefetch)
    {
      /* If the address does not satisfy the prefetch pattern's operand
         predicate, force it into a Pmode register first.  */
      if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
             (op0,
              insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
          || (GET_MODE (op0) != Pmode))
        {
          op0 = convert_memory_address (Pmode, op0);
          op0 = force_reg (Pmode, op0);
        }
      emit_insn (gen_prefetch (op0, op1, op2));
    }
#endif

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
1051
/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  rtx addr = expand_expr (exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  rtx mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     If it is an ADDR_EXPR, use the operand.  Otherwise, dereference it if
     we can.  First remove any nops.  */
  while ((TREE_CODE (exp) == NOP_EXPR || TREE_CODE (exp) == CONVERT_EXPR
          || TREE_CODE (exp) == NON_LVALUE_EXPR)
         && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  if (TREE_CODE (exp) == ADDR_EXPR)
    exp = TREE_OPERAND (exp, 0);
  else if (POINTER_TYPE_P (TREE_TYPE (exp)))
    exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
  else
    exp = NULL;

  /* Honor attributes derived from exp, except for the alias set
     (as builtin stringops may alias with anything) and the size
     (as stringops may access multiple array elements).  */
  if (exp)
    {
      set_mem_attributes (mem, exp, 0);

      /* Allow the string and memory builtins to overflow from one
         field into another, see http://gcc.gnu.org/PR23561.
         Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
         memory accessed by the string or memory builtin will fit
         within the field.  */
      if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
        {
          tree mem_expr = MEM_EXPR (mem);
          /* OFFSET is the byte offset of the access within the outermost
             field; LENGTH the access length; -1 means "unknown".  */
          HOST_WIDE_INT offset = -1, length = -1;
          tree inner = exp;

          /* Strip wrappers to reach the innermost COMPONENT_REF.  */
          while (TREE_CODE (inner) == ARRAY_REF
                 || TREE_CODE (inner) == NOP_EXPR
                 || TREE_CODE (inner) == CONVERT_EXPR
                 || TREE_CODE (inner) == NON_LVALUE_EXPR
                 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
                 || TREE_CODE (inner) == SAVE_EXPR)
            inner = TREE_OPERAND (inner, 0);

          gcc_assert (TREE_CODE (inner) == COMPONENT_REF);

          if (MEM_OFFSET (mem)
              && GET_CODE (MEM_OFFSET (mem)) == CONST_INT)
            offset = INTVAL (MEM_OFFSET (mem));

          if (offset >= 0 && len && host_integerp (len, 0))
            length = tree_low_cst (len, 0);

          /* Walk outward through nested COMPONENT_REFs, stopping at the
             first field that provably contains the whole access.  */
          while (TREE_CODE (inner) == COMPONENT_REF)
            {
              tree field = TREE_OPERAND (inner, 1);
              gcc_assert (! DECL_BIT_FIELD (field));
              gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
              gcc_assert (field == TREE_OPERAND (mem_expr, 1));

              if (length >= 0
                  && TYPE_SIZE_UNIT (TREE_TYPE (inner))
                  && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (inner)), 0))
                {
                  HOST_WIDE_INT size
                    = tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (inner)), 0);
                  /* If we can prove the memory starting at XEXP (mem, 0)
                     and ending at XEXP (mem, 0) + LENGTH will fit into
                     this field, we can keep that COMPONENT_REF in MEM_EXPR.  */
                  if (offset <= size
                      && length <= size
                      && offset + length <= size)
                    break;
                }

              if (offset >= 0
                  && host_integerp (DECL_FIELD_OFFSET (field), 0))
                offset += tree_low_cst (DECL_FIELD_OFFSET (field), 0)
                          + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
                            / BITS_PER_UNIT;
              else
                {
                  offset = -1;
                  length = -1;
                }

              mem_expr = TREE_OPERAND (mem_expr, 0);
              inner = TREE_OPERAND (inner, 0);
            }

          if (mem_expr == NULL)
            offset = -1;
          if (mem_expr != MEM_EXPR (mem))
            {
              set_mem_expr (mem, mem_expr);
              set_mem_offset (mem, offset >= 0 ? GEN_INT (offset) : NULL_RTX);
            }
        }
      /* Stringops may alias anything and may span multiple elements, so
         clear the alias set and the recorded size.  */
      set_mem_alias_set (mem, 0);
      set_mem_size (mem, NULL_RTX);
    }

  return mem;
}
1164 \f
/* Built-in functions to perform an untyped call and return.  */

/* For each register that may be used for calling a function, this
   gives a mode used to copy the register's value.  VOIDmode indicates
   the register is not used for calling a function.  If the machine
   has register windows, this gives only the outbound registers.
   INCOMING_REGNO gives the corresponding inbound register.
   Filled in lazily by apply_args_size.  */
static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];

/* For each register that may be used for returning values, this gives
   a mode used to copy the register's value.  VOIDmode indicates the
   register is not used for returning values.  If the machine has
   register windows, this gives only the outbound registers.
   INCOMING_REGNO gives the corresponding inbound register.
   Filled in lazily by apply_result_size.  */
static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];

/* For each register that may be used for calling a function, this
   gives the offset of that register into the block returned by
   __builtin_apply_args.  0 indicates that the register is not
   used for calling a function.  */
static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
1186
1187 /* Return the size required for the block returned by __builtin_apply_args,
1188 and initialize apply_args_mode. */
1189
1190 static int
1191 apply_args_size (void)
1192 {
1193 static int size = -1;
1194 int align;
1195 unsigned int regno;
1196 enum machine_mode mode;
1197
1198 /* The values computed by this function never change. */
1199 if (size < 0)
1200 {
1201 /* The first value is the incoming arg-pointer. */
1202 size = GET_MODE_SIZE (Pmode);
1203
1204 /* The second value is the structure value address unless this is
1205 passed as an "invisible" first argument. */
1206 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1207 size += GET_MODE_SIZE (Pmode);
1208
1209 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1210 if (FUNCTION_ARG_REGNO_P (regno))
1211 {
1212 mode = reg_raw_mode[regno];
1213
1214 gcc_assert (mode != VOIDmode);
1215
1216 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1217 if (size % align != 0)
1218 size = CEIL (size, align) * align;
1219 apply_args_reg_offset[regno] = size;
1220 size += GET_MODE_SIZE (mode);
1221 apply_args_mode[regno] = mode;
1222 }
1223 else
1224 {
1225 apply_args_mode[regno] = VOIDmode;
1226 apply_args_reg_offset[regno] = 0;
1227 }
1228 }
1229 return size;
1230 }
1231
1232 /* Return the size required for the block returned by __builtin_apply,
1233 and initialize apply_result_mode. */
1234
1235 static int
1236 apply_result_size (void)
1237 {
1238 static int size = -1;
1239 int align, regno;
1240 enum machine_mode mode;
1241
1242 /* The values computed by this function never change. */
1243 if (size < 0)
1244 {
1245 size = 0;
1246
1247 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1248 if (FUNCTION_VALUE_REGNO_P (regno))
1249 {
1250 mode = reg_raw_mode[regno];
1251
1252 gcc_assert (mode != VOIDmode);
1253
1254 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1255 if (size % align != 0)
1256 size = CEIL (size, align) * align;
1257 size += GET_MODE_SIZE (mode);
1258 apply_result_mode[regno] = mode;
1259 }
1260 else
1261 apply_result_mode[regno] = VOIDmode;
1262
1263 /* Allow targets that use untyped_call and untyped_return to override
1264 the size so that machine-specific information can be stored here. */
1265 #ifdef APPLY_RESULT_SIZE
1266 size = APPLY_RESULT_SIZE;
1267 #endif
1268 }
1269 return size;
1270 }
1271
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  Returns a PARALLEL of SETs, one per live entry of
   apply_result_mode, laid out with the same alignment rules used by
   apply_result_size.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  enum machine_mode mode;
  rtx reg, mem;
  rtx *savevec = alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;
        /* When restoring, the value comes back in the incoming register
           (register windows).  */
        reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
        mem = adjust_address (result, mode, size);
        savevec[nelts++] = (savep
                            ? gen_rtx_SET (VOIDmode, mem, reg)
                            : gen_rtx_SET (VOIDmode, reg, mem));
        size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
#endif /* HAVE_untyped_call or HAVE_untyped_return */
1302
/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  Returns a pseudo
   holding the address of a stack block laid out as described in
   apply_args_size.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  enum machine_mode mode;
  rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;

        tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

        emit_move_insn (adjust_address (registers, mode, size), tem);
        size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (virtual_incoming_args_rtx);
#ifdef STACK_GROWS_DOWNWARD
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  tem
    = force_operand (plus_constant (tem, current_function_pretend_args_size),
                     NULL_RTX);
#endif
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
                      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}
1363
/* __builtin_apply_args returns block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;
    rtx seq;

    /* Build the register-saving code in a detached sequence so it can
       be spliced in at function entry, before any register is
       clobbered.  */
    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  */
    push_topmost_sequence ();
    emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}
1402
/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.
   FUNCTION is the function to call; ARGUMENTS is the address of the
   block built by __builtin_apply_args; ARGSIZE is the size in bytes of
   the stack arguments to copy.  Returns the address (in ptr_mode) of a
   block holding the callee's return registers, suitable for
   __builtin_return.  */

static rtx
expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
{
  int size, align, regno;
  enum machine_mode mode;
  rtx incoming_args, result, reg, dest, src, call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;
  rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);

  arguments = convert_memory_address (Pmode, arguments);

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
#ifndef STACK_GROWS_DOWNWARD
  incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
                                       incoming_args, 0, OPTAB_LIB_WIDEN);
#endif

  /* Push a new argument block and copy the arguments.  Do not allow
     the (potential) memcpy call below to interfere with our stack
     manipulations.  */
  do_pending_stack_adjust ();
  NO_DEFER_POP;

  /* Save the stack with nonlocal if available.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
  else
#endif
    emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);

  /* Allocate a block of memory onto the stack and copy the memory
     arguments to the outgoing arguments address.  */
  allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
  dest = virtual_outgoing_args_rtx;
#ifndef STACK_GROWS_DOWNWARD
  if (GET_CODE (argsize) == CONST_INT)
    dest = plus_constant (dest, -INTVAL (argsize));
  else
    dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
#endif
  dest = gen_rtx_MEM (BLKmode, dest);
  set_mem_align (dest, PARM_BOUNDARY);
  src = gen_rtx_MEM (BLKmode, incoming_args);
  set_mem_align (src, PARM_BOUNDARY);
  emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);

  /* Refer to the argument block.  (apply_args_size is called for its
     side effect of initializing apply_args_mode.)  */
  apply_args_size ();
  arguments = gen_rtx_MEM (BLKmode, arguments);
  set_mem_align (arguments, PARM_BOUNDARY);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;
        reg = gen_rtx_REG (mode, regno);
        emit_move_insn (reg, adjust_address (arguments, mode, size));
        use_reg (&call_fusage, reg);
        size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value, adjust_address (arguments, Pmode, size));
      emit_move_insn (struct_value, value);
      if (REG_P (struct_value))
        use_reg (&call_fusage, struct_value);
      size += GET_MODE_SIZE (Pmode);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (function, NULL, &call_fusage, 0, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
#ifdef HAVE_untyped_call
  if (HAVE_untyped_call)
    emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
                                      result, result_vector (1, result)));
  else
#endif
#ifdef HAVE_call_value
  if (HAVE_call_value)
    {
      rtx valreg = 0;

      /* Locate the unique return register.  It is not possible to
         express a call that sets more than one return register using
         call_value; use untyped_call for that.  In fact, untyped_call
         only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        if ((mode = apply_result_mode[regno]) != VOIDmode)
          {
            gcc_assert (!valreg); /* HAVE_untyped_call required.  */

            valreg = gen_rtx_REG (mode, regno);
          }

      emit_call_insn (GEN_CALL_VALUE (valreg,
                                      gen_rtx_MEM (FUNCTION_MODE, function),
                                      const0_rtx, NULL_RTX, const0_rtx));

      emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
    }
  else
#endif
    gcc_unreachable ();

  /* Find the CALL insn we just emitted, and attach the register usage
     information.  */
  call_insn = last_call_insn ();
  add_function_usage_to (call_insn, call_fusage);

  /* Restore the stack.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
  else
#endif
    emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);

  OK_DEFER_POP;

  /* Return the address of the result block.  */
  result = copy_addr_to_reg (XEXP (result, 0));
  return convert_memory_address (ptr_mode, result);
}
1558
/* Perform an untyped return.  RESULT is the address of the block of
   saved return registers produced by __builtin_apply; reload those
   registers and return from the current function.  */

static void
expand_builtin_return (rtx result)
{
  int size, align, regno;
  enum machine_mode mode;
  rtx reg;
  rtx call_fusage = 0;

  result = convert_memory_address (Pmode, result);

  /* Called for its side effect of initializing apply_result_mode.  */
  apply_result_size ();
  result = gen_rtx_MEM (BLKmode, result);

#ifdef HAVE_untyped_return
  if (HAVE_untyped_return)
    {
      emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
      emit_barrier ();
      return;
    }
#endif

  /* Restore the return value and note that each value is used.  */
  size = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;
        reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
        emit_move_insn (reg, adjust_address (result, mode, size));

        /* Accumulate USE insns in a detached sequence so they can all
           be emitted together just before the return.  */
        push_to_sequence (call_fusage);
        emit_insn (gen_rtx_USE (VOIDmode, reg));
        call_fusage = get_insns ();
        end_sequence ();
        size += GET_MODE_SIZE (mode);
      }

  /* Put the USE insns before the return.  */
  emit_insn (call_fusage);

  /* Return whatever values was restored by jumping directly to the end
     of the function.  */
  expand_naked_return ();
}
1608
1609 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1610
1611 static enum type_class
1612 type_to_class (tree type)
1613 {
1614 switch (TREE_CODE (type))
1615 {
1616 case VOID_TYPE: return void_type_class;
1617 case INTEGER_TYPE: return integer_type_class;
1618 case ENUMERAL_TYPE: return enumeral_type_class;
1619 case BOOLEAN_TYPE: return boolean_type_class;
1620 case POINTER_TYPE: return pointer_type_class;
1621 case REFERENCE_TYPE: return reference_type_class;
1622 case OFFSET_TYPE: return offset_type_class;
1623 case REAL_TYPE: return real_type_class;
1624 case COMPLEX_TYPE: return complex_type_class;
1625 case FUNCTION_TYPE: return function_type_class;
1626 case METHOD_TYPE: return method_type_class;
1627 case RECORD_TYPE: return record_type_class;
1628 case UNION_TYPE:
1629 case QUAL_UNION_TYPE: return union_type_class;
1630 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1631 ? string_type_class : array_type_class);
1632 case LANG_TYPE: return lang_type_class;
1633 default: return no_type_class;
1634 }
1635 }
1636
1637 /* Expand a call EXP to __builtin_classify_type. */
1638
1639 static rtx
1640 expand_builtin_classify_type (tree exp)
1641 {
1642 if (call_expr_nargs (exp))
1643 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1644 return GEN_INT (no_type_class);
1645 }
1646
/* This helper macro, meant to be used in mathfn_built_in below,
   determines which among a set of three builtin math functions is
   appropriate for a given type mode.  The `F' and `L' cases are
   automatically generated from the `double' case.  It emits the three
   case labels and records the fcode/fcodef/fcodel triple.  */
#define CASE_MATHFN(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
  fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
  fcodel = BUILT_IN_MATHFN##L ; break;
/* Similar to above, but appends _R after any F/L suffix, for the
   reentrant variants (e.g. lgamma_r).  */
#define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
  fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
  fcodel = BUILT_IN_MATHFN##L_R ; break;
1660
/* Return mathematic function equivalent to FN but operating directly
   on TYPE, if available.  If we can't do the conversion, return zero.
   FN may be any of the three precision variants of a function; the
   result is the decl for the variant matching TYPE's main variant
   (double, float, or long double), drawn from implicit_built_in_decls.  */
tree
mathfn_built_in (tree type, enum built_in_function fn)
{
  enum built_in_function fcode, fcodef, fcodel;

  /* Each CASE_MATHFN expands to the three case labels for one function
     family and sets fcode/fcodef/fcodel.  */
  switch (fn)
    {
      CASE_MATHFN (BUILT_IN_ACOS)
      CASE_MATHFN (BUILT_IN_ACOSH)
      CASE_MATHFN (BUILT_IN_ASIN)
      CASE_MATHFN (BUILT_IN_ASINH)
      CASE_MATHFN (BUILT_IN_ATAN)
      CASE_MATHFN (BUILT_IN_ATAN2)
      CASE_MATHFN (BUILT_IN_ATANH)
      CASE_MATHFN (BUILT_IN_CBRT)
      CASE_MATHFN (BUILT_IN_CEIL)
      CASE_MATHFN (BUILT_IN_CEXPI)
      CASE_MATHFN (BUILT_IN_COPYSIGN)
      CASE_MATHFN (BUILT_IN_COS)
      CASE_MATHFN (BUILT_IN_COSH)
      CASE_MATHFN (BUILT_IN_DREM)
      CASE_MATHFN (BUILT_IN_ERF)
      CASE_MATHFN (BUILT_IN_ERFC)
      CASE_MATHFN (BUILT_IN_EXP)
      CASE_MATHFN (BUILT_IN_EXP10)
      CASE_MATHFN (BUILT_IN_EXP2)
      CASE_MATHFN (BUILT_IN_EXPM1)
      CASE_MATHFN (BUILT_IN_FABS)
      CASE_MATHFN (BUILT_IN_FDIM)
      CASE_MATHFN (BUILT_IN_FLOOR)
      CASE_MATHFN (BUILT_IN_FMA)
      CASE_MATHFN (BUILT_IN_FMAX)
      CASE_MATHFN (BUILT_IN_FMIN)
      CASE_MATHFN (BUILT_IN_FMOD)
      CASE_MATHFN (BUILT_IN_FREXP)
      CASE_MATHFN (BUILT_IN_GAMMA)
      CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
      CASE_MATHFN (BUILT_IN_HUGE_VAL)
      CASE_MATHFN (BUILT_IN_HYPOT)
      CASE_MATHFN (BUILT_IN_ILOGB)
      CASE_MATHFN (BUILT_IN_INF)
      CASE_MATHFN (BUILT_IN_ISINF)
      CASE_MATHFN (BUILT_IN_J0)
      CASE_MATHFN (BUILT_IN_J1)
      CASE_MATHFN (BUILT_IN_JN)
      CASE_MATHFN (BUILT_IN_LCEIL)
      CASE_MATHFN (BUILT_IN_LDEXP)
      CASE_MATHFN (BUILT_IN_LFLOOR)
      CASE_MATHFN (BUILT_IN_LGAMMA)
      CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
      CASE_MATHFN (BUILT_IN_LLCEIL)
      CASE_MATHFN (BUILT_IN_LLFLOOR)
      CASE_MATHFN (BUILT_IN_LLRINT)
      CASE_MATHFN (BUILT_IN_LLROUND)
      CASE_MATHFN (BUILT_IN_LOG)
      CASE_MATHFN (BUILT_IN_LOG10)
      CASE_MATHFN (BUILT_IN_LOG1P)
      CASE_MATHFN (BUILT_IN_LOG2)
      CASE_MATHFN (BUILT_IN_LOGB)
      CASE_MATHFN (BUILT_IN_LRINT)
      CASE_MATHFN (BUILT_IN_LROUND)
      CASE_MATHFN (BUILT_IN_MODF)
      CASE_MATHFN (BUILT_IN_NAN)
      CASE_MATHFN (BUILT_IN_NANS)
      CASE_MATHFN (BUILT_IN_NEARBYINT)
      CASE_MATHFN (BUILT_IN_NEXTAFTER)
      CASE_MATHFN (BUILT_IN_NEXTTOWARD)
      CASE_MATHFN (BUILT_IN_POW)
      CASE_MATHFN (BUILT_IN_POWI)
      CASE_MATHFN (BUILT_IN_POW10)
      CASE_MATHFN (BUILT_IN_REMAINDER)
      CASE_MATHFN (BUILT_IN_REMQUO)
      CASE_MATHFN (BUILT_IN_RINT)
      CASE_MATHFN (BUILT_IN_ROUND)
      CASE_MATHFN (BUILT_IN_SCALB)
      CASE_MATHFN (BUILT_IN_SCALBLN)
      CASE_MATHFN (BUILT_IN_SCALBN)
      CASE_MATHFN (BUILT_IN_SIGNIFICAND)
      CASE_MATHFN (BUILT_IN_SIN)
      CASE_MATHFN (BUILT_IN_SINCOS)
      CASE_MATHFN (BUILT_IN_SINH)
      CASE_MATHFN (BUILT_IN_SQRT)
      CASE_MATHFN (BUILT_IN_TAN)
      CASE_MATHFN (BUILT_IN_TANH)
      CASE_MATHFN (BUILT_IN_TGAMMA)
      CASE_MATHFN (BUILT_IN_TRUNC)
      CASE_MATHFN (BUILT_IN_Y0)
      CASE_MATHFN (BUILT_IN_Y1)
      CASE_MATHFN (BUILT_IN_YN)

      default:
        return NULL_TREE;
    }

  /* Select the decl matching TYPE's precision.  */
  if (TYPE_MAIN_VARIANT (type) == double_type_node)
    return implicit_built_in_decls[fcode];
  else if (TYPE_MAIN_VARIANT (type) == float_type_node)
    return implicit_built_in_decls[fcodef];
  else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
    return implicit_built_in_decls[fcodel];
  else
    return NULL_TREE;
}
1766
/* If errno must be maintained, expand the RTL to check if the result,
   TARGET, of a built-in function call, EXP, is NaN, and if so set
   errno to EDOM.  */

static void
expand_errno_check (tree exp, rtx target)
{
  rtx lab = gen_label_rtx ();

  /* Test the result; if it is NaN, set errno=EDOM because
     the argument was not in the domain.  (A value is NaN iff it
     compares unequal to itself, so jump past the errno store when
     TARGET == TARGET.)  */
  emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target),
                           0, lab);

#ifdef TARGET_EDOM
  /* If this built-in doesn't throw an exception, set errno directly.  */
  if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
    {
#ifdef GEN_ERRNO_RTX
      rtx errno_rtx = GEN_ERRNO_RTX;
#else
      rtx errno_rtx
        = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
#endif
      emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
      emit_label (lab);
      return;
    }
#endif

  /* We can't set errno=EDOM directly; let the library call do it.
     Pop the arguments right away in case the call gets deleted.  */
  NO_DEFER_POP;
  expand_call (exp, target, 0);
  OK_DEFER_POP;
  emit_label (lab);
}
1804
/* Expand a call to one of the builtin math functions (sqrt, exp, or log).
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, insns, before_call;
  tree fndecl = get_callee_fndecl (exp);
  enum machine_mode mode;
  /* Whether the expanded code must also update errno on a domain error.  */
  bool errno_set = false;
  tree arg, narg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  /* Pick the optab for this function code and note whether errno
     handling is needed.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SQRT):
      /* sqrt only sets EDOM for negative arguments, so a provably
	 nonnegative argument needs no errno handling.  */
      errno_set = ! tree_expr_nonnegative_p (arg);
      builtin_optab = sqrt_optab;
      break;
    CASE_FLT_FN (BUILT_IN_EXP):
      errno_set = true; builtin_optab = exp_optab; break;
    CASE_FLT_FN (BUILT_IN_EXP10):
    CASE_FLT_FN (BUILT_IN_POW10):
      errno_set = true; builtin_optab = exp10_optab; break;
    CASE_FLT_FN (BUILT_IN_EXP2):
      errno_set = true; builtin_optab = exp2_optab; break;
    CASE_FLT_FN (BUILT_IN_EXPM1):
      errno_set = true; builtin_optab = expm1_optab; break;
    CASE_FLT_FN (BUILT_IN_LOGB):
      errno_set = true; builtin_optab = logb_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG):
      errno_set = true; builtin_optab = log_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG10):
      errno_set = true; builtin_optab = log10_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG2):
      errno_set = true; builtin_optab = log2_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG1P):
      errno_set = true; builtin_optab = log1p_optab; break;
    CASE_FLT_FN (BUILT_IN_ASIN):
      builtin_optab = asin_optab; break;
    CASE_FLT_FN (BUILT_IN_ACOS):
      builtin_optab = acos_optab; break;
    CASE_FLT_FN (BUILT_IN_TAN):
      builtin_optab = tan_optab; break;
    CASE_FLT_FN (BUILT_IN_ATAN):
      builtin_optab = atan_optab; break;
    CASE_FLT_FN (BUILT_IN_FLOOR):
      builtin_optab = floor_optab; break;
    CASE_FLT_FN (BUILT_IN_CEIL):
      builtin_optab = ceil_optab; break;
    CASE_FLT_FN (BUILT_IN_TRUNC):
      builtin_optab = btrunc_optab; break;
    CASE_FLT_FN (BUILT_IN_ROUND):
      builtin_optab = round_optab; break;
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
      builtin_optab = nearbyint_optab;
      if (flag_trapping_math)
	break;
      /* Else fallthrough and expand as rint.  */
    CASE_FLT_FN (BUILT_IN_RINT):
      builtin_optab = rint_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* errno only needs to be set when math errno handling is requested
     and NaNs exist in this mode to signal the domain error with.  */
  if (! flag_errno_math || ! HONOR_NANS (mode))
    errno_set = false;

  /* Before working hard, check whether the instruction is available.  */
  if (builtin_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
    {
      target = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      narg = builtin_save_expr (arg);
      if (narg != arg)
	{
	  arg = narg;
	  exp = build_call_expr (fndecl, 1, arg);
	}

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      /* Compute into TARGET.
	 Set TARGET to wherever the result comes back.  */
      target = expand_unop (mode, builtin_optab, op0, target, 0);

      if (target != 0)
	{
	  if (errno_set)
	    expand_errno_check (exp, target);

	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return target;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call to the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  before_call = get_last_insn ();

  target = expand_call (exp, target, target == const0_rtx);

  /* If this is a sqrt operation and we don't care about errno, try to
     attach a REG_EQUAL note with a SQRT rtx to the emitted libcall.
     This allows the semantics of the libcall to be visible to the RTL
     optimizers.  */
  if (builtin_optab == sqrt_optab && !errno_set)
    {
      /* Search backwards through the insns emitted by expand_call looking
	 for the instruction with the REG_RETVAL note.  */
      rtx last = get_last_insn ();
      while (last != before_call)
	{
	  if (find_reg_note (last, REG_RETVAL, NULL))
	    {
	      rtx note = find_reg_note (last, REG_EQUAL, NULL);
	      /* Check that the REG_EQUAL note is an EXPR_LIST with
		 two elements, i.e. symbol_ref(sqrt) and the operand.  */
	      if (note
		  && GET_CODE (note) == EXPR_LIST
		  && GET_CODE (XEXP (note, 0)) == EXPR_LIST
		  && XEXP (XEXP (note, 0), 1) != NULL_RTX
		  && XEXP (XEXP (XEXP (note, 0), 1), 1) == NULL_RTX)
		{
		  rtx operand = XEXP (XEXP (XEXP (note, 0), 1), 0);
		  /* Check operand is a register with expected mode.  */
		  if (operand
		      && REG_P (operand)
		      && GET_MODE (operand) == mode)
		    {
		      /* Replace the REG_EQUAL note with a SQRT rtx.  */
		      rtx equiv = gen_rtx_SQRT (mode, operand);
		      set_unique_reg_note (last, REG_EQUAL, equiv);
		    }
		}
	      break;
	    }
	  last = PREV_INSN (last);
	}
    }

  return target;
}
1970
/* Expand a call to the builtin binary math functions (pow and atan2).
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, op1, insns;
  /* Most handled builtins take two REAL_TYPE arguments; the scalbn/ldexp
     family takes an integer second argument instead.  */
  int op1_type = REAL_TYPE;
  tree fndecl = get_callee_fndecl (exp);
  tree arg0, arg1, narg;
  enum machine_mode mode;
  bool errno_set = true;
  /* True while EXP's argument list needs no rebuilding.  */
  bool stable = true;

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
    CASE_FLT_FN (BUILT_IN_LDEXP):
      op1_type = INTEGER_TYPE;
      /* FALLTHRU */
    default:
      break;
    }

  if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_POW):
      builtin_optab = pow_optab; break;
    CASE_FLT_FN (BUILT_IN_ATAN2):
      builtin_optab = atan2_optab; break;
    CASE_FLT_FN (BUILT_IN_SCALB):
      /* scalb is only equivalent to the scalb optab for radix-2
	 floating point formats.  */
      if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
	return 0;
      builtin_optab = scalb_optab; break;
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
      if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
	return 0;
      /* Fall through... */
    CASE_FLT_FN (BUILT_IN_LDEXP):
      builtin_optab = ldexp_optab; break;
    CASE_FLT_FN (BUILT_IN_FMOD):
      builtin_optab = fmod_optab; break;
    CASE_FLT_FN (BUILT_IN_REMAINDER):
    CASE_FLT_FN (BUILT_IN_DREM):
      builtin_optab = remainder_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Before working hard, check whether the instruction is available.  */
  if (builtin_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
    return NULL_RTX;

  target = gen_reg_rtx (mode);

  if (! flag_errno_math || ! HONOR_NANS (mode))
    errno_set = false;

  /* Always stabilize the argument list so the arguments can be
     re-expanded (for the errno check or the fallback libcall)
     without repeating side effects.  */
  narg = builtin_save_expr (arg1);
  if (narg != arg1)
    {
      arg1 = narg;
      stable = false;
    }
  narg = builtin_save_expr (arg0);
  if (narg != arg0)
    {
      arg0 = narg;
      stable = false;
    }

  /* Rebuild the call with the stabilized arguments if anything changed.  */
  if (! stable)
    exp = build_call_expr (fndecl, 2, arg0, arg1);

  op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
  op1 = expand_normal (arg1);

  start_sequence ();

  /* Compute into TARGET.
     Set TARGET to wherever the result comes back.  */
  target = expand_binop (mode, builtin_optab, op0, op1,
			 target, 0, OPTAB_DIRECT);

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call to the library function
     with the stabilized argument list.  */
  if (target == 0)
    {
      end_sequence ();
      return expand_call (exp, target, target == const0_rtx);
    }

  if (errno_set)
    expand_errno_check (exp, target);

  /* Output the entire sequence.  */
  insns = get_insns ();
  end_sequence ();
  emit_insn (insns);

  return target;
}
2090
2091 /* Expand a call to the builtin sin and cos math functions.
2092 Return NULL_RTX if a normal call should be emitted rather than expanding the
2093 function in-line. EXP is the expression that is a call to the builtin
2094 function; if convenient, the result should be placed in TARGET.
2095 SUBTARGET may be used as the target for computing one of EXP's
2096 operands. */
2097
2098 static rtx
2099 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2100 {
2101 optab builtin_optab;
2102 rtx op0, insns;
2103 tree fndecl = get_callee_fndecl (exp);
2104 enum machine_mode mode;
2105 tree arg, narg;
2106
2107 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2108 return NULL_RTX;
2109
2110 arg = CALL_EXPR_ARG (exp, 0);
2111
2112 switch (DECL_FUNCTION_CODE (fndecl))
2113 {
2114 CASE_FLT_FN (BUILT_IN_SIN):
2115 CASE_FLT_FN (BUILT_IN_COS):
2116 builtin_optab = sincos_optab; break;
2117 default:
2118 gcc_unreachable ();
2119 }
2120
2121 /* Make a suitable register to place result in. */
2122 mode = TYPE_MODE (TREE_TYPE (exp));
2123
2124 /* Check if sincos insn is available, otherwise fallback
2125 to sin or cos insn. */
2126 if (builtin_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
2127 switch (DECL_FUNCTION_CODE (fndecl))
2128 {
2129 CASE_FLT_FN (BUILT_IN_SIN):
2130 builtin_optab = sin_optab; break;
2131 CASE_FLT_FN (BUILT_IN_COS):
2132 builtin_optab = cos_optab; break;
2133 default:
2134 gcc_unreachable ();
2135 }
2136
2137 /* Before working hard, check whether the instruction is available. */
2138 if (builtin_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2139 {
2140 target = gen_reg_rtx (mode);
2141
2142 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2143 need to expand the argument again. This way, we will not perform
2144 side-effects more the once. */
2145 narg = save_expr (arg);
2146 if (narg != arg)
2147 {
2148 arg = narg;
2149 exp = build_call_expr (fndecl, 1, arg);
2150 }
2151
2152 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2153
2154 start_sequence ();
2155
2156 /* Compute into TARGET.
2157 Set TARGET to wherever the result comes back. */
2158 if (builtin_optab == sincos_optab)
2159 {
2160 int result;
2161
2162 switch (DECL_FUNCTION_CODE (fndecl))
2163 {
2164 CASE_FLT_FN (BUILT_IN_SIN):
2165 result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
2166 break;
2167 CASE_FLT_FN (BUILT_IN_COS):
2168 result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
2169 break;
2170 default:
2171 gcc_unreachable ();
2172 }
2173 gcc_assert (result);
2174 }
2175 else
2176 {
2177 target = expand_unop (mode, builtin_optab, op0, target, 0);
2178 }
2179
2180 if (target != 0)
2181 {
2182 /* Output the entire sequence. */
2183 insns = get_insns ();
2184 end_sequence ();
2185 emit_insn (insns);
2186 return target;
2187 }
2188
2189 /* If we were unable to expand via the builtin, stop the sequence
2190 (without outputting the insns) and call to the library function
2191 with the stabilized argument list. */
2192 end_sequence ();
2193 }
2194
2195 target = expand_call (exp, target, target == const0_rtx);
2196
2197 return target;
2198 }
2199
/* Expand a call to one of the builtin math functions that operate on
   floating point argument and output an integer result (ilogb, isinf,
   isnan, etc).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_interclass_mathfn (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  enum insn_code icode;
  rtx op0;
  tree fndecl = get_callee_fndecl (exp);
  enum machine_mode mode;
  bool errno_set = false;
  tree arg, narg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_ILOGB):
      errno_set = true; builtin_optab = ilogb_optab; break;
    CASE_FLT_FN (BUILT_IN_ISINF):
      builtin_optab = isinf_optab; break;
    default:
      gcc_unreachable ();
    }

  /* There's no easy way to detect the case we need to set EDOM, so
     punt to a library call when math errno handling is required.  */
  if (flag_errno_math && errno_set)
    return NULL_RTX;

  /* Optab mode depends on the mode of the input argument, not on the
     (integer) result type as with the other math expanders.  */
  mode = TYPE_MODE (TREE_TYPE (arg));

  icode = builtin_optab->handlers[(int) mode].insn_code;

  /* Before working hard, check whether the instruction is available.  */
  if (icode != CODE_FOR_nothing)
    {
      /* Make a suitable register to place result in.  */
      if (!target
	  || GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
	target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

      gcc_assert (insn_data[icode].operand[0].predicate
		  (target, GET_MODE (target)));

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      narg = builtin_save_expr (arg);
      if (narg != arg)
	{
	  arg = narg;
	  exp = build_call_expr (fndecl, 1, arg);
	}

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      if (mode != GET_MODE (op0))
	op0 = convert_to_mode (mode, op0, 0);

      /* Compute into TARGET.
	 Set TARGET to wherever the result comes back.  */
      emit_unop_insn (icode, target, op0, UNKNOWN);
      return target;
    }

  target = expand_call (exp, target, target == const0_rtx);

  return target;
}
2279
/* Expand a call to the builtin sincos math function.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function.  On success the expansion stores through the sin/cos pointer
   arguments and returns const0_rtx (sincos itself returns void).  */

static rtx
expand_builtin_sincos (tree exp)
{
  rtx op0, op1, op2, target1, target2;
  enum machine_mode mode;
  tree arg, sinp, cosp;
  int result;

  if (!validate_arglist (exp, REAL_TYPE,
			 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* sincos (arg, &sin_result, &cos_result).  */
  arg = CALL_EXPR_ARG (exp, 0);
  sinp = CALL_EXPR_ARG (exp, 1);
  cosp = CALL_EXPR_ARG (exp, 2);

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Check if sincos insn is available, otherwise emit the call.  */
  if (sincos_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
    return NULL_RTX;

  target1 = gen_reg_rtx (mode);
  target2 = gen_reg_rtx (mode);

  op0 = expand_normal (arg);
  /* Expand the dereferenced pointers to get the memory locations the
     results must be stored into.  */
  op1 = expand_normal (build_fold_indirect_ref (sinp));
  op2 = expand_normal (build_fold_indirect_ref (cosp));

  /* Compute into target1 and target2.
     Set TARGET to wherever the result comes back.  */
  result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
  gcc_assert (result);

  /* Move target1 and target2 to the memory locations indicated
     by op1 and op2.  */
  emit_move_insn (op1, target1);
  emit_move_insn (op2, target2);

  return const0_rtx;
}
2327
/* Expand a call to the internal cexpi builtin to the sincos math function.
   EXP is the expression that is a call to the builtin function; if convenient,
   the result should be placed in TARGET.  SUBTARGET may be used as the target
   for computing one of EXP's operands.

   Three strategies are tried in order: the target's sincos insn pattern, a
   library call to sincos, and finally a library call to cexp.  */

static rtx
expand_builtin_cexpi (tree exp, rtx target, rtx subtarget)
{
  tree fndecl = get_callee_fndecl (exp);
  tree arg, type;
  enum machine_mode mode;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Try expanding via a sincos optab, fall back to emitting a libcall
     to sincos or cexp.  We are sure we have sincos or cexp because cexpi
     is only generated from sincos, cexp or if we have either of them.  */
  if (sincos_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
    {
      /* op1 receives the sin result, op2 the cos result.  */
      op1 = gen_reg_rtx (mode);
      op2 = gen_reg_rtx (mode);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      /* Compute into op1 and op2.  */
      expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
    }
  else if (TARGET_HAS_SINCOS)
    {
      tree call, fn = NULL_TREE;
      tree top1, top2;
      rtx op1a, op2a;

      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	fn = built_in_decls[BUILT_IN_SINCOSF];
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	fn = built_in_decls[BUILT_IN_SINCOS];
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	fn = built_in_decls[BUILT_IN_SINCOSL];
      else
	gcc_unreachable ();

      /* Create stack temporaries for sincos to store through, and wrap
	 their addresses as trees so they can be passed to the call.  */
      op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
      op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
      op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
      op2a = copy_to_mode_reg (Pmode, XEXP (op2, 0));
      top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
      top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);

      /* Make sure not to fold the sincos call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
				      call, 3, arg, top1, top2));
    }
  else
    {
      tree call, fn = NULL_TREE, narg;
      tree ctype = build_complex_type (type);

      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	fn = built_in_decls[BUILT_IN_CEXPF];
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	fn = built_in_decls[BUILT_IN_CEXP];
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	fn = built_in_decls[BUILT_IN_CEXPL];
      else
	gcc_unreachable ();

      /* If we don't have a decl for cexp create one.  This is the
	 friendliest fallback if the user calls __builtin_cexpi
	 without full target C99 function support.  */
      if (fn == NULL_TREE)
	{
	  tree fntype;
	  const char *name = NULL;

	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	    name = "cexpf";
	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	    name = "cexp";
	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	    name = "cexpl";

	  fntype = build_function_type_list (ctype, ctype, NULL_TREE);
	  fn = build_fn_decl (name, fntype);
	}

      /* cexpi(arg) == cexp(0 + arg*i).  */
      narg = fold_build2 (COMPLEX_EXPR, ctype,
			  build_real (type, dconst0), arg);

      /* Make sure not to fold the cexp call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      return expand_expr (build_call_nary (ctype, call, 1, narg),
			  target, VOIDmode, EXPAND_NORMAL);
    }

  /* Now build the proper return type: cos(arg) + sin(arg)*i.  */
  return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
			      make_tree (TREE_TYPE (arg), op2),
			      make_tree (TREE_TYPE (arg), op1)),
		      target, VOIDmode, EXPAND_NORMAL);
}
2436
/* Expand a call to one of the builtin rounding functions gcc defines
   as an extension (lfloor and lceil).  As these are gcc extensions we
   do not need to worry about setting errno to EDOM.
   If expanding via optab fails, lower expression to (int)(floor(x)).
   EXP is the expression that is a call to the builtin function;
   if convenient, the result should be placed in TARGET.  SUBTARGET may
   be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_int_roundingfn (tree exp, rtx target, rtx subtarget)
{
  convert_optab builtin_optab;
  rtx op0, insns, tmp;
  tree fndecl = get_callee_fndecl (exp);
  /* Floating-point function to lower to if the optab expansion fails.  */
  enum built_in_function fallback_fn;
  tree fallback_fndecl;
  enum machine_mode mode;
  tree arg, narg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    gcc_unreachable ();

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_LCEIL):
    CASE_FLT_FN (BUILT_IN_LLCEIL):
      builtin_optab = lceil_optab;
      fallback_fn = BUILT_IN_CEIL;
      break;

    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      builtin_optab = lfloor_optab;
      fallback_fn = BUILT_IN_FLOOR;
      break;

    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  target = gen_reg_rtx (mode);

  /* Wrap the computation of the argument in a SAVE_EXPR, as we may
     need to expand the argument again.  This way, we will not perform
     side-effects more than once.  */
  narg = builtin_save_expr (arg);
  if (narg != arg)
    {
      arg = narg;
      exp = build_call_expr (fndecl, 1, arg);
    }

  op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

  start_sequence ();

  /* Compute into TARGET.  */
  if (expand_sfix_optab (target, op0, builtin_optab))
    {
      /* Output the entire sequence.  */
      insns = get_insns ();
      end_sequence ();
      emit_insn (insns);
      return target;
    }

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns).  */
  end_sequence ();

  /* Fall back to floating point rounding optab.  */
  fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);

  /* For non-C99 targets we may end up without a fallback fndecl here
     if the user called __builtin_lfloor directly.  In this case emit
     a call to the floor/ceil variants nevertheless.  This should result
     in the best user experience for not full C99 targets.  */
  if (fallback_fndecl == NULL_TREE)
    {
      tree fntype;
      const char *name = NULL;

      /* Pick the library name matching the builtin's precision.  */
      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_LCEIL:
	case BUILT_IN_LLCEIL:
	  name = "ceil";
	  break;
	case BUILT_IN_LCEILF:
	case BUILT_IN_LLCEILF:
	  name = "ceilf";
	  break;
	case BUILT_IN_LCEILL:
	case BUILT_IN_LLCEILL:
	  name = "ceill";
	  break;
	case BUILT_IN_LFLOOR:
	case BUILT_IN_LLFLOOR:
	  name = "floor";
	  break;
	case BUILT_IN_LFLOORF:
	case BUILT_IN_LLFLOORF:
	  name = "floorf";
	  break;
	case BUILT_IN_LFLOORL:
	case BUILT_IN_LLFLOORL:
	  name = "floorl";
	  break;
	default:
	  gcc_unreachable ();
	}

      fntype = build_function_type_list (TREE_TYPE (arg),
					 TREE_TYPE (arg), NULL_TREE);
      fallback_fndecl = build_fn_decl (name, fntype);
    }

  /* Lower to a call of the floating-point rounding function on the
     stabilized argument.  */
  exp = build_call_expr (fallback_fndecl, 1, arg);

  tmp = expand_normal (exp);

  /* Truncate the result of floating point optab to integer
     via expand_fix ().  */
  target = gen_reg_rtx (mode);
  expand_fix (target, tmp, 0);

  return target;
}
2570
/* Expand a call to one of the builtin math functions doing integer
   conversion (lrint).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_int_roundingfn_2 (tree exp, rtx target, rtx subtarget)
{
  convert_optab builtin_optab;
  rtx op0, insns;
  tree fndecl = get_callee_fndecl (exp);
  tree arg, narg;
  enum machine_mode mode;

  /* There's no easy way to detect the case we need to set EDOM, so
     leave it to the library call when math errno handling is on.  */
  if (flag_errno_math)
    return NULL_RTX;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    gcc_unreachable ();

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_LLRINT):
      builtin_optab = lrint_optab; break;
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_LLROUND):
      builtin_optab = lround_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  target = gen_reg_rtx (mode);

  /* Wrap the computation of the argument in a SAVE_EXPR, as we may
     need to expand the argument again.  This way, we will not perform
     side-effects more than once.  */
  narg = builtin_save_expr (arg);
  if (narg != arg)
    {
      arg = narg;
      exp = build_call_expr (fndecl, 1, arg);
    }

  op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

  start_sequence ();

  if (expand_sfix_optab (target, op0, builtin_optab))
    {
      /* Output the entire sequence.  */
      insns = get_insns ();
      end_sequence ();
      emit_insn (insns);
      return target;
    }

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call to the library function
     with the stabilized argument list.  */
  end_sequence ();

  target = expand_call (exp, target, target == const0_rtx);

  return target;
}
2645
/* To evaluate powi(x,n), the floating point value x raised to the
   constant integer exponent n, we use a hybrid algorithm that
   combines the "window method" with look-up tables.  For an
   introduction to exponentiation algorithms and "addition chains",
   see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
   "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
   3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
   Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998.  */

/* Provide a default value for POWI_MAX_MULTS, the maximum number of
   multiplications to inline before calling the system library's pow
   function.  powi(x,n) requires at worst 2*bits(n)-2 multiplications,
   so this default never requires calling pow, powf or powl.  */

#ifndef POWI_MAX_MULTS
#define POWI_MAX_MULTS  (2*HOST_BITS_PER_WIDE_INT-2)
#endif

/* The size of the "optimal power tree" lookup table.  All
   exponents less than this value are simply looked up in the
   powi_table below.  This threshold is also used to size the
   cache of pseudo registers that hold intermediate results.  */
#define POWI_TABLE_SIZE 256

/* The size, in bits of the window, used in the "window method"
   exponentiation algorithm.  This is equivalent to a radix of
   (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method".  */
#define POWI_WINDOW_SIZE 3

/* The following table is an efficient representation of an
   "optimal power tree".  For each value, i, the corresponding
   value, j, in the table states that an optimal evaluation
   sequence for calculating pow(x,i) can be found by evaluating
   pow(x,j)*pow(x,i-j).  An optimal power tree for the first
   100 integers is given in Knuth's "Seminumerical algorithms".  */

static const unsigned char powi_table[POWI_TABLE_SIZE] =
  {
      0,   1,   1,   2,   2,   3,   3,   4,  /*   0 -   7 */
      4,   6,   5,   6,   6,  10,   7,   9,  /*   8 -  15 */
      8,  16,   9,  16,  10,  12,  11,  13,  /*  16 -  23 */
     12,  17,  13,  18,  14,  24,  15,  26,  /*  24 -  31 */
     16,  17,  17,  19,  18,  33,  19,  26,  /*  32 -  39 */
     20,  25,  21,  40,  22,  27,  23,  44,  /*  40 -  47 */
     24,  32,  25,  34,  26,  29,  27,  44,  /*  48 -  55 */
     28,  31,  29,  34,  30,  60,  31,  36,  /*  56 -  63 */
     32,  64,  33,  34,  34,  46,  35,  37,  /*  64 -  71 */
     36,  65,  37,  50,  38,  48,  39,  69,  /*  72 -  79 */
     40,  49,  41,  43,  42,  51,  43,  58,  /*  80 -  87 */
     44,  64,  45,  47,  46,  59,  47,  76,  /*  88 -  95 */
     48,  65,  49,  66,  50,  67,  51,  66,  /*  96 - 103 */
     52,  70,  53,  74,  54, 104,  55,  74,  /* 104 - 111 */
     56,  64,  57,  69,  58,  78,  59,  68,  /* 112 - 119 */
     60,  61,  61,  80,  62,  75,  63,  68,  /* 120 - 127 */
     64,  65,  65, 128,  66, 129,  67,  90,  /* 128 - 135 */
     68,  73,  69, 131,  70,  94,  71,  88,  /* 136 - 143 */
     72, 128,  73,  98,  74, 132,  75, 121,  /* 144 - 151 */
     76, 102,  77, 124,  78, 132,  79, 106,  /* 152 - 159 */
     80,  97,  81, 160,  82,  99,  83, 134,  /* 160 - 167 */
     84,  86,  85,  95,  86, 160,  87, 100,  /* 168 - 175 */
     88, 113,  89,  98,  90, 107,  91, 122,  /* 176 - 183 */
     92, 111,  93, 102,  94, 126,  95, 150,  /* 184 - 191 */
     96, 128,  97, 130,  98, 133,  99, 195,  /* 192 - 199 */
    100, 128, 101, 123, 102, 164, 103, 138,  /* 200 - 207 */
    104, 145, 105, 146, 106, 109, 107, 149,  /* 208 - 215 */
    108, 200, 109, 146, 110, 170, 111, 157,  /* 216 - 223 */
    112, 128, 113, 130, 114, 182, 115, 132,  /* 224 - 231 */
    116, 200, 117, 132, 118, 158, 119, 206,  /* 232 - 239 */
    120, 240, 121, 162, 122, 147, 123, 152,  /* 240 - 247 */
    124, 166, 125, 214, 126, 138, 127, 153,  /* 248 - 255 */
  };
2717
2718
2719 /* Return the number of multiplications required to calculate
2720 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2721 subroutine of powi_cost. CACHE is an array indicating
2722 which exponents have already been calculated. */
2723
2724 static int
2725 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
2726 {
2727 /* If we've already calculated this exponent, then this evaluation
2728 doesn't require any additional multiplications. */
2729 if (cache[n])
2730 return 0;
2731
2732 cache[n] = true;
2733 return powi_lookup_cost (n - powi_table[n], cache)
2734 + powi_lookup_cost (powi_table[n], cache) + 1;
2735 }
2736
2737 /* Return the number of multiplications required to calculate
2738 powi(x,n) for an arbitrary x, given the exponent N. This
2739 function needs to be kept in sync with expand_powi below. */
2740
2741 static int
2742 powi_cost (HOST_WIDE_INT n)
2743 {
2744 bool cache[POWI_TABLE_SIZE];
2745 unsigned HOST_WIDE_INT digit;
2746 unsigned HOST_WIDE_INT val;
2747 int result;
2748
2749 if (n == 0)
2750 return 0;
2751
2752 /* Ignore the reciprocal when calculating the cost. */
2753 val = (n < 0) ? -n : n;
2754
2755 /* Initialize the exponent cache. */
2756 memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
2757 cache[1] = true;
2758
2759 result = 0;
2760
2761 while (val >= POWI_TABLE_SIZE)
2762 {
2763 if (val & 1)
2764 {
2765 digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2766 result += powi_lookup_cost (digit, cache)
2767 + POWI_WINDOW_SIZE + 1;
2768 val >>= POWI_WINDOW_SIZE;
2769 }
2770 else
2771 {
2772 val >>= 1;
2773 result++;
2774 }
2775 }
2776
2777 return result + powi_lookup_cost (val, cache);
2778 }
2779
/* Recursive subroutine of expand_powi.  This function takes the array,
   CACHE, of already calculated exponents and an exponent N and returns
   an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE.  */

static rtx
expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
{
  unsigned HOST_WIDE_INT digit;
  rtx target, result;
  rtx op0, op1;

  if (n < POWI_TABLE_SIZE)
    {
      /* Small exponents are memoized; return the cached register if
	 this power has been built already.  */
      if (cache[n])
	return cache[n];

      /* Record TARGET in the cache before recursing so that the
	 recursive calls can reuse this entry.  */
      target = gen_reg_rtx (mode);
      cache[n] = target;

      /* NOTE(review): powi_table presumably splits N into two strictly
	 smaller exponents so the recursion terminates — confirm against
	 the table's construction above.  */
      op0 = expand_powi_1 (mode, n - powi_table[n], cache);
      op1 = expand_powi_1 (mode, powi_table[n], cache);
    }
  else if (n & 1)
    {
      /* Odd exponent too large for the table: split off the low
	 POWI_WINDOW_SIZE bits and combine the two partial powers.  */
      target = gen_reg_rtx (mode);
      digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
      op0 = expand_powi_1 (mode, n - digit, cache);
      op1 = expand_powi_1 (mode, digit, cache);
    }
  else
    {
      /* Even exponent: square x**(n/2).  */
      target = gen_reg_rtx (mode);
      op0 = expand_powi_1 (mode, n >> 1, cache);
      op1 = op0;
    }

  /* Multiply the two partial results, forcing the value into TARGET
     (which may be a cache entry referenced by later lookups).  */
  result = expand_mult (mode, op0, op1, target, 0);
  if (result != target)
    emit_move_insn (target, result);
  return target;
}
2821
2822 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
2823 floating point operand in mode MODE, and N is the exponent. This
2824 function needs to be kept in sync with powi_cost above. */
2825
2826 static rtx
2827 expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
2828 {
2829 unsigned HOST_WIDE_INT val;
2830 rtx cache[POWI_TABLE_SIZE];
2831 rtx result;
2832
2833 if (n == 0)
2834 return CONST1_RTX (mode);
2835
2836 val = (n < 0) ? -n : n;
2837
2838 memset (cache, 0, sizeof (cache));
2839 cache[1] = x;
2840
2841 result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
2842
2843 /* If the original exponent was negative, reciprocate the result. */
2844 if (n < 0)
2845 result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2846 result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2847
2848 return result;
2849 }
2850
/* Expand a call to the pow built-in mathematical function.  Return NULL_RTX if
   a normal call should be emitted rather than expanding the function
   in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_pow (tree exp, rtx target, rtx subtarget)
{
  tree arg0, arg1;
  tree fn, narg0;
  tree type = TREE_TYPE (exp);
  REAL_VALUE_TYPE cint, c, c2;
  HOST_WIDE_INT n;
  rtx op, op2;
  enum machine_mode mode = TYPE_MODE (type);

  if (! validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);

  /* A non-constant (or overflowed) exponent gets no special treatment;
     expand through the generic two-argument math path.  */
  if (TREE_CODE (arg1) != REAL_CST
      || TREE_OVERFLOW (arg1))
    return expand_builtin_mathfn_2 (exp, target, subtarget);

  /* Handle constant exponents.  */

  /* For integer valued exponents we can expand to an optimal multiplication
     sequence using expand_powi.  */
  c = TREE_REAL_CST (arg1);
  n = real_to_integer (&c);
  real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
  /* Small exponents (-1..2) are always worthwhile; larger ones only
     under unsafe math, when not optimizing for size, and when the
     multiply count stays within POWI_MAX_MULTS.  */
  if (real_identical (&c, &cint)
      && ((n >= -1 && n <= 2)
	  || (flag_unsafe_math_optimizations
	      && !optimize_size
	      && powi_cost (n) <= POWI_MAX_MULTS)))
    {
      op = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
      if (n != 1)
	{
	  op = force_reg (mode, op);
	  op = expand_powi (op, mode, n);
	}
      return op;
    }

  /* ARG0 is used more than once below; save it so side effects are
     evaluated only once.  */
  narg0 = builtin_save_expr (arg0);

  /* If the exponent is not integer valued, check if it is half of an integer.
     In this case we can expand to sqrt (x) * x**(n/2).  */
  fn = mathfn_built_in (type, BUILT_IN_SQRT);
  if (fn != NULL_TREE)
    {
      /* 2*c integral <=> c is half of an integer n.  */
      real_arithmetic (&c2, MULT_EXPR, &c, &dconst2);
      n = real_to_integer (&c2);
      real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
      if (real_identical (&c2, &cint)
	  && ((flag_unsafe_math_optimizations
	       && !optimize_size
	       && powi_cost (n/2) <= POWI_MAX_MULTS)
	      || n == 1))
	{
	  tree call_expr = build_call_expr (fn, 1, narg0);
	  op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
	  if (n != 1)
	    {
	      op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
	      op2 = force_reg (mode, op2);
	      op2 = expand_powi (op2, mode, abs (n / 2));
	      op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
					0, OPTAB_LIB_WIDEN);
	      /* If the original exponent was negative, reciprocate the
		 result.  */
	      if (n < 0)
		op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
				   op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
	    }
	  return op;
	}
    }

  /* Try if the exponent is a third of an integer.  In this case
     we can expand to x**(n/3) * cbrt(x)**(n%3).  As cbrt (x) is
     different from pow (x, 1./3.) due to rounding and behavior
     with negative x we need to constrain this transformation to
     unsafe math and positive x or finite math.  */
  fn = mathfn_built_in (type, BUILT_IN_CBRT);
  if (fn != NULL_TREE
      && flag_unsafe_math_optimizations
      && (tree_expr_nonnegative_p (arg0)
	  || !HONOR_NANS (mode)))
    {
      /* Round 3*c to the nearest integer n, then check that n/3
	 converts back exactly to c in MODE.  */
      real_arithmetic (&c2, MULT_EXPR, &c, &dconst3);
      real_round (&c2, mode, &c2);
      n = real_to_integer (&c2);
      real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
      real_arithmetic (&c2, RDIV_EXPR, &cint, &dconst3);
      real_convert (&c2, mode, &c2);
      if (real_identical (&c2, &c)
	  && ((!optimize_size
	       && powi_cost (n/3) <= POWI_MAX_MULTS)
	      || n == 1))
	{
	  tree call_expr = build_call_expr (fn, 1,narg0);
	  op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
	  /* cbrt(x)**(n%3): when |n| mod 3 is 2 we need cbrt(x)**2.  */
	  if (abs (n) % 3 == 2)
	    op = expand_simple_binop (mode, MULT, op, op, op,
				      0, OPTAB_LIB_WIDEN);
	  if (n != 1)
	    {
	      op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
	      op2 = force_reg (mode, op2);
	      op2 = expand_powi (op2, mode, abs (n / 3));
	      op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
					0, OPTAB_LIB_WIDEN);
	      /* If the original exponent was negative, reciprocate the
		 result.  */
	      if (n < 0)
		op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
				   op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
	    }
	  return op;
	}
    }

  /* Fall back to optab expansion.  */
  return expand_builtin_mathfn_2 (exp, target, subtarget);
}
2981
/* Expand a call to the powi built-in mathematical function.  Return NULL_RTX if
   a normal call should be emitted rather than expanding the function
   in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_powi (tree exp, rtx target, rtx subtarget)
{
  tree arg0, arg1;
  rtx op0, op1;
  enum machine_mode mode;
  enum machine_mode mode2;

  if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Handle constant power.  */

  if (TREE_CODE (arg1) == INTEGER_CST
      && !TREE_OVERFLOW (arg1))
    {
      HOST_WIDE_INT n = TREE_INT_CST_LOW (arg1);

      /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
	 Otherwise, check the number of multiplications required.
	 The HIGH-word test restricts this to exponents whose value is
	 representable in the low HOST_WIDE_INT word N.  */
      if ((TREE_INT_CST_HIGH (arg1) == 0
	   || TREE_INT_CST_HIGH (arg1) == -1)
	  && ((n >= -1 && n <= 2)
	      || (! optimize_size
		  && powi_cost (n) <= POWI_MAX_MULTS)))
	{
	  op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
	  op0 = force_reg (mode, op0);
	  return expand_powi (op0, mode, n);
	}
    }

  /* Emit a libcall to libgcc.  */

  /* Mode of the 2nd argument must match that of an int.  */
  mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);

  if (target == NULL_RTX)
    target = gen_reg_rtx (mode);

  /* Convert both operands to the modes the libcall expects.  */
  op0 = expand_expr (arg0, subtarget, mode, EXPAND_NORMAL);
  if (GET_MODE (op0) != mode)
    op0 = convert_to_mode (mode, op0, 0);
  op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
  if (GET_MODE (op1) != mode2)
    op1 = convert_to_mode (mode2, op1, 0);

  target = emit_library_call_value (powi_optab->handlers[(int) mode].libfunc,
				    target, LCT_CONST_MAKE_BLOCK, mode, 2,
				    op0, mode, op1, mode2);

  return target;
}
3044
/* Expand expression EXP which is a call to the strlen builtin.  Return
   NULL_RTX if we failed the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strlen (tree exp, rtx target,
		       enum machine_mode target_mode)
{
  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      rtx pat;
      tree len;
      tree src = CALL_EXPR_ARG (exp, 0);
      rtx result, src_reg, char_rtx, before_strlen;
      enum machine_mode insn_mode = target_mode, char_mode;
      enum insn_code icode = CODE_FOR_nothing;
      int align;

      /* If the length can be computed at compile-time, return it.  */
      len = c_strlen (src, 0);
      if (len)
	return expand_expr (len, target, target_mode, EXPAND_NORMAL);

      /* If the length can be computed at compile-time and is constant
	 integer, but there are side-effects in src, evaluate
	 src for side-effects, then return len.
	 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
	 can be optimized into: i++; x = 3;  */
      len = c_strlen (src, 1);
      if (len && TREE_CODE (len) == INTEGER_CST)
	{
	  expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
	  return expand_expr (len, target, target_mode, EXPAND_NORMAL);
	}

      align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;

      /* If SRC is not a pointer type, don't do this operation inline.  */
      if (align == 0)
	return NULL_RTX;

      /* Bail out if we can't compute strlen in the right mode.  Walk
	 through progressively wider modes until one has a strlen
	 pattern.  */
      while (insn_mode != VOIDmode)
	{
	  icode = strlen_optab->handlers[(int) insn_mode].insn_code;
	  if (icode != CODE_FOR_nothing)
	    break;

	  insn_mode = GET_MODE_WIDER_MODE (insn_mode);
	}
      if (insn_mode == VOIDmode)
	return NULL_RTX;

      /* Make a place to write the result of the instruction.  */
      result = target;
      if (! (result != 0
	     && REG_P (result)
	     && GET_MODE (result) == insn_mode
	     && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	result = gen_reg_rtx (insn_mode);

      /* Make a place to hold the source address.  We will not expand
	 the actual source until we are sure that the expansion will
	 not fail -- there are trees that cannot be expanded twice.  */
      src_reg = gen_reg_rtx (Pmode);

      /* Mark the beginning of the strlen sequence so we can emit the
	 source operand later.  */
      before_strlen = get_last_insn ();

      /* Operand 2 of the strlen pattern is the character to scan for
	 (always NUL here); coerce it to the predicate's mode.  */
      char_rtx = const0_rtx;
      char_mode = insn_data[(int) icode].operand[2].mode;
      if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
							    char_mode))
	char_rtx = copy_to_mode_reg (char_mode, char_rtx);

      pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
			     char_rtx, GEN_INT (align));
      if (! pat)
	return NULL_RTX;
      emit_insn (pat);

      /* Now that we are assured of success, expand the source.  */
      start_sequence ();
      pat = expand_expr (src, src_reg, ptr_mode, EXPAND_NORMAL);
      if (pat != src_reg)
	emit_move_insn (src_reg, pat);
      pat = get_insns ();
      end_sequence ();

      /* Splice the source-address computation in ahead of the strlen
	 insn that consumes SRC_REG.  */
      if (before_strlen)
	emit_insn_after (pat, before_strlen);
      else
	emit_insn_before (pat, get_insns ());

      /* Return the value in the proper mode for this function.  */
      if (GET_MODE (result) == target_mode)
	target = result;
      else if (target != 0)
	convert_move (target, result, 0);
      else
	target = convert_to_mode (target_mode, result, 0);

      return target;
    }
}
3153
3154 /* Expand a call to the strstr builtin. Return NULL_RTX if we failed the
3155 caller should emit a normal call, otherwise try to get the result
3156 in TARGET, if convenient (and in mode MODE if that's convenient). */
3157
3158 static rtx
3159 expand_builtin_strstr (tree exp, rtx target, enum machine_mode mode)
3160 {
3161 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3162 {
3163 tree type = TREE_TYPE (exp);
3164 tree result = fold_builtin_strstr (CALL_EXPR_ARG (exp, 0),
3165 CALL_EXPR_ARG (exp, 1), type);
3166 if (result)
3167 return expand_expr (result, target, mode, EXPAND_NORMAL);
3168 }
3169 return NULL_RTX;
3170 }
3171
3172 /* Expand a call to the strchr builtin. Return NULL_RTX if we failed the
3173 caller should emit a normal call, otherwise try to get the result
3174 in TARGET, if convenient (and in mode MODE if that's convenient). */
3175
3176 static rtx
3177 expand_builtin_strchr (tree exp, rtx target, enum machine_mode mode)
3178 {
3179 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3180 {
3181 tree type = TREE_TYPE (exp);
3182 tree result = fold_builtin_strchr (CALL_EXPR_ARG (exp, 0),
3183 CALL_EXPR_ARG (exp, 1), type);
3184 if (result)
3185 return expand_expr (result, target, mode, EXPAND_NORMAL);
3186
3187 /* FIXME: Should use strchrM optab so that ports can optimize this. */
3188 }
3189 return NULL_RTX;
3190 }
3191
3192 /* Expand a call to the strrchr builtin. Return NULL_RTX if we failed the
3193 caller should emit a normal call, otherwise try to get the result
3194 in TARGET, if convenient (and in mode MODE if that's convenient). */
3195
3196 static rtx
3197 expand_builtin_strrchr (tree exp, rtx target, enum machine_mode mode)
3198 {
3199 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3200 {
3201 tree type = TREE_TYPE (exp);
3202 tree result = fold_builtin_strrchr (CALL_EXPR_ARG (exp, 0),
3203 CALL_EXPR_ARG (exp, 1), type);
3204 if (result)
3205 return expand_expr (result, target, mode, EXPAND_NORMAL);
3206 }
3207 return NULL_RTX;
3208 }
3209
3210 /* Expand a call to the strpbrk builtin. Return NULL_RTX if we failed the
3211 caller should emit a normal call, otherwise try to get the result
3212 in TARGET, if convenient (and in mode MODE if that's convenient). */
3213
3214 static rtx
3215 expand_builtin_strpbrk (tree exp, rtx target, enum machine_mode mode)
3216 {
3217 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3218 {
3219 tree type = TREE_TYPE (exp);
3220 tree result = fold_builtin_strpbrk (CALL_EXPR_ARG (exp, 0),
3221 CALL_EXPR_ARG (exp, 1), type);
3222 if (result)
3223 return expand_expr (result, target, mode, EXPAND_NORMAL);
3224 }
3225 return NULL_RTX;
3226 }
3227
3228 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3229 bytes from constant string DATA + OFFSET and return it as target
3230 constant. */
3231
3232 static rtx
3233 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3234 enum machine_mode mode)
3235 {
3236 const char *str = (const char *) data;
3237
3238 gcc_assert (offset >= 0
3239 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3240 <= strlen (str) + 1));
3241
3242 return c_readstr (str + offset, mode);
3243 }
3244
/* Expand a call EXP to the memcpy builtin.
   Return NULL_RTX if we failed, the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_memcpy (tree exp, rtx target, enum machine_mode mode)
{
  tree fndecl = get_callee_fndecl (exp);

  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      tree len = CALL_EXPR_ARG (exp, 2);
      const char *src_str;
      unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
      unsigned int dest_align
	= get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
      rtx dest_mem, src_mem, dest_addr, len_rtx;
      tree result = fold_builtin_memory_op (dest, src, len,
					    TREE_TYPE (TREE_TYPE (fndecl)),
					    false, /*endp=*/0);
      HOST_WIDE_INT expected_size = -1;
      unsigned int expected_align = 0;

      /* If the folder simplified the call, expand any leading
	 COMPOUND_EXPR statements for their side effects and then the
	 final value.  */
      if (result)
	{
	  while (TREE_CODE (result) == COMPOUND_EXPR)
	    {
	      expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
			   EXPAND_NORMAL);
	      result = TREE_OPERAND (result, 1);
	    }
	  return expand_expr (result, target, mode, EXPAND_NORMAL);
	}

      /* If DEST is not a pointer type, call the normal function.  */
      if (dest_align == 0)
	return NULL_RTX;

      /* If either SRC is not a pointer type, don't do this
	 operation in-line.  */
      if (src_align == 0)
	return NULL_RTX;

      /* Profile feedback may supply an expected block size/alignment.  */
      stringop_block_profile (exp, &expected_align, &expected_size);
      if (expected_align < dest_align)
	expected_align = dest_align;
      dest_mem = get_memory_rtx (dest, len);
      set_mem_align (dest_mem, dest_align);
      len_rtx = expand_normal (len);
      src_str = c_getstr (src);

      /* If SRC is a string constant and block move would be done
	 by pieces, we can avoid loading the string from memory
	 and only store the computed constants.  */
      if (src_str
	  && GET_CODE (len_rtx) == CONST_INT
	  && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
	  && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
				  (void *) src_str, dest_align))
	{
	  dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
				      builtin_memcpy_read_str,
				      (void *) src_str, dest_align, 0);
	  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}

      src_mem = get_memory_rtx (src, len);
      set_mem_align (src_mem, src_align);

      /* Copy word part most expediently.  */
      dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
					 CALL_EXPR_TAILCALL (exp)
					 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
					 expected_align, expected_size);

      /* memcpy returns DEST; materialize its address if the block move
	 did not already produce it.  */
      if (dest_addr == 0)
	{
	  dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
	  dest_addr = convert_memory_address (ptr_mode, dest_addr);
	}
      return dest_addr;
    }
}
3336
3337 /* Expand a call EXP to the mempcpy builtin.
3338 Return NULL_RTX if we failed; the caller should emit a normal call,
3339 otherwise try to get the result in TARGET, if convenient (and in
3340 mode MODE if that's convenient). If ENDP is 0 return the
3341 destination pointer, if ENDP is 1 return the end pointer ala
3342 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3343 stpcpy. */
3344
3345 static rtx
3346 expand_builtin_mempcpy(tree exp, rtx target, enum machine_mode mode)
3347 {
3348 if (!validate_arglist (exp,
3349 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3350 return NULL_RTX;
3351 else
3352 {
3353 tree dest = CALL_EXPR_ARG (exp, 0);
3354 tree src = CALL_EXPR_ARG (exp, 1);
3355 tree len = CALL_EXPR_ARG (exp, 2);
3356 return expand_builtin_mempcpy_args (dest, src, len,
3357 TREE_TYPE (exp),
3358 target, mode, /*endp=*/ 1);
3359 }
3360 }
3361
/* Helper function to do the actual work for expand_builtin_mempcpy.  The
   arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   TYPE is the return type of the call.  The other arguments and return value
   are the same as for expand_builtin_mempcpy.  */

static rtx
expand_builtin_mempcpy_args (tree dest, tree src, tree len, tree type,
			     rtx target, enum machine_mode mode, int endp)
{
  /* If return value is ignored, transform mempcpy into memcpy.  */
  if (target == const0_rtx)
    {
      tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];

      if (!fn)
	return NULL_RTX;

      return expand_expr (build_call_expr (fn, 3, dest, src, len),
			  target, mode, EXPAND_NORMAL);
    }
  else
    {
      const char *src_str;
      unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
      unsigned int dest_align
	= get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
      rtx dest_mem, src_mem, len_rtx;
      tree result = fold_builtin_memory_op (dest, src, len, type, false, endp);

      /* If the folder simplified the call, expand any leading
	 COMPOUND_EXPR statements for their side effects and then the
	 final value.  */
      if (result)
	{
	  while (TREE_CODE (result) == COMPOUND_EXPR)
	    {
	      expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
			   EXPAND_NORMAL);
	      result = TREE_OPERAND (result, 1);
	    }
	  return expand_expr (result, target, mode, EXPAND_NORMAL);
	}

      /* If either SRC or DEST is not a pointer type, don't do this
	 operation in-line.  */
      if (dest_align == 0 || src_align == 0)
	return NULL_RTX;

      /* If LEN is not constant, call the normal function.  */
      if (! host_integerp (len, 1))
	return NULL_RTX;

      len_rtx = expand_normal (len);
      src_str = c_getstr (src);

      /* If SRC is a string constant and block move would be done
	 by pieces, we can avoid loading the string from memory
	 and only store the computed constants.  */
      if (src_str
	  && GET_CODE (len_rtx) == CONST_INT
	  && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
	  && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
				  (void *) src_str, dest_align))
	{
	  dest_mem = get_memory_rtx (dest, len);
	  set_mem_align (dest_mem, dest_align);
	  dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
				      builtin_memcpy_read_str,
				      (void *) src_str, dest_align, endp);
	  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}

      /* Otherwise try an inline piecewise move for a constant length.  */
      if (GET_CODE (len_rtx) == CONST_INT
	  && can_move_by_pieces (INTVAL (len_rtx),
				 MIN (dest_align, src_align)))
	{
	  dest_mem = get_memory_rtx (dest, len);
	  set_mem_align (dest_mem, dest_align);
	  src_mem = get_memory_rtx (src, len);
	  set_mem_align (src_mem, src_align);
	  dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
				     MIN (dest_align, src_align), endp);
	  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}

      return NULL_RTX;
    }
}
3452
3453 /* Expand expression EXP, which is a call to the memmove builtin. Return
3454 NULL_RTX if we failed; the caller should emit a normal call. */
3455
3456 static rtx
3457 expand_builtin_memmove (tree exp, rtx target, enum machine_mode mode, int ignore)
3458 {
3459 if (!validate_arglist (exp,
3460 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3461 return NULL_RTX;
3462 else
3463 {
3464 tree dest = CALL_EXPR_ARG (exp, 0);
3465 tree src = CALL_EXPR_ARG (exp, 1);
3466 tree len = CALL_EXPR_ARG (exp, 2);
3467 return expand_builtin_memmove_args (dest, src, len, TREE_TYPE (exp),
3468 target, mode, ignore);
3469 }
3470 }
3471
3472 /* Helper function to do the actual work for expand_builtin_memmove. The
3473 arguments to the builtin_memmove call DEST, SRC, and LEN are broken out
3474 so that this can also be called without constructing an actual CALL_EXPR.
3475 TYPE is the return type of the call. The other arguments and return value
3476 are the same as for expand_builtin_memmove. */
3477
3478 static rtx
3479 expand_builtin_memmove_args (tree dest, tree src, tree len,
3480 tree type, rtx target, enum machine_mode mode,
3481 int ignore)
3482 {
3483 tree result = fold_builtin_memory_op (dest, src, len, type, ignore, /*endp=*/3);
3484
3485 if (result)
3486 {
3487 STRIP_TYPE_NOPS (result);
3488 while (TREE_CODE (result) == COMPOUND_EXPR)
3489 {
3490 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3491 EXPAND_NORMAL);
3492 result = TREE_OPERAND (result, 1);
3493 }
3494 return expand_expr (result, target, mode, EXPAND_NORMAL);
3495 }
3496
3497 /* Otherwise, call the normal function. */
3498 return NULL_RTX;
3499 }
3500
3501 /* Expand expression EXP, which is a call to the bcopy builtin. Return
3502 NULL_RTX if we failed the caller should emit a normal call. */
3503
3504 static rtx
3505 expand_builtin_bcopy (tree exp, int ignore)
3506 {
3507 tree type = TREE_TYPE (exp);
3508 tree src, dest, size;
3509
3510 if (!validate_arglist (exp,
3511 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3512 return NULL_RTX;
3513
3514 src = CALL_EXPR_ARG (exp, 0);
3515 dest = CALL_EXPR_ARG (exp, 1);
3516 size = CALL_EXPR_ARG (exp, 2);
3517
3518 /* Transform bcopy(ptr x, ptr y, int z) to memmove(ptr y, ptr x, size_t z).
3519 This is done this way so that if it isn't expanded inline, we fall
3520 back to calling bcopy instead of memmove. */
3521 return expand_builtin_memmove_args (dest, src,
3522 fold_convert (sizetype, size),
3523 type, const0_rtx, VOIDmode,
3524 ignore);
3525 }
3526
3527 #ifndef HAVE_movstr
3528 # define HAVE_movstr 0
3529 # define CODE_FOR_movstr CODE_FOR_nothing
3530 #endif
3531
/* Expand into a movstr instruction, if one is available.  Return NULL_RTX if
   we failed, the caller should emit a normal call, otherwise try to
   get the result in TARGET, if convenient.  If ENDP is 0 return the
   destination pointer, if ENDP is 1 return the end pointer ala
   mempcpy, and if ENDP is 2 return the end pointer minus one ala
   stpcpy.  */

static rtx
expand_movstr (tree dest, tree src, rtx target, int endp)
{
  rtx end;
  rtx dest_mem;
  rtx src_mem;
  rtx insn;
  const struct insn_data * data;

  /* Nothing to do if the target has no movstr pattern.  */
  if (!HAVE_movstr)
    return NULL_RTX;

  dest_mem = get_memory_rtx (dest, NULL);
  src_mem = get_memory_rtx (src, NULL);
  if (!endp)
    {
      /* Caller wants the destination pointer: capture it before the
	 insn and let END be a scratch register.  */
      target = force_reg (Pmode, XEXP (dest_mem, 0));
      dest_mem = replace_equiv_address (dest_mem, target);
      end = gen_reg_rtx (Pmode);
    }
  else
    {
      /* Caller wants an end pointer; reuse TARGET for it when we have
	 a usable one, otherwise make a fresh register.  */
      if (target == 0 || target == const0_rtx)
	{
	  end = gen_reg_rtx (Pmode);
	  if (target == 0)
	    target = end;
	}
      else
	end = target;
    }

  data = insn_data + CODE_FOR_movstr;

  /* Coerce END to the mode the pattern's first operand expects.  */
  if (data->operand[0].mode != VOIDmode)
    end = gen_lowpart (data->operand[0].mode, end);

  insn = data->genfun (end, dest_mem, src_mem);

  gcc_assert (insn);

  emit_insn (insn);

  /* movstr is supposed to set end to the address of the NUL
     terminator.  If the caller requested a mempcpy-like return value,
     adjust it.  */
  if (endp == 1 && target != const0_rtx)
    {
      rtx tem = plus_constant (gen_lowpart (GET_MODE (target), end), 1);
      emit_move_insn (target, force_operand (tem, NULL_RTX));
    }

  return target;
}
3593
3594 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3595 NULL_RTX if we failed the caller should emit a normal call, otherwise
3596 try to get the result in TARGET, if convenient (and in mode MODE if that's
3597 convenient). */
3598
3599 static rtx
3600 expand_builtin_strcpy (tree fndecl, tree exp, rtx target, enum machine_mode mode)
3601 {
3602 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3603 {
3604 tree dest = CALL_EXPR_ARG (exp, 0);
3605 tree src = CALL_EXPR_ARG (exp, 1);
3606 return expand_builtin_strcpy_args (fndecl, dest, src, target, mode);
3607 }
3608 return NULL_RTX;
3609 }
3610
3611 /* Helper function to do the actual work for expand_builtin_strcpy. The
3612 arguments to the builtin_strcpy call DEST and SRC are broken out
3613 so that this can also be called without constructing an actual CALL_EXPR.
3614 The other arguments and return value are the same as for
3615 expand_builtin_strcpy. */
3616
3617 static rtx
3618 expand_builtin_strcpy_args (tree fndecl, tree dest, tree src,
3619 rtx target, enum machine_mode mode)
3620 {
3621 tree result = fold_builtin_strcpy (fndecl, dest, src, 0);
3622 if (result)
3623 return expand_expr (result, target, mode, EXPAND_NORMAL);
3624 return expand_movstr (dest, src, target, /*endp=*/0);
3625
3626 }
3627
/* Expand a call EXP to the stpcpy builtin.
   Return NULL_RTX if we failed the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
{
  tree dst, src;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  /* If return value is ignored, transform stpcpy into strcpy.  */
  if (target == const0_rtx)
    {
      tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
      if (!fn)
	return NULL_RTX;

      return expand_expr (build_call_expr (fn, 2, dst, src),
			  target, mode, EXPAND_NORMAL);
    }
  else
    {
      tree len, lenp1;
      rtx ret;

      /* Ensure we get an actual string whose length can be evaluated at
	 compile-time, not an expression containing a string.  This is
	 because the latter will potentially produce pessimized code
	 when used to produce the return value.  */
      if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
	return expand_movstr (dst, src, target, /*endp=*/2);

      /* Copy strlen(SRC)+1 bytes and ask mempcpy for the end pointer
	 minus one, which is exactly stpcpy's return value.  */
      lenp1 = size_binop (PLUS_EXPR, len, ssize_int (1));
      ret = expand_builtin_mempcpy_args (dst, src, lenp1, TREE_TYPE (exp),
					 target, mode, /*endp=*/2);

      if (ret)
	return ret;

      /* mempcpy expansion failed; for a constant length, expand as
	 strcpy and add the length to the returned destination.  */
      if (TREE_CODE (len) == INTEGER_CST)
	{
	  rtx len_rtx = expand_normal (len);

	  if (GET_CODE (len_rtx) == CONST_INT)
	    {
	      ret = expand_builtin_strcpy_args (get_callee_fndecl (exp),
						dst, src, target, mode);

	      if (ret)
		{
		  if (! target)
		    {
		      if (mode != VOIDmode)
			target = gen_reg_rtx (mode);
		      else
			target = gen_reg_rtx (GET_MODE (ret));
		    }
		  if (GET_MODE (target) != GET_MODE (ret))
		    ret = gen_lowpart (GET_MODE (target), ret);

		  /* DST + strlen(SRC) is stpcpy's result.  */
		  ret = plus_constant (ret, INTVAL (len_rtx));
		  ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
		  gcc_assert (ret);

		  return target;
		}
	    }
	}

      /* Last resort: a movstr instruction with end-minus-one value.  */
      return expand_movstr (dst, src, target, /*endp=*/2);
    }
}
3706
3707 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3708 bytes from constant string DATA + OFFSET and return it as target
3709 constant. */
3710
3711 static rtx
3712 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3713 enum machine_mode mode)
3714 {
3715 const char *str = (const char *) data;
3716
3717 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3718 return const0_rtx;
3719
3720 return c_readstr (str + offset, mode);
3721 }
3722
/* Expand expression EXP, which is a call to the strncpy builtin.  Return
   NULL_RTX if we failed the caller should emit a normal call.  */

static rtx
expand_builtin_strncpy (tree exp, rtx target, enum machine_mode mode)
{
  tree fndecl = get_callee_fndecl (exp);

  if (validate_arglist (exp,
			POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      tree len = CALL_EXPR_ARG (exp, 2);
      tree slen = c_strlen (src, 1);
      tree result = fold_builtin_strncpy (fndecl, dest, src, len, slen);

      /* If the folder simplified the call, expand any leading
	 COMPOUND_EXPR statements for their side effects and then the
	 final value.  */
      if (result)
	{
	  while (TREE_CODE (result) == COMPOUND_EXPR)
	    {
	      expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
			   EXPAND_NORMAL);
	      result = TREE_OPERAND (result, 1);
	    }
	  return expand_expr (result, target, mode, EXPAND_NORMAL);
	}

      /* We must be passed a constant len and src parameter.  */
      if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
	return NULL_RTX;

      slen = size_binop (PLUS_EXPR, slen, ssize_int (1));

      /* We're required to pad with trailing zeros if the requested
	 len is greater than strlen(s2)+1.  In that case try to
	 use store_by_pieces, if it fails, punt.  */
      if (tree_int_cst_lt (slen, len))
	{
	  unsigned int dest_align
	    = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
	  const char *p = c_getstr (src);
	  rtx dest_mem;

	  if (!p || dest_align == 0 || !host_integerp (len, 1)
	      || !can_store_by_pieces (tree_low_cst (len, 1),
				       builtin_strncpy_read_str,
				       (void *) p, dest_align))
	    return NULL_RTX;

	  /* builtin_strncpy_read_str supplies zeros past the NUL,
	     giving the required padding for free.  */
	  dest_mem = get_memory_rtx (dest, len);
	  store_by_pieces (dest_mem, tree_low_cst (len, 1),
			   builtin_strncpy_read_str,
			   (void *) p, dest_align, 0);
	  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}
    }
  return NULL_RTX;
}
3784
3785 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3786 bytes from constant string DATA + OFFSET and return it as target
3787 constant. */
3788
3789 rtx
3790 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3791 enum machine_mode mode)
3792 {
3793 const char *c = (const char *) data;
3794 char *p = alloca (GET_MODE_SIZE (mode));
3795
3796 memset (p, *c, GET_MODE_SIZE (mode));
3797
3798 return c_readstr (p, mode);
3799 }
3800
3801 /* Callback routine for store_by_pieces. Return the RTL of a register
3802 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3803 char value given in the RTL register data. For example, if mode is
3804 4 bytes wide, return the RTL for 0x01010101*data. */
3805
3806 static rtx
3807 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3808 enum machine_mode mode)
3809 {
3810 rtx target, coeff;
3811 size_t size;
3812 char *p;
3813
3814 size = GET_MODE_SIZE (mode);
3815 if (size == 1)
3816 return (rtx) data;
3817
3818 p = alloca (size);
3819 memset (p, 1, size);
3820 coeff = c_readstr (p, mode);
3821
3822 target = convert_to_mode (mode, (rtx) data, 1);
3823 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3824 return force_reg (mode, target);
3825 }
3826
3827 /* Expand expression EXP, which is a call to the memset builtin. Return
3828 NULL_RTX if we failed the caller should emit a normal call, otherwise
3829 try to get the result in TARGET, if convenient (and in mode MODE if that's
3830 convenient). */
3831
3832 static rtx
3833 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3834 {
3835 if (!validate_arglist (exp,
3836 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3837 return NULL_RTX;
3838 else
3839 {
3840 tree dest = CALL_EXPR_ARG (exp, 0);
3841 tree val = CALL_EXPR_ARG (exp, 1);
3842 tree len = CALL_EXPR_ARG (exp, 2);
3843 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3844 }
3845 }
3846
3847 /* Helper function to do the actual work for expand_builtin_memset. The
3848 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3849 so that this can also be called without constructing an actual CALL_EXPR.
3850 The other arguments and return value are the same as for
3851 expand_builtin_memset. */
3852
static rtx
expand_builtin_memset_args (tree dest, tree val, tree len,
			    rtx target, enum machine_mode mode, tree orig_exp)
{
  tree fndecl, fn;
  enum built_in_function fcode;
  char c;
  unsigned int dest_align;
  rtx dest_mem, dest_addr, len_rtx;
  HOST_WIDE_INT expected_size = -1;
  unsigned int expected_align = 0;

  dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);

  /* If DEST is not a pointer type, don't do this operation in-line.  */
  if (dest_align == 0)
    return NULL_RTX;

  /* Profile feedback may suggest a likely size/alignment for this
     block operation; never let it lower the alignment already proven.  */
  stringop_block_profile (orig_exp, &expected_align, &expected_size);
  if (expected_align < dest_align)
    expected_align = dest_align;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      /* Evaluate and ignore VAL in case it has side-effects.  */
      expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
      return expand_expr (dest, target, mode, EXPAND_NORMAL);
    }

  /* Stabilize the arguments in case we fail.  */
  dest = builtin_save_expr (dest);
  val = builtin_save_expr (val);
  len = builtin_save_expr (len);

  len_rtx = expand_normal (len);
  dest_mem = get_memory_rtx (dest, len);

  /* Non-constant fill value: replicate it at run time.  */
  if (TREE_CODE (val) != INTEGER_CST)
    {
      rtx val_rtx;

      val_rtx = expand_normal (val);
      val_rtx = convert_to_mode (TYPE_MODE (unsigned_char_type_node),
				 val_rtx, 0);

      /* Assume that we can memset by pieces if we can store
       * the coefficients by pieces (in the required modes).
       * We can't pass builtin_memset_gen_str as that emits RTL.  */
      c = 1;
      if (host_integerp (len, 1)
	  && !(optimize_size && tree_low_cst (len, 1) > 1)
	  && can_store_by_pieces (tree_low_cst (len, 1),
				  builtin_memset_read_str, &c, dest_align))
	{
	  val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
			       val_rtx);
	  store_by_pieces (dest_mem, tree_low_cst (len, 1),
			   builtin_memset_gen_str, val_rtx, dest_align, 0);
	}
      else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
					dest_align, expected_align,
					expected_size))
	goto do_libcall;

      /* memset returns DEST; hand back its address in ptr_mode.  */
      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  /* Reduce the constant VAL to a single target byte; punt to the
     library if it does not fit.  */
  if (target_char_cast (val, &c))
    goto do_libcall;

  /* Non-zero constant byte: store by pieces or via the setmem pattern.  */
  if (c)
    {
      if (host_integerp (len, 1)
	  && !(optimize_size && tree_low_cst (len, 1) > 1)
	  && can_store_by_pieces (tree_low_cst (len, 1),
				  builtin_memset_read_str, &c, dest_align))
	store_by_pieces (dest_mem, tree_low_cst (len, 1),
			 builtin_memset_read_str, &c, dest_align, 0);
      else if (!set_storage_via_setmem (dest_mem, len_rtx, GEN_INT (c),
					dest_align, expected_align,
					expected_size))
	goto do_libcall;

      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  /* The byte is zero: use the block-clear expander, preserving any
     tail-call marking from the original expression.  */
  set_mem_align (dest_mem, dest_align);
  dest_addr = clear_storage_hints (dest_mem, len_rtx,
				   CALL_EXPR_TAILCALL (orig_exp)
				   ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
				   expected_align, expected_size);

  if (dest_addr == 0)
    {
      dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_addr = convert_memory_address (ptr_mode, dest_addr);
    }

  return dest_addr;

 do_libcall:
  /* Inline expansion failed: emit an explicit call to the original
     memset or bzero, keeping its tail-call flag.  */
  fndecl = get_callee_fndecl (orig_exp);
  fcode = DECL_FUNCTION_CODE (fndecl);
  if (fcode == BUILT_IN_MEMSET)
    fn = build_call_expr (fndecl, 3, dest, val, len);
  else if (fcode == BUILT_IN_BZERO)
    fn = build_call_expr (fndecl, 2, dest, len);
  else
    gcc_unreachable ();
  if (TREE_CODE (fn) == CALL_EXPR)
    CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
  return expand_call (fn, target, target == const0_rtx);
}
3971
/* Expand expression EXP, which is a call to the bzero builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call.  */
3974
3975 static rtx
3976 expand_builtin_bzero (tree exp)
3977 {
3978 tree dest, size;
3979
3980 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3981 return NULL_RTX;
3982
3983 dest = CALL_EXPR_ARG (exp, 0);
3984 size = CALL_EXPR_ARG (exp, 1);
3985
3986 /* New argument list transforming bzero(ptr x, int y) to
3987 memset(ptr x, int 0, size_t y). This is done this way
3988 so that if it isn't expanded inline, we fallback to
3989 calling bzero instead of memset. */
3990
3991 return expand_builtin_memset_args (dest, integer_zero_node,
3992 fold_convert (sizetype, size),
3993 const0_rtx, VOIDmode, exp);
3994 }
3995
3996 /* Expand a call to the memchr builtin. Return NULL_RTX if we failed the
3997 caller should emit a normal call, otherwise try to get the result
3998 in TARGET, if convenient (and in mode MODE if that's convenient). */
3999
4000 static rtx
4001 expand_builtin_memchr (tree exp, rtx target, enum machine_mode mode)
4002 {
4003 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE,
4004 INTEGER_TYPE, VOID_TYPE))
4005 {
4006 tree type = TREE_TYPE (exp);
4007 tree result = fold_builtin_memchr (CALL_EXPR_ARG (exp, 0),
4008 CALL_EXPR_ARG (exp, 1),
4009 CALL_EXPR_ARG (exp, 2), type);
4010 if (result)
4011 return expand_expr (result, target, mode, EXPAND_NORMAL);
4012 }
4013 return NULL_RTX;
4014 }
4015
4016 /* Expand expression EXP, which is a call to the memcmp built-in function.
4017 Return NULL_RTX if we failed and the
4018 caller should emit a normal call, otherwise try to get the result in
4019 TARGET, if convenient (and in mode MODE, if that's convenient). */
4020
static rtx
expand_builtin_memcmp (tree exp, rtx target, enum machine_mode mode)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      /* First try to fold the comparison away entirely (e.g. when
	 both arguments are constant strings).  */
      tree result = fold_builtin_memcmp (CALL_EXPR_ARG (exp, 0),
					 CALL_EXPR_ARG (exp, 1),
					 CALL_EXPR_ARG (exp, 2));
      if (result)
	return expand_expr (result, target, mode, EXPAND_NORMAL);
    }

#if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
  {
    rtx arg1_rtx, arg2_rtx, arg3_rtx;
    rtx result;
    rtx insn;
    tree arg1 = CALL_EXPR_ARG (exp, 0);
    tree arg2 = CALL_EXPR_ARG (exp, 1);
    tree len = CALL_EXPR_ARG (exp, 2);

    /* Alignments in bytes; zero means "not a pointer we understand".  */
    int arg1_align
      = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
    int arg2_align
      = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
    enum machine_mode insn_mode;

    /* Pick whichever block-compare insn the target provides,
       preferring cmpmemsi over cmpstrnsi.  */
#ifdef HAVE_cmpmemsi
    if (HAVE_cmpmemsi)
      insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
    else
#endif
#ifdef HAVE_cmpstrnsi
    if (HAVE_cmpstrnsi)
      insn_mode = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
    else
#endif
      return NULL_RTX;

    /* If we don't have POINTER_TYPE, call the function.  */
    if (arg1_align == 0 || arg2_align == 0)
      return NULL_RTX;

    /* Make a place to write the result of the instruction.  */
    result = target;
    if (! (result != 0
	   && REG_P (result) && GET_MODE (result) == insn_mode
	   && REGNO (result) >= FIRST_PSEUDO_REGISTER))
      result = gen_reg_rtx (insn_mode);

    arg1_rtx = get_memory_rtx (arg1, len);
    arg2_rtx = get_memory_rtx (arg2, len);
    arg3_rtx = expand_normal (len);

    /* Set MEM_SIZE as appropriate.  */
    if (GET_CODE (arg3_rtx) == CONST_INT)
      {
	set_mem_size (arg1_rtx, arg3_rtx);
	set_mem_size (arg2_rtx, arg3_rtx);
      }

    /* Generate the comparison with whichever pattern was selected
       above; the mirrored #ifdef chain keeps the two in sync.  */
#ifdef HAVE_cmpmemsi
    if (HAVE_cmpmemsi)
      insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
			   GEN_INT (MIN (arg1_align, arg2_align)));
    else
#endif
#ifdef HAVE_cmpstrnsi
    if (HAVE_cmpstrnsi)
      insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
			    GEN_INT (MIN (arg1_align, arg2_align)));
    else
#endif
      gcc_unreachable ();

    if (insn)
      emit_insn (insn);
    else
      /* The expander declined at the last moment; fall back to a
	 direct memcmp libcall, reusing the operands computed above.  */
      emit_library_call_value (memcmp_libfunc, result, LCT_PURE_MAKE_BLOCK,
			       TYPE_MODE (integer_type_node), 3,
			       XEXP (arg1_rtx, 0), Pmode,
			       XEXP (arg2_rtx, 0), Pmode,
			       convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
						TYPE_UNSIGNED (sizetype)),
			       TYPE_MODE (sizetype));

    /* Return the value in the proper mode for this function.  */
    mode = TYPE_MODE (TREE_TYPE (exp));
    if (GET_MODE (result) == mode)
      return result;
    else if (target != 0)
      {
	convert_move (target, result, 0);
	return target;
      }
    else
      return convert_to_mode (mode, result, 0);
  }
#endif

  return NULL_RTX;
}
4126
4127 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4128 if we failed the caller should emit a normal call, otherwise try to get
4129 the result in TARGET, if convenient. */
4130
static rtx
expand_builtin_strcmp (tree exp, rtx target, enum machine_mode mode)
{
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      /* First see whether the comparison folds at compile time.  */
      tree result = fold_builtin_strcmp (CALL_EXPR_ARG (exp, 0),
					 CALL_EXPR_ARG (exp, 1));
      if (result)
	return expand_expr (result, target, mode, EXPAND_NORMAL);
    }

#if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
  if (cmpstr_optab[SImode] != CODE_FOR_nothing
      || cmpstrn_optab[SImode] != CODE_FOR_nothing)
    {
      rtx arg1_rtx, arg2_rtx;
      rtx result, insn = NULL_RTX;
      tree fndecl, fn;
      tree arg1 = CALL_EXPR_ARG (exp, 0);
      tree arg2 = CALL_EXPR_ARG (exp, 1);

      /* Alignments in bytes; zero means "not a usable pointer".  */
      int arg1_align
	= get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
      int arg2_align
	= get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;

      /* If we don't have POINTER_TYPE, call the function.  */
      if (arg1_align == 0 || arg2_align == 0)
	return NULL_RTX;

      /* Stabilize the arguments in case gen_cmpstr(n)si fail.  */
      arg1 = builtin_save_expr (arg1);
      arg2 = builtin_save_expr (arg2);

      arg1_rtx = get_memory_rtx (arg1, NULL);
      arg2_rtx = get_memory_rtx (arg2, NULL);

#ifdef HAVE_cmpstrsi
      /* Try to call cmpstrsi.  */
      if (HAVE_cmpstrsi)
	{
	  enum machine_mode insn_mode
	    = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;

	  /* Make a place to write the result of the instruction.  */
	  result = target;
	  if (! (result != 0
		 && REG_P (result) && GET_MODE (result) == insn_mode
		 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	    result = gen_reg_rtx (insn_mode);

	  insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
			       GEN_INT (MIN (arg1_align, arg2_align)));
	}
#endif
#ifdef HAVE_cmpstrnsi
      /* Try to determine at least one length and call cmpstrnsi.  */
      if (!insn && HAVE_cmpstrnsi)
	{
	  tree len;
	  rtx arg3_rtx;

	  enum machine_mode insn_mode
	    = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
	  tree len1 = c_strlen (arg1, 1);
	  tree len2 = c_strlen (arg2, 1);

	  /* Include the terminating NUL in the lengths so the
	     bounded compare is equivalent to strcmp.  */
	  if (len1)
	    len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
	  if (len2)
	    len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);

	  /* If we don't have a constant length for the first, use the length
	     of the second, if we know it.  We don't require a constant for
	     this case; some cost analysis could be done if both are available
	     but neither is constant.  For now, assume they're equally cheap,
	     unless one has side effects.  If both strings have constant lengths,
	     use the smaller.  */

	  if (!len1)
	    len = len2;
	  else if (!len2)
	    len = len1;
	  else if (TREE_SIDE_EFFECTS (len1))
	    len = len2;
	  else if (TREE_SIDE_EFFECTS (len2))
	    len = len1;
	  else if (TREE_CODE (len1) != INTEGER_CST)
	    len = len2;
	  else if (TREE_CODE (len2) != INTEGER_CST)
	    len = len1;
	  else if (tree_int_cst_lt (len1, len2))
	    len = len1;
	  else
	    len = len2;

	  /* If both arguments have side effects, we cannot optimize.  */
	  if (!len || TREE_SIDE_EFFECTS (len))
	    goto do_libcall;

	  arg3_rtx = expand_normal (len);

	  /* Make a place to write the result of the instruction.  */
	  result = target;
	  if (! (result != 0
		 && REG_P (result) && GET_MODE (result) == insn_mode
		 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	    result = gen_reg_rtx (insn_mode);

	  insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
				GEN_INT (MIN (arg1_align, arg2_align)));
	}
#endif

      if (insn)
	{
	  emit_insn (insn);

	  /* Return the value in the proper mode for this function.  */
	  mode = TYPE_MODE (TREE_TYPE (exp));
	  if (GET_MODE (result) == mode)
	    return result;
	  if (target == 0)
	    return convert_to_mode (mode, result, 0);
	  convert_move (target, result, 0);
	  return target;
	}

      /* Expand the library call ourselves using a stabilized argument
	 list to avoid re-evaluating the function's arguments twice.  */
#ifdef HAVE_cmpstrnsi
    do_libcall:
#endif
      fndecl = get_callee_fndecl (exp);
      fn = build_call_expr (fndecl, 2, arg1, arg2);
      if (TREE_CODE (fn) == CALL_EXPR)
	CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_call (fn, target, target == const0_rtx);
    }
#endif
  return NULL_RTX;
}
4275
4276 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4277 NULL_RTX if we failed the caller should emit a normal call, otherwise try to get
4278 the result in TARGET, if convenient. */
4279
static rtx
expand_builtin_strncmp (tree exp, rtx target, enum machine_mode mode)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      /* First see whether the comparison folds at compile time.  */
      tree result = fold_builtin_strncmp (CALL_EXPR_ARG (exp, 0),
					  CALL_EXPR_ARG (exp, 1),
					  CALL_EXPR_ARG (exp, 2));
      if (result)
	return expand_expr (result, target, mode, EXPAND_NORMAL);
    }

  /* If c_strlen can determine an expression for one of the string
     lengths, and it doesn't have side effects, then emit cmpstrnsi
     using length MIN(strlen(string)+1, arg3).  */
#ifdef HAVE_cmpstrnsi
  if (HAVE_cmpstrnsi)
    {
      tree len, len1, len2;
      rtx arg1_rtx, arg2_rtx, arg3_rtx;
      rtx result, insn;
      tree fndecl, fn;
      tree arg1 = CALL_EXPR_ARG (exp, 0);
      tree arg2 = CALL_EXPR_ARG (exp, 1);
      tree arg3 = CALL_EXPR_ARG (exp, 2);

      /* Alignments in bytes; zero means "not a usable pointer".  */
      int arg1_align
	= get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
      int arg2_align
	= get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
      enum machine_mode insn_mode
	= insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;

      len1 = c_strlen (arg1, 1);
      len2 = c_strlen (arg2, 1);

      /* Include the terminating NUL in each known length.  */
      if (len1)
	len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
      if (len2)
	len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);

      /* If we don't have a constant length for the first, use the length
	 of the second, if we know it.  We don't require a constant for
	 this case; some cost analysis could be done if both are available
	 but neither is constant.  For now, assume they're equally cheap,
	 unless one has side effects.  If both strings have constant lengths,
	 use the smaller.  */

      if (!len1)
	len = len2;
      else if (!len2)
	len = len1;
      else if (TREE_SIDE_EFFECTS (len1))
	len = len2;
      else if (TREE_SIDE_EFFECTS (len2))
	len = len1;
      else if (TREE_CODE (len1) != INTEGER_CST)
	len = len2;
      else if (TREE_CODE (len2) != INTEGER_CST)
	len = len1;
      else if (tree_int_cst_lt (len1, len2))
	len = len1;
      else
	len = len2;

      /* If both arguments have side effects, we cannot optimize.  */
      if (!len || TREE_SIDE_EFFECTS (len))
	return NULL_RTX;

      /* The actual new length parameter is MIN(len,arg3).  */
      len = fold_build2 (MIN_EXPR, TREE_TYPE (len), len,
			 fold_convert (TREE_TYPE (len), arg3));

      /* If we don't have POINTER_TYPE, call the function.  */
      if (arg1_align == 0 || arg2_align == 0)
	return NULL_RTX;

      /* Make a place to write the result of the instruction.  */
      result = target;
      if (! (result != 0
	     && REG_P (result) && GET_MODE (result) == insn_mode
	     && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	result = gen_reg_rtx (insn_mode);

      /* Stabilize the arguments in case gen_cmpstrnsi fails.  */
      arg1 = builtin_save_expr (arg1);
      arg2 = builtin_save_expr (arg2);
      len = builtin_save_expr (len);

      arg1_rtx = get_memory_rtx (arg1, len);
      arg2_rtx = get_memory_rtx (arg2, len);
      arg3_rtx = expand_normal (len);
      insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
			    GEN_INT (MIN (arg1_align, arg2_align)));
      if (insn)
	{
	  emit_insn (insn);

	  /* Return the value in the proper mode for this function.  */
	  mode = TYPE_MODE (TREE_TYPE (exp));
	  if (GET_MODE (result) == mode)
	    return result;
	  if (target == 0)
	    return convert_to_mode (mode, result, 0);
	  convert_move (target, result, 0);
	  return target;
	}

      /* Expand the library call ourselves using a stabilized argument
	 list to avoid re-evaluating the function's arguments twice.  */
      fndecl = get_callee_fndecl (exp);
      fn = build_call_expr (fndecl, 3, arg1, arg2, len);
      if (TREE_CODE (fn) == CALL_EXPR)
	CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_call (fn, target, target == const0_rtx);
    }
#endif
  return NULL_RTX;
}
4402
4403 /* Expand expression EXP, which is a call to the strcat builtin.
4404 Return NULL_RTX if we failed the caller should emit a normal call,
4405 otherwise try to get the result in TARGET, if convenient. */
4406
static rtx
expand_builtin_strcat (tree fndecl, tree exp, rtx target, enum machine_mode mode)
{
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      tree dst = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      const char *p = c_getstr (src);

      /* If the string length is zero, return the dst parameter.  */
      if (p && *p == '\0')
	return expand_expr (dst, target, mode, EXPAND_NORMAL);

      if (!optimize_size)
	{
	  /* See if we can store by pieces into (dst + strlen(dst)).  */
	  tree newsrc, newdst,
	    strlen_fn = implicit_built_in_decls[BUILT_IN_STRLEN];
	  rtx insns;

	  /* Stabilize the argument list.  */
	  newsrc = builtin_save_expr (src);
	  dst = builtin_save_expr (dst);

	  /* Collect the copy insns in a sequence so they can be
	     discarded wholesale if the strcpy expansion fails.  */
	  start_sequence ();

	  /* Create strlen (dst).  */
	  newdst = build_call_expr (strlen_fn, 1, dst);
	  /* Create (dst p+ strlen (dst)).  */

	  newdst = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dst), dst, newdst);
	  newdst = builtin_save_expr (newdst);

	  if (!expand_builtin_strcpy_args (fndecl, newdst, newsrc, target, mode))
	    {
	      end_sequence (); /* Stop sequence.  */
	      return NULL_RTX;
	    }

	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);

	  /* strcat returns its first argument.  */
	  return expand_expr (dst, target, mode, EXPAND_NORMAL);
	}

      return NULL_RTX;
    }
}
4459
4460 /* Expand expression EXP, which is a call to the strncat builtin.
4461 Return NULL_RTX if we failed the caller should emit a normal call,
4462 otherwise try to get the result in TARGET, if convenient. */
4463
4464 static rtx
4465 expand_builtin_strncat (tree exp, rtx target, enum machine_mode mode)
4466 {
4467 if (validate_arglist (exp,
4468 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4469 {
4470 tree result = fold_builtin_strncat (CALL_EXPR_ARG (exp, 0),
4471 CALL_EXPR_ARG (exp, 1),
4472 CALL_EXPR_ARG (exp, 2));
4473 if (result)
4474 return expand_expr (result, target, mode, EXPAND_NORMAL);
4475 }
4476 return NULL_RTX;
4477 }
4478
4479 /* Expand expression EXP, which is a call to the strspn builtin.
4480 Return NULL_RTX if we failed the caller should emit a normal call,
4481 otherwise try to get the result in TARGET, if convenient. */
4482
4483 static rtx
4484 expand_builtin_strspn (tree exp, rtx target, enum machine_mode mode)
4485 {
4486 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4487 {
4488 tree result = fold_builtin_strspn (CALL_EXPR_ARG (exp, 0),
4489 CALL_EXPR_ARG (exp, 1));
4490 if (result)
4491 return expand_expr (result, target, mode, EXPAND_NORMAL);
4492 }
4493 return NULL_RTX;
4494 }
4495
4496 /* Expand expression EXP, which is a call to the strcspn builtin.
4497 Return NULL_RTX if we failed the caller should emit a normal call,
4498 otherwise try to get the result in TARGET, if convenient. */
4499
4500 static rtx
4501 expand_builtin_strcspn (tree exp, rtx target, enum machine_mode mode)
4502 {
4503 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4504 {
4505 tree result = fold_builtin_strcspn (CALL_EXPR_ARG (exp, 0),
4506 CALL_EXPR_ARG (exp, 1));
4507 if (result)
4508 return expand_expr (result, target, mode, EXPAND_NORMAL);
4509 }
4510 return NULL_RTX;
4511 }
4512
4513 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4514 if that's convenient. */
4515
rtx
expand_builtin_saveregs (void)
{
  rtx val, seq;

  /* Don't do __builtin_saveregs more than once in a function.
     Save the result of the first call and reuse it.  */
  if (saveregs_value != 0)
    return saveregs_value;

  /* When this function is called, it means that registers must be
     saved on entry to this function.  So we migrate the call to the
     first insn of this function.  */

  start_sequence ();

  /* Do whatever the machine needs done in this case.  */
  val = targetm.calls.expand_builtin_saveregs ();

  seq = get_insns ();
  end_sequence ();

  /* Cache the result for any later __builtin_saveregs in this
     function (see the early-return above).  */
  saveregs_value = val;

  /* Put the insns after the NOTE that starts the function.  If this
     is inside a start_sequence, make the outer-level insn chain current, so
     the code is placed at the start of the function.  */
  push_topmost_sequence ();
  emit_insn_after (seq, entry_of_function ());
  pop_topmost_sequence ();

  return val;
}
4549
4550 /* __builtin_args_info (N) returns word N of the arg space info
4551 for the current function. The number and meanings of words
4552 is controlled by the definition of CUMULATIVE_ARGS. */
4553
static rtx
expand_builtin_args_info (tree exp)
{
  /* View the target's CUMULATIVE_ARGS for the current function as a
     flat array of ints; the assert below checks the struct size is an
     exact multiple of sizeof (int) so the view covers it fully.  */
  int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
  int *word_ptr = (int *) &current_function_args_info;

  gcc_assert (sizeof (CUMULATIVE_ARGS) % sizeof (int) == 0);

  if (call_expr_nargs (exp) != 0)
    {
      /* The word index must be a compile-time integer constant.  */
      if (!host_integerp (CALL_EXPR_ARG (exp, 0), 0))
	error ("argument of %<__builtin_args_info%> must be constant");
      else
	{
	  HOST_WIDE_INT wordnum = tree_low_cst (CALL_EXPR_ARG (exp, 0), 0);

	  if (wordnum < 0 || wordnum >= nwords)
	    error ("argument of %<__builtin_args_info%> out of range");
	  else
	    return GEN_INT (word_ptr[wordnum]);
	}
    }
  else
    error ("missing argument in %<__builtin_args_info%>");

  /* On any error, expand to zero so compilation can continue.  */
  return const0_rtx;
}
4581
4582 /* Expand a call to __builtin_next_arg. */
4583
4584 static rtx
4585 expand_builtin_next_arg (void)
4586 {
4587 /* Checking arguments is already done in fold_builtin_next_arg
4588 that must be called before this function. */
4589 return expand_binop (ptr_mode, add_optab,
4590 current_function_internal_arg_pointer,
4591 current_function_arg_offset_rtx,
4592 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4593 }
4594
4595 /* Make it easier for the backends by protecting the valist argument
4596 from multiple evaluations. */
4597
static tree
stabilize_va_list (tree valist, int needs_lvalue)
{
  if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
    {
      /* Array-style va_list: evaluate any side effects only once.  */
      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);

      /* For this case, the backends will be expecting a pointer to
	 TREE_TYPE (va_list_type_node), but it's possible we've
	 actually been given an array (an actual va_list_type_node).
	 So fix it.  */
      if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
	{
	  tree p1 = build_pointer_type (TREE_TYPE (va_list_type_node));
	  valist = build_fold_addr_expr_with_type (valist, p1);
	}
    }
  else
    {
      tree pt;

      if (! needs_lvalue)
	{
	  /* No side effects and no lvalue needed: the value is already
	     safe to use as-is.  */
	  if (! TREE_SIDE_EFFECTS (valist))
	    return valist;

	  /* Take the address so the save_expr below stabilizes the
	     location, not just a copied value.  */
	  pt = build_pointer_type (va_list_type_node);
	  valist = fold_build1 (ADDR_EXPR, pt, valist);
	  TREE_SIDE_EFFECTS (valist) = 1;
	}

      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);
      /* Hand back a dereference of the stabilized address so the
	 result is usable as an lvalue.  */
      valist = build_fold_indirect_ref (valist);
    }

  return valist;
}
4637
4638 /* The "standard" definition of va_list is void*. */
4639
tree
std_build_builtin_va_list (void)
{
  /* The default, ABI-neutral va_list representation is a plain
     `void *' cursor into the argument area.  */
  return ptr_type_node;
}
4645
4646 /* The "standard" implementation of va_start: just assign `nextarg' to
4647 the variable. */
4648
4649 void
4650 std_expand_builtin_va_start (tree valist, rtx nextarg)
4651 {
4652 tree t;
4653 t = make_tree (sizetype, nextarg);
4654 t = fold_convert (ptr_type_node, t);
4655
4656 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
4657 TREE_SIDE_EFFECTS (t) = 1;
4658
4659 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4660 }
4661
4662 /* Expand EXP, a call to __builtin_va_start. */
4663
static rtx
expand_builtin_va_start (tree exp)
{
  rtx nextarg;
  tree valist;

  /* va_start takes the va_list plus the last named parameter.  */
  if (call_expr_nargs (exp) < 2)
    {
      error ("too few arguments to function %<va_start%>");
      return const0_rtx;
    }

  /* fold_builtin_next_arg diagnoses a bad second argument; a nonzero
     return means an error has already been reported.  */
  if (fold_builtin_next_arg (exp, true))
    return const0_rtx;

  nextarg = expand_builtin_next_arg ();
  valist = stabilize_va_list (CALL_EXPR_ARG (exp, 0), 1);

  /* Let the target override the standard expansion if it defines
     EXPAND_BUILTIN_VA_START.  */
#ifdef EXPAND_BUILTIN_VA_START
  EXPAND_BUILTIN_VA_START (valist, nextarg);
#else
  std_expand_builtin_va_start (valist, nextarg);
#endif

  return const0_rtx;
}
4690
4691 /* The "standard" implementation of va_arg: read the value from the
4692 current (padded) address and increment by the (padded) size. */
4693
4694 tree
4695 std_gimplify_va_arg_expr (tree valist, tree type, tree *pre_p, tree *post_p)
4696 {
4697 tree addr, t, type_size, rounded_size, valist_tmp;
4698 unsigned HOST_WIDE_INT align, boundary;
4699 bool indirect;
4700
4701 #ifdef ARGS_GROW_DOWNWARD
4702 /* All of the alignment and movement below is for args-grow-up machines.
4703 As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
4704 implement their own specialized gimplify_va_arg_expr routines. */
4705 gcc_unreachable ();
4706 #endif
4707
4708 indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
4709 if (indirect)
4710 type = build_pointer_type (type);
4711
4712 align = PARM_BOUNDARY / BITS_PER_UNIT;
4713 boundary = FUNCTION_ARG_BOUNDARY (TYPE_MODE (type), type) / BITS_PER_UNIT;
4714
4715 /* Hoist the valist value into a temporary for the moment. */
4716 valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);
4717
4718 /* va_list pointer is aligned to PARM_BOUNDARY. If argument actually
4719 requires greater alignment, we must perform dynamic alignment. */
4720 if (boundary > align
4721 && !integer_zerop (TYPE_SIZE (type)))
4722 {
4723 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4724 fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist),
4725 valist_tmp, size_int (boundary - 1)));
4726 gimplify_and_add (t, pre_p);
4727
4728 t = fold_convert (sizetype, valist_tmp);
4729 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4730 fold_convert (TREE_TYPE (valist),
4731 fold_build2 (BIT_AND_EXPR, sizetype, t,
4732 size_int (-boundary))));
4733 gimplify_and_add (t, pre_p);
4734 }
4735 else
4736 boundary = align;
4737
4738 /* If the actual alignment is less than the alignment of the type,
4739 adjust the type accordingly so that we don't assume strict alignment
4740 when deferencing the pointer. */
4741 boundary *= BITS_PER_UNIT;
4742 if (boundary < TYPE_ALIGN (type))
4743 {
4744 type = build_variant_type_copy (type);
4745 TYPE_ALIGN (type) = boundary;
4746 }
4747
4748 /* Compute the rounded size of the type. */
4749 type_size = size_in_bytes (type);
4750 rounded_size = round_up (type_size, align);
4751
4752 /* Reduce rounded_size so it's sharable with the postqueue. */
4753 gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);
4754
4755 /* Get AP. */
4756 addr = valist_tmp;
4757 if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
4758 {
4759 /* Small args are padded downward. */
4760 t = fold_build2 (GT_EXPR, sizetype, rounded_size, size_int (align));
4761 t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
4762 size_binop (MINUS_EXPR, rounded_size, type_size));
4763 addr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (addr), addr, t);
4764 }
4765
4766 /* Compute new value for AP. */
4767 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist), valist_tmp, rounded_size);
4768 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
4769 gimplify_and_add (t, pre_p);
4770
4771 addr = fold_convert (build_pointer_type (type), addr);
4772
4773 if (indirect)
4774 addr = build_va_arg_indirect_ref (addr);
4775
4776 return build_va_arg_indirect_ref (addr);
4777 }
4778
4779 /* Build an indirect-ref expression over the given TREE, which represents a
4780 piece of a va_arg() expansion. */
4781 tree
4782 build_va_arg_indirect_ref (tree addr)
4783 {
4784 addr = build_fold_indirect_ref (addr);
4785
4786 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
4787 mf_mark (addr);
4788
4789 return addr;
4790 }
4791
4792 /* Return a dummy expression of type TYPE in order to keep going after an
4793 error. */
4794
4795 static tree
4796 dummy_object (tree type)
4797 {
4798 tree t = build_int_cst (build_pointer_type (type), 0);
4799 return build1 (INDIRECT_REF, type, t);
4800 }
4801
/* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
   builtin function, but a very special sort of operator.  *EXPR_P is
   the VA_ARG_EXPR; statements are appended to *PRE_P/*POST_P.  */

enum gimplify_status
gimplify_va_arg_expr (tree *expr_p, tree *pre_p, tree *post_p)
{
  tree promoted_type, want_va_type, have_va_type;
  tree valist = TREE_OPERAND (*expr_p, 0);
  tree type = TREE_TYPE (*expr_p);
  tree t;

  /* Verify that valist is of the proper type.  */
  want_va_type = va_list_type_node;
  have_va_type = TREE_TYPE (valist);

  if (have_va_type == error_mark_node)
    return GS_ERROR;

  if (TREE_CODE (want_va_type) == ARRAY_TYPE)
    {
      /* If va_list is an array type, the argument may have decayed
	 to a pointer type, e.g. by being passed to another function.
	 In that case, unwrap both types so that we can compare the
	 underlying records.  */
      if (TREE_CODE (have_va_type) == ARRAY_TYPE
	  || POINTER_TYPE_P (have_va_type))
	{
	  want_va_type = TREE_TYPE (want_va_type);
	  have_va_type = TREE_TYPE (have_va_type);
	}
    }

  if (TYPE_MAIN_VARIANT (want_va_type) != TYPE_MAIN_VARIANT (have_va_type))
    {
      error ("first argument to %<va_arg%> not of type %<va_list%>");
      return GS_ERROR;
    }

  /* Generate a diagnostic for requesting data of a type that cannot
     be passed through `...' due to type promotion at the call site.  */
  else if ((promoted_type = lang_hooks.types.type_promotes_to (type))
	   != type)
    {
      /* Emit the "pass X not Y" help text only once per compilation.  */
      static bool gave_help;

      /* Unfortunately, this is merely undefined, rather than a constraint
	 violation, so we cannot make this an error.  If this call is never
	 executed, the program is still strictly conforming.  */
      warning (0, "%qT is promoted to %qT when passed through %<...%>",
	       type, promoted_type);
      if (! gave_help)
	{
	  gave_help = true;
	  warning (0, "(so you should pass %qT not %qT to %<va_arg%>)",
		   promoted_type, type);
	}

      /* We can, however, treat "undefined" any way we please.
	 Call abort to encourage the user to fix the program.  */
      inform ("if this code is reached, the program will abort");
      t = build_call_expr (implicit_built_in_decls[BUILT_IN_TRAP], 0);
      append_to_statement_list (t, pre_p);

      /* This is dead code, but go ahead and finish so that the
	 mode of the result comes out right.  */
      *expr_p = dummy_object (type);
      return GS_ALL_DONE;
    }
  else
    {
      /* Make it easier for the backends by protecting the valist argument
	 from multiple evaluations.  */
      if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
	{
	  /* For this case, the backends will be expecting a pointer to
	     TREE_TYPE (va_list_type_node), but it's possible we've
	     actually been given an array (an actual va_list_type_node).
	     So fix it.  */
	  if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
	    {
	      tree p1 = build_pointer_type (TREE_TYPE (va_list_type_node));
	      valist = build_fold_addr_expr_with_type (valist, p1);
	    }
	  gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
	}
      else
	gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);

      if (!targetm.gimplify_va_arg_expr)
	/* FIXME:Once most targets are converted we should merely
	   assert this is non-null.  */
	return GS_ALL_DONE;

      /* Let the target expand the access itself.  */
      *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
      return GS_OK;
    }
}
4899
4900 /* Expand EXP, a call to __builtin_va_end. */
4901
4902 static rtx
4903 expand_builtin_va_end (tree exp)
4904 {
4905 tree valist = CALL_EXPR_ARG (exp, 0);
4906
4907 /* Evaluate for side effects, if needed. I hate macros that don't
4908 do that. */
4909 if (TREE_SIDE_EFFECTS (valist))
4910 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4911
4912 return const0_rtx;
4913 }
4914
/* Expand EXP, a call to __builtin_va_copy.  We do this as a
   builtin rather than just as an assignment in stdarg.h because of the
   nastiness of array-type va_list types.  */

static rtx
expand_builtin_va_copy (tree exp)
{
  tree dst, src, t;

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  /* Stabilize both operands before expanding; the second argument
     presumably flags whether the list is written -- TODO confirm
     against stabilize_va_list.  */
  dst = stabilize_va_list (dst, 1);
  src = stabilize_va_list (src, 0);

  if (TREE_CODE (va_list_type_node) != ARRAY_TYPE)
    {
      /* Scalar va_list: a plain assignment performs the copy.  */
      t = build2 (MODIFY_EXPR, va_list_type_node, dst, src);
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }
  else
    {
      rtx dstb, srcb, size;

      /* Array-type va_list: copy the whole object as a block move.
	 Evaluate to pointers.  */
      dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
      srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
      size = expand_expr (TYPE_SIZE_UNIT (va_list_type_node), NULL_RTX,
			  VOIDmode, EXPAND_NORMAL);

      dstb = convert_memory_address (Pmode, dstb);
      srcb = convert_memory_address (Pmode, srcb);

      /* "Dereference" to BLKmode memories.  */
      dstb = gen_rtx_MEM (BLKmode, dstb);
      set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
      set_mem_align (dstb, TYPE_ALIGN (va_list_type_node));
      srcb = gen_rtx_MEM (BLKmode, srcb);
      set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
      set_mem_align (srcb, TYPE_ALIGN (va_list_type_node));

      /* Copy.  */
      emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
    }

  return const0_rtx;
}
4963
4964 /* Expand a call to one of the builtin functions __builtin_frame_address or
4965 __builtin_return_address. */
4966
4967 static rtx
4968 expand_builtin_frame_address (tree fndecl, tree exp)
4969 {
4970 /* The argument must be a nonnegative integer constant.
4971 It counts the number of frames to scan up the stack.
4972 The value is the return address saved in that frame. */
4973 if (call_expr_nargs (exp) == 0)
4974 /* Warning about missing arg was already issued. */
4975 return const0_rtx;
4976 else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
4977 {
4978 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4979 error ("invalid argument to %<__builtin_frame_address%>");
4980 else
4981 error ("invalid argument to %<__builtin_return_address%>");
4982 return const0_rtx;
4983 }
4984 else
4985 {
4986 rtx tem
4987 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
4988 tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
4989
4990 /* Some ports cannot access arbitrary stack frames. */
4991 if (tem == NULL)
4992 {
4993 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4994 warning (0, "unsupported argument to %<__builtin_frame_address%>");
4995 else
4996 warning (0, "unsupported argument to %<__builtin_return_address%>");
4997 return const0_rtx;
4998 }
4999
5000 /* For __builtin_frame_address, return what we've got. */
5001 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5002 return tem;
5003
5004 if (!REG_P (tem)
5005 && ! CONSTANT_P (tem))
5006 tem = copy_to_mode_reg (Pmode, tem);
5007 return tem;
5008 }
5009 }
5010
5011 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if
5012 we failed and the caller should emit a normal call, otherwise try to get
5013 the result in TARGET, if convenient. */
5014
5015 static rtx
5016 expand_builtin_alloca (tree exp, rtx target)
5017 {
5018 rtx op0;
5019 rtx result;
5020
5021 /* In -fmudflap-instrumented code, alloca() and __builtin_alloca()
5022 should always expand to function calls. These can be intercepted
5023 in libmudflap. */
5024 if (flag_mudflap)
5025 return NULL_RTX;
5026
5027 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5028 return NULL_RTX;
5029
5030 /* Compute the argument. */
5031 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5032
5033 /* Allocate the desired space. */
5034 result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
5035 result = convert_memory_address (ptr_mode, result);
5036
5037 return result;
5038 }
5039
5040 /* Expand a call to a bswap builtin with argument ARG0. MODE
5041 is the mode to expand with. */
5042
5043 static rtx
5044 expand_builtin_bswap (tree exp, rtx target, rtx subtarget)
5045 {
5046 enum machine_mode mode;
5047 tree arg;
5048 rtx op0;
5049
5050 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5051 return NULL_RTX;
5052
5053 arg = CALL_EXPR_ARG (exp, 0);
5054 mode = TYPE_MODE (TREE_TYPE (arg));
5055 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5056
5057 target = expand_unop (mode, bswap_optab, op0, target, 1);
5058
5059 gcc_assert (target);
5060
5061 return convert_to_mode (mode, target, 0);
5062 }
5063
5064 /* Expand a call to a unary builtin in EXP.
5065 Return NULL_RTX if a normal call should be emitted rather than expanding the
5066 function in-line. If convenient, the result should be placed in TARGET.
5067 SUBTARGET may be used as the target for computing one of EXP's operands. */
5068
5069 static rtx
5070 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
5071 rtx subtarget, optab op_optab)
5072 {
5073 rtx op0;
5074
5075 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5076 return NULL_RTX;
5077
5078 /* Compute the argument. */
5079 op0 = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
5080 VOIDmode, EXPAND_NORMAL);
5081 /* Compute op, into TARGET if possible.
5082 Set TARGET to wherever the result comes back. */
5083 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5084 op_optab, op0, target, 1);
5085 gcc_assert (target);
5086
5087 return convert_to_mode (target_mode, target, 0);
5088 }
5089
5090 /* If the string passed to fputs is a constant and is one character
5091 long, we attempt to transform this call into __builtin_fputc(). */
5092
5093 static rtx
5094 expand_builtin_fputs (tree exp, rtx target, bool unlocked)
5095 {
5096 /* Verify the arguments in the original call. */
5097 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5098 {
5099 tree result = fold_builtin_fputs (CALL_EXPR_ARG (exp, 0),
5100 CALL_EXPR_ARG (exp, 1),
5101 (target == const0_rtx),
5102 unlocked, NULL_TREE);
5103 if (result)
5104 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
5105 }
5106 return NULL_RTX;
5107 }
5108
5109 /* Expand a call to __builtin_expect. We just return our argument
5110 as the builtin_expect semantic should've been already executed by
5111 tree branch prediction pass. */
5112
5113 static rtx
5114 expand_builtin_expect (tree exp, rtx target)
5115 {
5116 tree arg, c;
5117
5118 if (call_expr_nargs (exp) < 2)
5119 return const0_rtx;
5120 arg = CALL_EXPR_ARG (exp, 0);
5121 c = CALL_EXPR_ARG (exp, 1);
5122
5123 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5124 /* When guessing was done, the hints should be already stripped away. */
5125 gcc_assert (!flag_guess_branch_prob
5126 || optimize == 0 || errorcount || sorrycount);
5127 return target;
5128 }
5129
/* Emit RTL to abort execution: the target's trap instruction when it
   provides one, otherwise a library call to abort.  A barrier is
   emitted afterwards since control never continues past this point.  */

void
expand_builtin_trap (void)
{
#ifdef HAVE_trap
  if (HAVE_trap)
    emit_insn (gen_trap ());
  else
#endif
  emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
  emit_barrier ();
}
5141
5142 /* Expand EXP, a call to fabs, fabsf or fabsl.
5143 Return NULL_RTX if a normal call should be emitted rather than expanding
5144 the function inline. If convenient, the result should be placed
5145 in TARGET. SUBTARGET may be used as the target for computing
5146 the operand. */
5147
5148 static rtx
5149 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5150 {
5151 enum machine_mode mode;
5152 tree arg;
5153 rtx op0;
5154
5155 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5156 return NULL_RTX;
5157
5158 arg = CALL_EXPR_ARG (exp, 0);
5159 mode = TYPE_MODE (TREE_TYPE (arg));
5160 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5161 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5162 }
5163
5164 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5165 Return NULL is a normal call should be emitted rather than expanding the
5166 function inline. If convenient, the result should be placed in TARGET.
5167 SUBTARGET may be used as the target for computing the operand. */
5168
5169 static rtx
5170 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5171 {
5172 rtx op0, op1;
5173 tree arg;
5174
5175 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5176 return NULL_RTX;
5177
5178 arg = CALL_EXPR_ARG (exp, 0);
5179 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5180
5181 arg = CALL_EXPR_ARG (exp, 1);
5182 op1 = expand_normal (arg);
5183
5184 return expand_copysign (op0, op1, target);
5185 }
5186
5187 /* Create a new constant string literal and return a char* pointer to it.
5188 The STRING_CST value is the LEN characters at STR. */
5189 tree
5190 build_string_literal (int len, const char *str)
5191 {
5192 tree t, elem, index, type;
5193
5194 t = build_string (len, str);
5195 elem = build_type_variant (char_type_node, 1, 0);
5196 index = build_index_type (build_int_cst (NULL_TREE, len - 1));
5197 type = build_array_type (elem, index);
5198 TREE_TYPE (t) = type;
5199 TREE_CONSTANT (t) = 1;
5200 TREE_INVARIANT (t) = 1;
5201 TREE_READONLY (t) = 1;
5202 TREE_STATIC (t) = 1;
5203
5204 type = build_pointer_type (type);
5205 t = build1 (ADDR_EXPR, type, t);
5206
5207 type = build_pointer_type (elem);
5208 t = build1 (NOP_EXPR, type, t);
5209 return t;
5210 }
5211
/* Expand EXP, a call to printf or printf_unlocked.
   Return NULL_RTX if a normal call should be emitted rather than transforming
   the function inline.  If convenient, the result should be placed in
   TARGET with mode MODE.  UNLOCKED indicates this is a printf_unlocked
   call.  */
static rtx
expand_builtin_printf (tree exp, rtx target, enum machine_mode mode,
		       bool unlocked)
{
  /* If we're using an unlocked function, assume the other unlocked
     functions exist explicitly.  */
  tree const fn_putchar = unlocked ? built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED]
    : implicit_built_in_decls[BUILT_IN_PUTCHAR];
  tree const fn_puts = unlocked ? built_in_decls[BUILT_IN_PUTS_UNLOCKED]
    : implicit_built_in_decls[BUILT_IN_PUTS];
  const char *fmt_str;
  tree fn = 0;
  tree fmt, arg;
  int nargs = call_expr_nargs (exp);

  /* If the return value is used, don't do the transformation: puts and
     putchar do not return the count printf would.  */
  if (target != const0_rtx)
    return NULL_RTX;

  /* Verify the required arguments in the original call.  */
  if (nargs == 0)
    return NULL_RTX;
  fmt = CALL_EXPR_ARG (exp, 0);
  if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
    return NULL_RTX;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return NULL_RTX;

  /* Target character set must be known to compare format characters.  */
  if (!init_target_chars ())
    return NULL_RTX;

  /* If the format specifier was "%s\n", call __builtin_puts(arg).  */
  if (strcmp (fmt_str, target_percent_s_newline) == 0)
    {
      if ((nargs != 2)
	  || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 1))))
	return NULL_RTX;
      if (fn_puts)
	fn = build_call_expr (fn_puts, 1, CALL_EXPR_ARG (exp, 1));
    }
  /* If the format specifier was "%c", call __builtin_putchar(arg).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if ((nargs != 2)
	  || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1))) != INTEGER_TYPE)
	return NULL_RTX;
      if (fn_putchar)
	fn = build_call_expr (fn_putchar, 1, CALL_EXPR_ARG (exp, 1));
    }
  else
    {
      /* We can't handle anything else with % args or %% ... yet.  */
      if (strchr (fmt_str, target_percent))
	return NULL_RTX;

      if (nargs > 1)
	return NULL_RTX;

      /* If the format specifier was "", printf does nothing.  */
      if (fmt_str[0] == '\0')
	return const0_rtx;
      /* If the format specifier has length of 1, call putchar.  */
      if (fmt_str[1] == '\0')
	{
	  /* Given printf("c"), (where c is any one character,)
	     convert "c"[0] to an int and pass that to the replacement
	     function.  */
	  arg = build_int_cst (NULL_TREE, fmt_str[0]);
	  if (fn_putchar)
	    fn = build_call_expr (fn_putchar, 1, arg);
	}
      else
	{
	  /* If the format specifier was "string\n", call puts("string").  */
	  size_t len = strlen (fmt_str);
	  if ((unsigned char)fmt_str[len - 1] == target_newline)
	    {
	      /* Create a NUL-terminated string that's one char shorter
		 than the original, stripping off the trailing '\n'.  */
	      char *newstr = alloca (len);
	      memcpy (newstr, fmt_str, len - 1);
	      newstr[len - 1] = 0;
	      arg = build_string_literal (len, newstr);
	      if (fn_puts)
		fn = build_call_expr (fn_puts, 1, arg);
	    }
	  else
	    /* We'd like to arrange to call fputs(string,stdout) here,
	       but we need stdout and don't have a way to get it yet.  */
	    return NULL_RTX;
	}
    }

  /* FN stays null when the replacement builtin is unavailable; emit a
     normal printf call in that case.  */
  if (!fn)
    return NULL_RTX;
  if (TREE_CODE (fn) == CALL_EXPR)
    CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
  return expand_expr (fn, target, mode, EXPAND_NORMAL);
}
5319
/* Expand EXP, a call to fprintf or fprintf_unlocked.
   Return NULL_RTX if a normal call should be emitted rather than transforming
   the function inline.  If convenient, the result should be placed in
   TARGET with mode MODE.  UNLOCKED indicates this is a fprintf_unlocked
   call.  */
static rtx
expand_builtin_fprintf (tree exp, rtx target, enum machine_mode mode,
			bool unlocked)
{
  /* If we're using an unlocked function, assume the other unlocked
     functions exist explicitly.  */
  tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
    : implicit_built_in_decls[BUILT_IN_FPUTC];
  tree const fn_fputs = unlocked ? built_in_decls[BUILT_IN_FPUTS_UNLOCKED]
    : implicit_built_in_decls[BUILT_IN_FPUTS];
  const char *fmt_str;
  tree fn = 0;
  tree fmt, fp, arg;
  int nargs = call_expr_nargs (exp);

  /* If the return value is used, don't do the transformation: fputs and
     fputc do not return the count fprintf would.  */
  if (target != const0_rtx)
    return NULL_RTX;

  /* Verify the required arguments in the original call: a FILE* and a
     format string, both pointers.  */
  if (nargs < 2)
    return NULL_RTX;
  fp = CALL_EXPR_ARG (exp, 0);
  if (! POINTER_TYPE_P (TREE_TYPE (fp)))
    return NULL_RTX;
  fmt = CALL_EXPR_ARG (exp, 1);
  if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
    return NULL_RTX;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return NULL_RTX;

  if (!init_target_chars ())
    return NULL_RTX;

  /* If the format specifier was "%s", call __builtin_fputs(arg,fp).  */
  if (strcmp (fmt_str, target_percent_s) == 0)
    {
      if ((nargs != 3)
	  || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 2))))
	return NULL_RTX;
      arg = CALL_EXPR_ARG (exp, 2);
      if (fn_fputs)
	fn = build_call_expr (fn_fputs, 2, arg, fp);
    }
  /* If the format specifier was "%c", call __builtin_fputc(arg,fp).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if ((nargs != 3)
	  || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2))) != INTEGER_TYPE)
	return NULL_RTX;
      arg = CALL_EXPR_ARG (exp, 2);
      if (fn_fputc)
	fn = build_call_expr (fn_fputc, 2, arg, fp);
    }
  else
    {
      /* We can't handle anything else with % args or %% ... yet.  */
      if (strchr (fmt_str, target_percent))
	return NULL_RTX;

      if (nargs > 2)
	return NULL_RTX;

      /* If the format specifier was "", fprintf does nothing.  */
      if (fmt_str[0] == '\0')
	{
	  /* Evaluate and ignore FILE* argument for side-effects.  */
	  expand_expr (fp, const0_rtx, VOIDmode, EXPAND_NORMAL);
	  return const0_rtx;
	}

      /* When "string" doesn't contain %, replace all cases of
	 fprintf(stream,string) with fputs(string,stream).  The fputs
	 builtin will take care of special cases like length == 1.  */
      if (fn_fputs)
	fn = build_call_expr (fn_fputs, 2, fmt, fp);
    }

  /* FN stays null when the replacement builtin is unavailable; emit a
     normal fprintf call in that case.  */
  if (!fn)
    return NULL_RTX;
  if (TREE_CODE (fn) == CALL_EXPR)
    CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
  return expand_expr (fn, target, mode, EXPAND_NORMAL);
}
5412
5413 /* Expand a call EXP to sprintf. Return NULL_RTX if
5414 a normal call should be emitted rather than expanding the function
5415 inline. If convenient, the result should be placed in TARGET with
5416 mode MODE. */
5417
5418 static rtx
5419 expand_builtin_sprintf (tree exp, rtx target, enum machine_mode mode)
5420 {
5421 tree dest, fmt;
5422 const char *fmt_str;
5423 int nargs = call_expr_nargs (exp);
5424
5425 /* Verify the required arguments in the original call. */
5426 if (nargs < 2)
5427 return NULL_RTX;
5428 dest = CALL_EXPR_ARG (exp, 0);
5429 if (! POINTER_TYPE_P (TREE_TYPE (dest)))
5430 return NULL_RTX;
5431 fmt = CALL_EXPR_ARG (exp, 0);
5432 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5433 return NULL_RTX;
5434
5435 /* Check whether the format is a literal string constant. */
5436 fmt_str = c_getstr (fmt);
5437 if (fmt_str == NULL)
5438 return NULL_RTX;
5439
5440 if (!init_target_chars ())
5441 return NULL_RTX;
5442
5443 /* If the format doesn't contain % args or %%, use strcpy. */
5444 if (strchr (fmt_str, target_percent) == 0)
5445 {
5446 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5447 tree exp;
5448
5449 if ((nargs > 2) || ! fn)
5450 return NULL_RTX;
5451 expand_expr (build_call_expr (fn, 2, dest, fmt),
5452 const0_rtx, VOIDmode, EXPAND_NORMAL);
5453 if (target == const0_rtx)
5454 return const0_rtx;
5455 exp = build_int_cst (NULL_TREE, strlen (fmt_str));
5456 return expand_expr (exp, target, mode, EXPAND_NORMAL);
5457 }
5458 /* If the format is "%s", use strcpy if the result isn't used. */
5459 else if (strcmp (fmt_str, target_percent_s) == 0)
5460 {
5461 tree fn, arg, len;
5462 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5463
5464 if (! fn)
5465 return NULL_RTX;
5466 if (nargs != 3)
5467 return NULL_RTX;
5468 arg = CALL_EXPR_ARG (exp, 2);
5469 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
5470 return NULL_RTX;
5471
5472 if (target != const0_rtx)
5473 {
5474 len = c_strlen (arg, 1);
5475 if (! len || TREE_CODE (len) != INTEGER_CST)
5476 return NULL_RTX;
5477 }
5478 else
5479 len = NULL_TREE;
5480
5481 expand_expr (build_call_expr (fn, 2, dest, arg),
5482 const0_rtx, VOIDmode, EXPAND_NORMAL);
5483
5484 if (target == const0_rtx)
5485 return const0_rtx;
5486 return expand_expr (len, target, mode, EXPAND_NORMAL);
5487 }
5488
5489 return NULL_RTX;
5490 }
5491
5492 /* Expand a call to either the entry or exit function profiler. */
5493
5494 static rtx
5495 expand_builtin_profile_func (bool exitp)
5496 {
5497 rtx this, which;
5498
5499 this = DECL_RTL (current_function_decl);
5500 gcc_assert (MEM_P (this));
5501 this = XEXP (this, 0);
5502
5503 if (exitp)
5504 which = profile_function_exit_libfunc;
5505 else
5506 which = profile_function_entry_libfunc;
5507
5508 emit_library_call (which, LCT_NORMAL, VOIDmode, 2, this, Pmode,
5509 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
5510 0),
5511 Pmode);
5512
5513 return const0_rtx;
5514 }
5515
5516 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5517
5518 static rtx
5519 round_trampoline_addr (rtx tramp)
5520 {
5521 rtx temp, addend, mask;
5522
5523 /* If we don't need too much alignment, we'll have been guaranteed
5524 proper alignment by get_trampoline_type. */
5525 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5526 return tramp;
5527
5528 /* Round address up to desired boundary. */
5529 temp = gen_reg_rtx (Pmode);
5530 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5531 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5532
5533 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5534 temp, 0, OPTAB_LIB_WIDEN);
5535 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5536 temp, 0, OPTAB_LIB_WIDEN);
5537
5538 return tramp;
5539 }
5540
/* Expand a call to __builtin_init_trampoline.  The three pointer
   arguments are the trampoline block, the nested function, and its
   static chain; the target macro INITIALIZE_TRAMPOLINE wires the
   latter two into the trampoline code.  */

static rtx
expand_builtin_init_trampoline (tree exp)
{
  tree t_tramp, t_func, t_chain;
  rtx r_tramp, r_func, r_chain;
#ifdef TRAMPOLINE_TEMPLATE
  rtx blktramp;
#endif

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
			 POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_tramp = CALL_EXPR_ARG (exp, 0);
  t_func = CALL_EXPR_ARG (exp, 1);
  t_chain = CALL_EXPR_ARG (exp, 2);

  r_tramp = expand_normal (t_tramp);
  r_func = expand_normal (t_func);
  r_chain = expand_normal (t_chain);

  /* Generate insns to initialize the trampoline.  */
  r_tramp = round_trampoline_addr (r_tramp);
#ifdef TRAMPOLINE_TEMPLATE
  /* First copy the target's canned trampoline template into place.  */
  blktramp = gen_rtx_MEM (BLKmode, r_tramp);
  set_mem_align (blktramp, TRAMPOLINE_ALIGNMENT);
  emit_block_move (blktramp, assemble_trampoline_template (),
		   GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
#endif
  /* Record that a trampoline was emitted, then let the target patch in
     the function address and static chain.  */
  trampolines_created = 1;
  INITIALIZE_TRAMPOLINE (r_tramp, r_func, r_chain);

  return const0_rtx;
}
5575
/* Expand a call to __builtin_adjust_trampoline: round the trampoline
   address to the required alignment and apply any target-specific
   adjustment before the address is used as a function pointer.  */

static rtx
expand_builtin_adjust_trampoline (tree exp)
{
  rtx tramp;

  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
  tramp = round_trampoline_addr (tramp);
#ifdef TRAMPOLINE_ADJUST_ADDRESS
  /* NOTE(review): the exact adjustment is target-defined -- see the
     TRAMPOLINE_ADJUST_ADDRESS documentation for each port.  */
  TRAMPOLINE_ADJUST_ADDRESS (tramp);
#endif

  return tramp;
}
5592
/* Expand a call to the built-in signbit, signbitf, signbitl, signbitd32,
   signbitd64, or signbitd128 function.
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_signbit (tree exp, rtx target)
{
  const struct real_format *fmt;
  enum machine_mode fmode, imode, rmode;
  HOST_WIDE_INT hi, lo;
  tree arg;
  int word, bitpos;
  rtx temp;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  fmode = TYPE_MODE (TREE_TYPE (arg));    /* Mode of the FP argument.  */
  rmode = TYPE_MODE (TREE_TYPE (exp));    /* Mode of the integer result.  */
  fmt = REAL_MODE_FORMAT (fmode);

  /* For floating point formats without a sign bit, implement signbit
     as "ARG < 0.0".  */
  bitpos = fmt->signbit_ro;
  if (bitpos < 0)
    {
      /* But we can't do this if the format supports signed zero.  */
      if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
	return NULL_RTX;

      arg = fold_build2 (LT_EXPR, TREE_TYPE (exp), arg,
			 build_real (TREE_TYPE (arg), dconst0));
      return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
    }

  temp = expand_normal (arg);
  if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
    {
      /* The value fits in one word: view it in the integer mode of the
	 same size and test the sign bit there.  */
      imode = int_mode_for_mode (fmode);
      if (imode == BLKmode)
	return NULL_RTX;
      temp = gen_lowpart (imode, temp);
    }
  else
    {
      /* Multi-word value: pick out only the word holding the sign bit,
	 and reduce BITPOS to a position within that word.  */
      imode = word_mode;
      /* Handle targets with different FP word orders.  */
      if (FLOAT_WORDS_BIG_ENDIAN)
	word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
      else
	word = bitpos / BITS_PER_WORD;
      temp = operand_subword_force (temp, word, fmode);
      bitpos = bitpos % BITS_PER_WORD;
    }

  /* Force the intermediate word_mode (or narrower) result into a
     register.  This avoids attempting to create paradoxical SUBREGs
     of floating point modes below.  */
  temp = force_reg (imode, temp);

  /* If the bitpos is within the "result mode" lowpart, the operation
     can be implement with a single bitwise AND.  Otherwise, we need
     a right shift and an AND.  */

  if (bitpos < GET_MODE_BITSIZE (rmode))
    {
      /* Build the two halves of the constant 1 << BITPOS.  */
      if (bitpos < HOST_BITS_PER_WIDE_INT)
	{
	  hi = 0;
	  lo = (HOST_WIDE_INT) 1 << bitpos;
	}
      else
	{
	  hi = (HOST_WIDE_INT) 1 << (bitpos - HOST_BITS_PER_WIDE_INT);
	  lo = 0;
	}

      if (imode != rmode)
	temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp,
			   immed_double_const (lo, hi, rmode),
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }
  else
    {
      /* Perform a logical right shift to place the signbit in the least
	 significant bit, then truncate the result to the desired mode
	 and mask just this bit.  */
      temp = expand_shift (RSHIFT_EXPR, imode, temp,
			   build_int_cst (NULL_TREE, bitpos), NULL_RTX, 1);
      temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp, const1_rtx,
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }

  return temp;
}
5693
5694 /* Expand fork or exec calls. TARGET is the desired target of the
5695 call. EXP is the call. FN is the
5696 identificator of the actual function. IGNORE is nonzero if the
5697 value is to be ignored. */
5698
5699 static rtx
5700 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5701 {
5702 tree id, decl;
5703 tree call;
5704
5705 /* If we are not profiling, just call the function. */
5706 if (!profile_arc_flag)
5707 return NULL_RTX;
5708
5709 /* Otherwise call the wrapper. This should be equivalent for the rest of
5710 compiler, so the code does not diverge, and the wrapper may run the
5711 code necessary for keeping the profiling sane. */
5712
5713 switch (DECL_FUNCTION_CODE (fn))
5714 {
5715 case BUILT_IN_FORK:
5716 id = get_identifier ("__gcov_fork");
5717 break;
5718
5719 case BUILT_IN_EXECL:
5720 id = get_identifier ("__gcov_execl");
5721 break;
5722
5723 case BUILT_IN_EXECV:
5724 id = get_identifier ("__gcov_execv");
5725 break;
5726
5727 case BUILT_IN_EXECLP:
5728 id = get_identifier ("__gcov_execlp");
5729 break;
5730
5731 case BUILT_IN_EXECLE:
5732 id = get_identifier ("__gcov_execle");
5733 break;
5734
5735 case BUILT_IN_EXECVP:
5736 id = get_identifier ("__gcov_execvp");
5737 break;
5738
5739 case BUILT_IN_EXECVE:
5740 id = get_identifier ("__gcov_execve");
5741 break;
5742
5743 default:
5744 gcc_unreachable ();
5745 }
5746
5747 decl = build_decl (FUNCTION_DECL, id, TREE_TYPE (fn));
5748 DECL_EXTERNAL (decl) = 1;
5749 TREE_PUBLIC (decl) = 1;
5750 DECL_ARTIFICIAL (decl) = 1;
5751 TREE_NOTHROW (decl) = 1;
5752 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5753 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5754 call = rewrite_call_expr (exp, 0, decl, 0);
5755 return expand_call (call, target, ignore);
5756 }
5757
5758
5759 \f
5760 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5761 the pointer in these functions is void*, the tree optimizers may remove
5762 casts. The mode computed in expand_builtin isn't reliable either, due
5763 to __sync_bool_compare_and_swap.
5764
5765 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5766 group of builtins. This gives us log2 of the mode size. */
5767
5768 static inline enum machine_mode
5769 get_builtin_sync_mode (int fcode_diff)
5770 {
5771 /* The size is not negotiable, so ask not to get BLKmode in return
5772 if the target indicates that a smaller size would be better. */
5773 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5774 }
5775
5776 /* Expand the memory expression LOC and return the appropriate memory operand
5777 for the builtin_sync operations. */
5778
5779 static rtx
5780 get_builtin_sync_mem (tree loc, enum machine_mode mode)
5781 {
5782 rtx addr, mem;
5783
5784 addr = expand_expr (loc, NULL_RTX, Pmode, EXPAND_SUM);
5785
5786 /* Note that we explicitly do not want any alias information for this
5787 memory, so that we kill all other live memories. Otherwise we don't
5788 satisfy the full barrier semantics of the intrinsic. */
5789 mem = validize_mem (gen_rtx_MEM (mode, addr));
5790
5791 set_mem_align (mem, get_pointer_alignment (loc, BIGGEST_ALIGNMENT));
5792 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5793 MEM_VOLATILE_P (mem) = 1;
5794
5795 return mem;
5796 }
5797
5798 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5799 EXP is the CALL_EXPR. CODE is the rtx code
5800 that corresponds to the arithmetic or logical operation from the name;
5801 an exception here is that NOT actually means NAND. TARGET is an optional
5802 place for us to store the results; AFTER is true if this is the
5803 fetch_and_xxx form. IGNORE is true if we don't actually care about
5804 the result of the operation at all. */
5805
5806 static rtx
5807 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
5808 enum rtx_code code, bool after,
5809 rtx target, bool ignore)
5810 {
5811 rtx val, mem;
5812 enum machine_mode old_mode;
5813
5814 /* Expand the operands. */
5815 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5816
5817 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5818 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5819 of CONST_INTs, where we know the old_mode only from the call argument. */
5820 old_mode = GET_MODE (val);
5821 if (old_mode == VOIDmode)
5822 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5823 val = convert_modes (mode, old_mode, val, 1);
5824
5825 if (ignore)
5826 return expand_sync_operation (mem, val, code);
5827 else
5828 return expand_sync_fetch_operation (mem, val, code, after, target);
5829 }
5830
5831 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5832 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5833 true if this is the boolean form. TARGET is a place for us to store the
5834 results; this is NOT optional if IS_BOOL is true. */
5835
5836 static rtx
5837 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5838 bool is_bool, rtx target)
5839 {
5840 rtx old_val, new_val, mem;
5841 enum machine_mode old_mode;
5842
5843 /* Expand the operands. */
5844 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5845
5846
5847 old_val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX,
5848 mode, EXPAND_NORMAL);
5849 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5850 of CONST_INTs, where we know the old_mode only from the call argument. */
5851 old_mode = GET_MODE (old_val);
5852 if (old_mode == VOIDmode)
5853 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5854 old_val = convert_modes (mode, old_mode, old_val, 1);
5855
5856 new_val = expand_expr (CALL_EXPR_ARG (exp, 2), NULL_RTX,
5857 mode, EXPAND_NORMAL);
5858 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5859 of CONST_INTs, where we know the old_mode only from the call argument. */
5860 old_mode = GET_MODE (new_val);
5861 if (old_mode == VOIDmode)
5862 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2)));
5863 new_val = convert_modes (mode, old_mode, new_val, 1);
5864
5865 if (is_bool)
5866 return expand_bool_compare_and_swap (mem, old_val, new_val, target);
5867 else
5868 return expand_val_compare_and_swap (mem, old_val, new_val, target);
5869 }
5870
5871 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5872 general form is actually an atomic exchange, and some targets only
5873 support a reduced form with the second argument being a constant 1.
5874 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5875 the results. */
5876
5877 static rtx
5878 expand_builtin_lock_test_and_set (enum machine_mode mode, tree exp,
5879 rtx target)
5880 {
5881 rtx val, mem;
5882 enum machine_mode old_mode;
5883
5884 /* Expand the operands. */
5885 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5886 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5887 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5888 of CONST_INTs, where we know the old_mode only from the call argument. */
5889 old_mode = GET_MODE (val);
5890 if (old_mode == VOIDmode)
5891 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5892 val = convert_modes (mode, old_mode, val, 1);
5893
5894 return expand_sync_lock_test_and_set (mem, val, target);
5895 }
5896
5897 /* Expand the __sync_synchronize intrinsic. */
5898
5899 static void
5900 expand_builtin_synchronize (void)
5901 {
5902 tree x;
5903
5904 #ifdef HAVE_memory_barrier
5905 if (HAVE_memory_barrier)
5906 {
5907 emit_insn (gen_memory_barrier ());
5908 return;
5909 }
5910 #endif
5911
5912 /* If no explicit memory barrier instruction is available, create an
5913 empty asm stmt with a memory clobber. */
5914 x = build4 (ASM_EXPR, void_type_node, build_string (0, ""), NULL, NULL,
5915 tree_cons (NULL, build_string (6, "memory"), NULL));
5916 ASM_VOLATILE_P (x) = 1;
5917 expand_asm_expr (x);
5918 }
5919
5920 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5921
5922 static void
5923 expand_builtin_lock_release (enum machine_mode mode, tree exp)
5924 {
5925 enum insn_code icode;
5926 rtx mem, insn;
5927 rtx val = const0_rtx;
5928
5929 /* Expand the operands. */
5930 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5931
5932 /* If there is an explicit operation in the md file, use it. */
5933 icode = sync_lock_release[mode];
5934 if (icode != CODE_FOR_nothing)
5935 {
5936 if (!insn_data[icode].operand[1].predicate (val, mode))
5937 val = force_reg (mode, val);
5938
5939 insn = GEN_FCN (icode) (mem, val);
5940 if (insn)
5941 {
5942 emit_insn (insn);
5943 return;
5944 }
5945 }
5946
5947 /* Otherwise we can implement this operation by emitting a barrier
5948 followed by a store of zero. */
5949 expand_builtin_synchronize ();
5950 emit_move_insn (mem, val);
5951 }
5952 \f
5953 /* Expand an expression EXP that calls a built-in function,
5954 with result going to TARGET if that's convenient
5955 (and in mode MODE if that's convenient).
5956 SUBTARGET may be used as the target for computing one of EXP's operands.
5957 IGNORE is nonzero if the value is to be ignored. */
5958
5959 rtx
5960 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
5961 int ignore)
5962 {
5963 tree fndecl = get_callee_fndecl (exp);
5964 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5965 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5966
5967 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5968 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5969
5970 /* When not optimizing, generate calls to library functions for a certain
5971 set of builtins. */
5972 if (!optimize
5973 && !called_as_built_in (fndecl)
5974 && DECL_ASSEMBLER_NAME_SET_P (fndecl)
5975 && fcode != BUILT_IN_ALLOCA)
5976 return expand_call (exp, target, ignore);
5977
5978 /* The built-in function expanders test for target == const0_rtx
5979 to determine whether the function's result will be ignored. */
5980 if (ignore)
5981 target = const0_rtx;
5982
5983 /* If the result of a pure or const built-in function is ignored, and
5984 none of its arguments are volatile, we can avoid expanding the
5985 built-in call and just evaluate the arguments for side-effects. */
5986 if (target == const0_rtx
5987 && (DECL_IS_PURE (fndecl) || TREE_READONLY (fndecl)))
5988 {
5989 bool volatilep = false;
5990 tree arg;
5991 call_expr_arg_iterator iter;
5992
5993 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5994 if (TREE_THIS_VOLATILE (arg))
5995 {
5996 volatilep = true;
5997 break;
5998 }
5999
6000 if (! volatilep)
6001 {
6002 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6003 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
6004 return const0_rtx;
6005 }
6006 }
6007
6008 switch (fcode)
6009 {
6010 CASE_FLT_FN (BUILT_IN_FABS):
6011 target = expand_builtin_fabs (exp, target, subtarget);
6012 if (target)
6013 return target;
6014 break;
6015
6016 CASE_FLT_FN (BUILT_IN_COPYSIGN):
6017 target = expand_builtin_copysign (exp, target, subtarget);
6018 if (target)
6019 return target;
6020 break;
6021
6022 /* Just do a normal library call if we were unable to fold
6023 the values. */
6024 CASE_FLT_FN (BUILT_IN_CABS):
6025 break;
6026
6027 CASE_FLT_FN (BUILT_IN_EXP):
6028 CASE_FLT_FN (BUILT_IN_EXP10):
6029 CASE_FLT_FN (BUILT_IN_POW10):
6030 CASE_FLT_FN (BUILT_IN_EXP2):
6031 CASE_FLT_FN (BUILT_IN_EXPM1):
6032 CASE_FLT_FN (BUILT_IN_LOGB):
6033 CASE_FLT_FN (BUILT_IN_LOG):
6034 CASE_FLT_FN (BUILT_IN_LOG10):
6035 CASE_FLT_FN (BUILT_IN_LOG2):
6036 CASE_FLT_FN (BUILT_IN_LOG1P):
6037 CASE_FLT_FN (BUILT_IN_TAN):
6038 CASE_FLT_FN (BUILT_IN_ASIN):
6039 CASE_FLT_FN (BUILT_IN_ACOS):
6040 CASE_FLT_FN (BUILT_IN_ATAN):
6041 /* Treat these like sqrt only if unsafe math optimizations are allowed,
6042 because of possible accuracy problems. */
6043 if (! flag_unsafe_math_optimizations)
6044 break;
6045 CASE_FLT_FN (BUILT_IN_SQRT):
6046 CASE_FLT_FN (BUILT_IN_FLOOR):
6047 CASE_FLT_FN (BUILT_IN_CEIL):
6048 CASE_FLT_FN (BUILT_IN_TRUNC):
6049 CASE_FLT_FN (BUILT_IN_ROUND):
6050 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6051 CASE_FLT_FN (BUILT_IN_RINT):
6052 target = expand_builtin_mathfn (exp, target, subtarget);
6053 if (target)
6054 return target;
6055 break;
6056
6057 CASE_FLT_FN (BUILT_IN_ILOGB):
6058 if (! flag_unsafe_math_optimizations)
6059 break;
6060 CASE_FLT_FN (BUILT_IN_ISINF):
6061 target = expand_builtin_interclass_mathfn (exp, target, subtarget);
6062 if (target)
6063 return target;
6064 break;
6065
6066 CASE_FLT_FN (BUILT_IN_LCEIL):
6067 CASE_FLT_FN (BUILT_IN_LLCEIL):
6068 CASE_FLT_FN (BUILT_IN_LFLOOR):
6069 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6070 target = expand_builtin_int_roundingfn (exp, target, subtarget);
6071 if (target)
6072 return target;
6073 break;
6074
6075 CASE_FLT_FN (BUILT_IN_LRINT):
6076 CASE_FLT_FN (BUILT_IN_LLRINT):
6077 CASE_FLT_FN (BUILT_IN_LROUND):
6078 CASE_FLT_FN (BUILT_IN_LLROUND):
6079 target = expand_builtin_int_roundingfn_2 (exp, target, subtarget);
6080 if (target)
6081 return target;
6082 break;
6083
6084 CASE_FLT_FN (BUILT_IN_POW):
6085 target = expand_builtin_pow (exp, target, subtarget);
6086 if (target)
6087 return target;
6088 break;
6089
6090 CASE_FLT_FN (BUILT_IN_POWI):
6091 target = expand_builtin_powi (exp, target, subtarget);
6092 if (target)
6093 return target;
6094 break;
6095
6096 CASE_FLT_FN (BUILT_IN_ATAN2):
6097 CASE_FLT_FN (BUILT_IN_LDEXP):
6098 CASE_FLT_FN (BUILT_IN_SCALB):
6099 CASE_FLT_FN (BUILT_IN_SCALBN):
6100 CASE_FLT_FN (BUILT_IN_SCALBLN):
6101 if (! flag_unsafe_math_optimizations)
6102 break;
6103
6104 CASE_FLT_FN (BUILT_IN_FMOD):
6105 CASE_FLT_FN (BUILT_IN_REMAINDER):
6106 CASE_FLT_FN (BUILT_IN_DREM):
6107 target = expand_builtin_mathfn_2 (exp, target, subtarget);
6108 if (target)
6109 return target;
6110 break;
6111
6112 CASE_FLT_FN (BUILT_IN_CEXPI):
6113 target = expand_builtin_cexpi (exp, target, subtarget);
6114 gcc_assert (target);
6115 return target;
6116
6117 CASE_FLT_FN (BUILT_IN_SIN):
6118 CASE_FLT_FN (BUILT_IN_COS):
6119 if (! flag_unsafe_math_optimizations)
6120 break;
6121 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6122 if (target)
6123 return target;
6124 break;
6125
6126 CASE_FLT_FN (BUILT_IN_SINCOS):
6127 if (! flag_unsafe_math_optimizations)
6128 break;
6129 target = expand_builtin_sincos (exp);
6130 if (target)
6131 return target;
6132 break;
6133
6134 case BUILT_IN_APPLY_ARGS:
6135 return expand_builtin_apply_args ();
6136
6137 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6138 FUNCTION with a copy of the parameters described by
6139 ARGUMENTS, and ARGSIZE. It returns a block of memory
6140 allocated on the stack into which is stored all the registers
6141 that might possibly be used for returning the result of a
6142 function. ARGUMENTS is the value returned by
6143 __builtin_apply_args. ARGSIZE is the number of bytes of
6144 arguments that must be copied. ??? How should this value be
6145 computed? We'll also need a safe worst case value for varargs
6146 functions. */
6147 case BUILT_IN_APPLY:
6148 if (!validate_arglist (exp, POINTER_TYPE,
6149 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6150 && !validate_arglist (exp, REFERENCE_TYPE,
6151 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6152 return const0_rtx;
6153 else
6154 {
6155 rtx ops[3];
6156
6157 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6158 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6159 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6160
6161 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6162 }
6163
6164 /* __builtin_return (RESULT) causes the function to return the
6165 value described by RESULT. RESULT is address of the block of
6166 memory returned by __builtin_apply. */
6167 case BUILT_IN_RETURN:
6168 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6169 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6170 return const0_rtx;
6171
6172 case BUILT_IN_SAVEREGS:
6173 return expand_builtin_saveregs ();
6174
6175 case BUILT_IN_ARGS_INFO:
6176 return expand_builtin_args_info (exp);
6177
6178 /* Return the address of the first anonymous stack arg. */
6179 case BUILT_IN_NEXT_ARG:
6180 if (fold_builtin_next_arg (exp, false))
6181 return const0_rtx;
6182 return expand_builtin_next_arg ();
6183
6184 case BUILT_IN_CLASSIFY_TYPE:
6185 return expand_builtin_classify_type (exp);
6186
6187 case BUILT_IN_CONSTANT_P:
6188 return const0_rtx;
6189
6190 case BUILT_IN_FRAME_ADDRESS:
6191 case BUILT_IN_RETURN_ADDRESS:
6192 return expand_builtin_frame_address (fndecl, exp);
6193
6194 /* Returns the address of the area where the structure is returned.
6195 0 otherwise. */
6196 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6197 if (call_expr_nargs (exp) != 0
6198 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6199 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6200 return const0_rtx;
6201 else
6202 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6203
6204 case BUILT_IN_ALLOCA:
6205 target = expand_builtin_alloca (exp, target);
6206 if (target)
6207 return target;
6208 break;
6209
6210 case BUILT_IN_STACK_SAVE:
6211 return expand_stack_save ();
6212
6213 case BUILT_IN_STACK_RESTORE:
6214 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6215 return const0_rtx;
6216
6217 case BUILT_IN_BSWAP32:
6218 case BUILT_IN_BSWAP64:
6219 target = expand_builtin_bswap (exp, target, subtarget);
6220
6221 if (target)
6222 return target;
6223 break;
6224
6225 CASE_INT_FN (BUILT_IN_FFS):
6226 case BUILT_IN_FFSIMAX:
6227 target = expand_builtin_unop (target_mode, exp, target,
6228 subtarget, ffs_optab);
6229 if (target)
6230 return target;
6231 break;
6232
6233 CASE_INT_FN (BUILT_IN_CLZ):
6234 case BUILT_IN_CLZIMAX:
6235 target = expand_builtin_unop (target_mode, exp, target,
6236 subtarget, clz_optab);
6237 if (target)
6238 return target;
6239 break;
6240
6241 CASE_INT_FN (BUILT_IN_CTZ):
6242 case BUILT_IN_CTZIMAX:
6243 target = expand_builtin_unop (target_mode, exp, target,
6244 subtarget, ctz_optab);
6245 if (target)
6246 return target;
6247 break;
6248
6249 CASE_INT_FN (BUILT_IN_POPCOUNT):
6250 case BUILT_IN_POPCOUNTIMAX:
6251 target = expand_builtin_unop (target_mode, exp, target,
6252 subtarget, popcount_optab);
6253 if (target)
6254 return target;
6255 break;
6256
6257 CASE_INT_FN (BUILT_IN_PARITY):
6258 case BUILT_IN_PARITYIMAX:
6259 target = expand_builtin_unop (target_mode, exp, target,
6260 subtarget, parity_optab);
6261 if (target)
6262 return target;
6263 break;
6264
6265 case BUILT_IN_STRLEN:
6266 target = expand_builtin_strlen (exp, target, target_mode);
6267 if (target)
6268 return target;
6269 break;
6270
6271 case BUILT_IN_STRCPY:
6272 target = expand_builtin_strcpy (fndecl, exp, target, mode);
6273 if (target)
6274 return target;
6275 break;
6276
6277 case BUILT_IN_STRNCPY:
6278 target = expand_builtin_strncpy (exp, target, mode);
6279 if (target)
6280 return target;
6281 break;
6282
6283 case BUILT_IN_STPCPY:
6284 target = expand_builtin_stpcpy (exp, target, mode);
6285 if (target)
6286 return target;
6287 break;
6288
6289 case BUILT_IN_STRCAT:
6290 target = expand_builtin_strcat (fndecl, exp, target, mode);
6291 if (target)
6292 return target;
6293 break;
6294
6295 case BUILT_IN_STRNCAT:
6296 target = expand_builtin_strncat (exp, target, mode);
6297 if (target)
6298 return target;
6299 break;
6300
6301 case BUILT_IN_STRSPN:
6302 target = expand_builtin_strspn (exp, target, mode);
6303 if (target)
6304 return target;
6305 break;
6306
6307 case BUILT_IN_STRCSPN:
6308 target = expand_builtin_strcspn (exp, target, mode);
6309 if (target)
6310 return target;
6311 break;
6312
6313 case BUILT_IN_STRSTR:
6314 target = expand_builtin_strstr (exp, target, mode);
6315 if (target)
6316 return target;
6317 break;
6318
6319 case BUILT_IN_STRPBRK:
6320 target = expand_builtin_strpbrk (exp, target, mode);
6321 if (target)
6322 return target;
6323 break;
6324
6325 case BUILT_IN_INDEX:
6326 case BUILT_IN_STRCHR:
6327 target = expand_builtin_strchr (exp, target, mode);
6328 if (target)
6329 return target;
6330 break;
6331
6332 case BUILT_IN_RINDEX:
6333 case BUILT_IN_STRRCHR:
6334 target = expand_builtin_strrchr (exp, target, mode);
6335 if (target)
6336 return target;
6337 break;
6338
6339 case BUILT_IN_MEMCPY:
6340 target = expand_builtin_memcpy (exp, target, mode);
6341 if (target)
6342 return target;
6343 break;
6344
6345 case BUILT_IN_MEMPCPY:
6346 target = expand_builtin_mempcpy (exp, target, mode);
6347 if (target)
6348 return target;
6349 break;
6350
6351 case BUILT_IN_MEMMOVE:
6352 target = expand_builtin_memmove (exp, target, mode, ignore);
6353 if (target)
6354 return target;
6355 break;
6356
6357 case BUILT_IN_BCOPY:
6358 target = expand_builtin_bcopy (exp, ignore);
6359 if (target)
6360 return target;
6361 break;
6362
6363 case BUILT_IN_MEMSET:
6364 target = expand_builtin_memset (exp, target, mode);
6365 if (target)
6366 return target;
6367 break;
6368
6369 case BUILT_IN_BZERO:
6370 target = expand_builtin_bzero (exp);
6371 if (target)
6372 return target;
6373 break;
6374
6375 case BUILT_IN_STRCMP:
6376 target = expand_builtin_strcmp (exp, target, mode);
6377 if (target)
6378 return target;
6379 break;
6380
6381 case BUILT_IN_STRNCMP:
6382 target = expand_builtin_strncmp (exp, target, mode);
6383 if (target)
6384 return target;
6385 break;
6386
6387 case BUILT_IN_MEMCHR:
6388 target = expand_builtin_memchr (exp, target, mode);
6389 if (target)
6390 return target;
6391 break;
6392
6393 case BUILT_IN_BCMP:
6394 case BUILT_IN_MEMCMP:
6395 target = expand_builtin_memcmp (exp, target, mode);
6396 if (target)
6397 return target;
6398 break;
6399
6400 case BUILT_IN_SETJMP:
6401 /* This should have been lowered to the builtins below. */
6402 gcc_unreachable ();
6403
6404 case BUILT_IN_SETJMP_SETUP:
6405 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6406 and the receiver label. */
6407 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6408 {
6409 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6410 VOIDmode, EXPAND_NORMAL);
6411 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6412 rtx label_r = label_rtx (label);
6413
6414 /* This is copied from the handling of non-local gotos. */
6415 expand_builtin_setjmp_setup (buf_addr, label_r);
6416 nonlocal_goto_handler_labels
6417 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6418 nonlocal_goto_handler_labels);
6419 /* ??? Do not let expand_label treat us as such since we would
6420 not want to be both on the list of non-local labels and on
6421 the list of forced labels. */
6422 FORCED_LABEL (label) = 0;
6423 return const0_rtx;
6424 }
6425 break;
6426
6427 case BUILT_IN_SETJMP_DISPATCHER:
6428 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6429 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6430 {
6431 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6432 rtx label_r = label_rtx (label);
6433
6434 /* Remove the dispatcher label from the list of non-local labels
6435 since the receiver labels have been added to it above. */
6436 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6437 return const0_rtx;
6438 }
6439 break;
6440
6441 case BUILT_IN_SETJMP_RECEIVER:
6442 /* __builtin_setjmp_receiver is passed the receiver label. */
6443 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6444 {
6445 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6446 rtx label_r = label_rtx (label);
6447
6448 expand_builtin_setjmp_receiver (label_r);
6449 return const0_rtx;
6450 }
6451 break;
6452
6453 /* __builtin_longjmp is passed a pointer to an array of five words.
6454 It's similar to the C library longjmp function but works with
6455 __builtin_setjmp above. */
6456 case BUILT_IN_LONGJMP:
6457 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6458 {
6459 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6460 VOIDmode, EXPAND_NORMAL);
6461 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6462
6463 if (value != const1_rtx)
6464 {
6465 error ("%<__builtin_longjmp%> second argument must be 1");
6466 return const0_rtx;
6467 }
6468
6469 expand_builtin_longjmp (buf_addr, value);
6470 return const0_rtx;
6471 }
6472 break;
6473
6474 case BUILT_IN_NONLOCAL_GOTO:
6475 target = expand_builtin_nonlocal_goto (exp);
6476 if (target)
6477 return target;
6478 break;
6479
6480 /* This updates the setjmp buffer that is its argument with the value
6481 of the current stack pointer. */
6482 case BUILT_IN_UPDATE_SETJMP_BUF:
6483 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6484 {
6485 rtx buf_addr
6486 = expand_normal (CALL_EXPR_ARG (exp, 0));
6487
6488 expand_builtin_update_setjmp_buf (buf_addr);
6489 return const0_rtx;
6490 }
6491 break;
6492
6493 case BUILT_IN_TRAP:
6494 expand_builtin_trap ();
6495 return const0_rtx;
6496
6497 case BUILT_IN_PRINTF:
6498 target = expand_builtin_printf (exp, target, mode, false);
6499 if (target)
6500 return target;
6501 break;
6502
6503 case BUILT_IN_PRINTF_UNLOCKED:
6504 target = expand_builtin_printf (exp, target, mode, true);
6505 if (target)
6506 return target;
6507 break;
6508
6509 case BUILT_IN_FPUTS:
6510 target = expand_builtin_fputs (exp, target, false);
6511 if (target)
6512 return target;
6513 break;
6514 case BUILT_IN_FPUTS_UNLOCKED:
6515 target = expand_builtin_fputs (exp, target, true);
6516 if (target)
6517 return target;
6518 break;
6519
6520 case BUILT_IN_FPRINTF:
6521 target = expand_builtin_fprintf (exp, target, mode, false);
6522 if (target)
6523 return target;
6524 break;
6525
6526 case BUILT_IN_FPRINTF_UNLOCKED:
6527 target = expand_builtin_fprintf (exp, target, mode, true);
6528 if (target)
6529 return target;
6530 break;
6531
6532 case BUILT_IN_SPRINTF:
6533 target = expand_builtin_sprintf (exp, target, mode);
6534 if (target)
6535 return target;
6536 break;
6537
6538 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6539 case BUILT_IN_SIGNBITD32:
6540 case BUILT_IN_SIGNBITD64:
6541 case BUILT_IN_SIGNBITD128:
6542 target = expand_builtin_signbit (exp, target);
6543 if (target)
6544 return target;
6545 break;
6546
6547 /* Various hooks for the DWARF 2 __throw routine. */
6548 case BUILT_IN_UNWIND_INIT:
6549 expand_builtin_unwind_init ();
6550 return const0_rtx;
6551 case BUILT_IN_DWARF_CFA:
6552 return virtual_cfa_rtx;
6553 #ifdef DWARF2_UNWIND_INFO
6554 case BUILT_IN_DWARF_SP_COLUMN:
6555 return expand_builtin_dwarf_sp_column ();
6556 case BUILT_IN_INIT_DWARF_REG_SIZES:
6557 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6558 return const0_rtx;
6559 #endif
6560 case BUILT_IN_FROB_RETURN_ADDR:
6561 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6562 case BUILT_IN_EXTRACT_RETURN_ADDR:
6563 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6564 case BUILT_IN_EH_RETURN:
6565 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6566 CALL_EXPR_ARG (exp, 1));
6567 return const0_rtx;
6568 #ifdef EH_RETURN_DATA_REGNO
6569 case BUILT_IN_EH_RETURN_DATA_REGNO:
6570 return expand_builtin_eh_return_data_regno (exp);
6571 #endif
6572 case BUILT_IN_EXTEND_POINTER:
6573 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6574
6575 case BUILT_IN_VA_START:
6576 case BUILT_IN_STDARG_START:
6577 return expand_builtin_va_start (exp);
6578 case BUILT_IN_VA_END:
6579 return expand_builtin_va_end (exp);
6580 case BUILT_IN_VA_COPY:
6581 return expand_builtin_va_copy (exp);
6582 case BUILT_IN_EXPECT:
6583 return expand_builtin_expect (exp, target);
6584 case BUILT_IN_PREFETCH:
6585 expand_builtin_prefetch (exp);
6586 return const0_rtx;
6587
6588 case BUILT_IN_PROFILE_FUNC_ENTER:
6589 return expand_builtin_profile_func (false);
6590 case BUILT_IN_PROFILE_FUNC_EXIT:
6591 return expand_builtin_profile_func (true);
6592
6593 case BUILT_IN_INIT_TRAMPOLINE:
6594 return expand_builtin_init_trampoline (exp);
6595 case BUILT_IN_ADJUST_TRAMPOLINE:
6596 return expand_builtin_adjust_trampoline (exp);
6597
6598 case BUILT_IN_FORK:
6599 case BUILT_IN_EXECL:
6600 case BUILT_IN_EXECV:
6601 case BUILT_IN_EXECLP:
6602 case BUILT_IN_EXECLE:
6603 case BUILT_IN_EXECVP:
6604 case BUILT_IN_EXECVE:
6605 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6606 if (target)
6607 return target;
6608 break;
6609
6610 case BUILT_IN_FETCH_AND_ADD_1:
6611 case BUILT_IN_FETCH_AND_ADD_2:
6612 case BUILT_IN_FETCH_AND_ADD_4:
6613 case BUILT_IN_FETCH_AND_ADD_8:
6614 case BUILT_IN_FETCH_AND_ADD_16:
6615 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_ADD_1);
6616 target = expand_builtin_sync_operation (mode, exp, PLUS,
6617 false, target, ignore);
6618 if (target)
6619 return target;
6620 break;
6621
6622 case BUILT_IN_FETCH_AND_SUB_1:
6623 case BUILT_IN_FETCH_AND_SUB_2:
6624 case BUILT_IN_FETCH_AND_SUB_4:
6625 case BUILT_IN_FETCH_AND_SUB_8:
6626 case BUILT_IN_FETCH_AND_SUB_16:
6627 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_SUB_1);
6628 target = expand_builtin_sync_operation (mode, exp, MINUS,
6629 false, target, ignore);
6630 if (target)
6631 return target;
6632 break;
6633
6634 case BUILT_IN_FETCH_AND_OR_1:
6635 case BUILT_IN_FETCH_AND_OR_2:
6636 case BUILT_IN_FETCH_AND_OR_4:
6637 case BUILT_IN_FETCH_AND_OR_8:
6638 case BUILT_IN_FETCH_AND_OR_16:
6639 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_OR_1);
6640 target = expand_builtin_sync_operation (mode, exp, IOR,
6641 false, target, ignore);
6642 if (target)
6643 return target;
6644 break;
6645
6646 case BUILT_IN_FETCH_AND_AND_1:
6647 case BUILT_IN_FETCH_AND_AND_2:
6648 case BUILT_IN_FETCH_AND_AND_4:
6649 case BUILT_IN_FETCH_AND_AND_8:
6650 case BUILT_IN_FETCH_AND_AND_16:
6651 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_AND_1);
6652 target = expand_builtin_sync_operation (mode, exp, AND,
6653 false, target, ignore);
6654 if (target)
6655 return target;
6656 break;
6657
6658 case BUILT_IN_FETCH_AND_XOR_1:
6659 case BUILT_IN_FETCH_AND_XOR_2:
6660 case BUILT_IN_FETCH_AND_XOR_4:
6661 case BUILT_IN_FETCH_AND_XOR_8:
6662 case BUILT_IN_FETCH_AND_XOR_16:
6663 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_XOR_1);
6664 target = expand_builtin_sync_operation (mode, exp, XOR,
6665 false, target, ignore);
6666 if (target)
6667 return target;
6668 break;
6669
6670 case BUILT_IN_FETCH_AND_NAND_1:
6671 case BUILT_IN_FETCH_AND_NAND_2:
6672 case BUILT_IN_FETCH_AND_NAND_4:
6673 case BUILT_IN_FETCH_AND_NAND_8:
6674 case BUILT_IN_FETCH_AND_NAND_16:
6675 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_NAND_1);
6676 target = expand_builtin_sync_operation (mode, exp, NOT,
6677 false, target, ignore);
6678 if (target)
6679 return target;
6680 break;
6681
6682 case BUILT_IN_ADD_AND_FETCH_1:
6683 case BUILT_IN_ADD_AND_FETCH_2:
6684 case BUILT_IN_ADD_AND_FETCH_4:
6685 case BUILT_IN_ADD_AND_FETCH_8:
6686 case BUILT_IN_ADD_AND_FETCH_16:
6687 mode = get_builtin_sync_mode (fcode - BUILT_IN_ADD_AND_FETCH_1);
6688 target = expand_builtin_sync_operation (mode, exp, PLUS,
6689 true, target, ignore);
6690 if (target)
6691 return target;
6692 break;
6693
6694 case BUILT_IN_SUB_AND_FETCH_1:
6695 case BUILT_IN_SUB_AND_FETCH_2:
6696 case BUILT_IN_SUB_AND_FETCH_4:
6697 case BUILT_IN_SUB_AND_FETCH_8:
6698 case BUILT_IN_SUB_AND_FETCH_16:
6699 mode = get_builtin_sync_mode (fcode - BUILT_IN_SUB_AND_FETCH_1);
6700 target = expand_builtin_sync_operation (mode, exp, MINUS,
6701 true, target, ignore);
6702 if (target)
6703 return target;
6704 break;
6705
6706 case BUILT_IN_OR_AND_FETCH_1:
6707 case BUILT_IN_OR_AND_FETCH_2:
6708 case BUILT_IN_OR_AND_FETCH_4:
6709 case BUILT_IN_OR_AND_FETCH_8:
6710 case BUILT_IN_OR_AND_FETCH_16:
6711 mode = get_builtin_sync_mode (fcode - BUILT_IN_OR_AND_FETCH_1);
6712 target = expand_builtin_sync_operation (mode, exp, IOR,
6713 true, target, ignore);
6714 if (target)
6715 return target;
6716 break;
6717
6718 case BUILT_IN_AND_AND_FETCH_1:
6719 case BUILT_IN_AND_AND_FETCH_2:
6720 case BUILT_IN_AND_AND_FETCH_4:
6721 case BUILT_IN_AND_AND_FETCH_8:
6722 case BUILT_IN_AND_AND_FETCH_16:
6723 mode = get_builtin_sync_mode (fcode - BUILT_IN_AND_AND_FETCH_1);
6724 target = expand_builtin_sync_operation (mode, exp, AND,
6725 true, target, ignore);
6726 if (target)
6727 return target;
6728 break;
6729
6730 case BUILT_IN_XOR_AND_FETCH_1:
6731 case BUILT_IN_XOR_AND_FETCH_2:
6732 case BUILT_IN_XOR_AND_FETCH_4:
6733 case BUILT_IN_XOR_AND_FETCH_8:
6734 case BUILT_IN_XOR_AND_FETCH_16:
6735 mode = get_builtin_sync_mode (fcode - BUILT_IN_XOR_AND_FETCH_1);
6736 target = expand_builtin_sync_operation (mode, exp, XOR,
6737 true, target, ignore);
6738 if (target)
6739 return target;
6740 break;
6741
6742 case BUILT_IN_NAND_AND_FETCH_1:
6743 case BUILT_IN_NAND_AND_FETCH_2:
6744 case BUILT_IN_NAND_AND_FETCH_4:
6745 case BUILT_IN_NAND_AND_FETCH_8:
6746 case BUILT_IN_NAND_AND_FETCH_16:
6747 mode = get_builtin_sync_mode (fcode - BUILT_IN_NAND_AND_FETCH_1);
6748 target = expand_builtin_sync_operation (mode, exp, NOT,
6749 true, target, ignore);
6750 if (target)
6751 return target;
6752 break;
6753
6754 case BUILT_IN_BOOL_COMPARE_AND_SWAP_1:
6755 case BUILT_IN_BOOL_COMPARE_AND_SWAP_2:
6756 case BUILT_IN_BOOL_COMPARE_AND_SWAP_4:
6757 case BUILT_IN_BOOL_COMPARE_AND_SWAP_8:
6758 case BUILT_IN_BOOL_COMPARE_AND_SWAP_16:
6759 if (mode == VOIDmode)
6760 mode = TYPE_MODE (boolean_type_node);
6761 if (!target || !register_operand (target, mode))
6762 target = gen_reg_rtx (mode);
6763
6764 mode = get_builtin_sync_mode (fcode - BUILT_IN_BOOL_COMPARE_AND_SWAP_1);
6765 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6766 if (target)
6767 return target;
6768 break;
6769
6770 case BUILT_IN_VAL_COMPARE_AND_SWAP_1:
6771 case BUILT_IN_VAL_COMPARE_AND_SWAP_2:
6772 case BUILT_IN_VAL_COMPARE_AND_SWAP_4:
6773 case BUILT_IN_VAL_COMPARE_AND_SWAP_8:
6774 case BUILT_IN_VAL_COMPARE_AND_SWAP_16:
6775 mode = get_builtin_sync_mode (fcode - BUILT_IN_VAL_COMPARE_AND_SWAP_1);
6776 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6777 if (target)
6778 return target;
6779 break;
6780
6781 case BUILT_IN_LOCK_TEST_AND_SET_1:
6782 case BUILT_IN_LOCK_TEST_AND_SET_2:
6783 case BUILT_IN_LOCK_TEST_AND_SET_4:
6784 case BUILT_IN_LOCK_TEST_AND_SET_8:
6785 case BUILT_IN_LOCK_TEST_AND_SET_16:
6786 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_TEST_AND_SET_1);
6787 target = expand_builtin_lock_test_and_set (mode, exp, target);
6788 if (target)
6789 return target;
6790 break;
6791
6792 case BUILT_IN_LOCK_RELEASE_1:
6793 case BUILT_IN_LOCK_RELEASE_2:
6794 case BUILT_IN_LOCK_RELEASE_4:
6795 case BUILT_IN_LOCK_RELEASE_8:
6796 case BUILT_IN_LOCK_RELEASE_16:
6797 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_RELEASE_1);
6798 expand_builtin_lock_release (mode, exp);
6799 return const0_rtx;
6800
6801 case BUILT_IN_SYNCHRONIZE:
6802 expand_builtin_synchronize ();
6803 return const0_rtx;
6804
6805 case BUILT_IN_OBJECT_SIZE:
6806 return expand_builtin_object_size (exp);
6807
6808 case BUILT_IN_MEMCPY_CHK:
6809 case BUILT_IN_MEMPCPY_CHK:
6810 case BUILT_IN_MEMMOVE_CHK:
6811 case BUILT_IN_MEMSET_CHK:
6812 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6813 if (target)
6814 return target;
6815 break;
6816
6817 case BUILT_IN_STRCPY_CHK:
6818 case BUILT_IN_STPCPY_CHK:
6819 case BUILT_IN_STRNCPY_CHK:
6820 case BUILT_IN_STRCAT_CHK:
6821 case BUILT_IN_STRNCAT_CHK:
6822 case BUILT_IN_SNPRINTF_CHK:
6823 case BUILT_IN_VSNPRINTF_CHK:
6824 maybe_emit_chk_warning (exp, fcode);
6825 break;
6826
6827 case BUILT_IN_SPRINTF_CHK:
6828 case BUILT_IN_VSPRINTF_CHK:
6829 maybe_emit_sprintf_chk_warning (exp, fcode);
6830 break;
6831
6832 default: /* just do library call, if unknown builtin */
6833 break;
6834 }
6835
6836 /* The switch statement above can drop through to cause the function
6837 to be called normally. */
6838 return expand_call (exp, target, ignore);
6839 }
6840
/* Determine whether a tree node represents a call to a built-in
   function.  If the tree T is a call to a built-in function with
   the right number of arguments of the appropriate types, return
   the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
   Otherwise the return value is END_BUILTINS.  */

enum built_in_function
builtin_mathfn_code (tree t)
{
  tree fndecl, arg, parmlist;
  tree argtype, parmtype;
  call_expr_arg_iterator iter;

  /* Only a direct call through an ADDR_EXPR of the function can be
     recognized.  */
  if (TREE_CODE (t) != CALL_EXPR
      || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
    return END_BUILTINS;

  /* The callee must be a built-in FUNCTION_DECL, and machine-specific
     (BUILT_IN_MD) built-ins are deliberately excluded.  */
  fndecl = get_callee_fndecl (t);
  if (fndecl == NULL_TREE
      || TREE_CODE (fndecl) != FUNCTION_DECL
      || ! DECL_BUILT_IN (fndecl)
      || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return END_BUILTINS;

  /* Walk the declared parameter types and the actual arguments in
     lockstep, requiring each argument's type to be in the same broad
     class (scalar float, complex float, pointer, integral) as the
     corresponding parameter type.  */
  parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  init_call_expr_arg_iterator (t, &iter);
  for (; parmlist; parmlist = TREE_CHAIN (parmlist))
    {
      /* If a function doesn't take a variable number of arguments,
	 the last element in the list will have type `void'.  */
      parmtype = TREE_VALUE (parmlist);
      if (VOID_TYPE_P (parmtype))
	{
	  /* Extra actual arguments beyond the prototype: no match.  */
	  if (more_call_expr_args_p (&iter))
	    return END_BUILTINS;
	  return DECL_FUNCTION_CODE (fndecl);
	}

      /* Too few actual arguments: no match.  */
      if (! more_call_expr_args_p (&iter))
	return END_BUILTINS;

      arg = next_call_expr_arg (&iter);
      argtype = TREE_TYPE (arg);

      if (SCALAR_FLOAT_TYPE_P (parmtype))
	{
	  if (! SCALAR_FLOAT_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (COMPLEX_FLOAT_TYPE_P (parmtype))
	{
	  if (! COMPLEX_FLOAT_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (POINTER_TYPE_P (parmtype))
	{
	  if (! POINTER_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (INTEGRAL_TYPE_P (parmtype))
	{
	  if (! INTEGRAL_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else
	return END_BUILTINS;
    }

  /* Variable-length argument list.  */
  return DECL_FUNCTION_CODE (fndecl);
}
6912
/* Fold a call to __builtin_constant_p, if we know its argument ARG will
   evaluate to a constant.  Returns integer_one_node when ARG is known
   constant, integer_zero_node when it is known non-constant (or a
   definite answer is required), and NULL_TREE when the decision must
   be deferred to later folding or RTL expansion.  */

static tree
fold_builtin_constant_p (tree arg)
{
  /* We return 1 for a numeric type that's known to be a constant
     value at compile-time or for an aggregate type that's a
     literal constant.  */
  STRIP_NOPS (arg);

  /* If we know this is a constant, emit the constant of one.  */
  if (CONSTANT_CLASS_P (arg)
      || (TREE_CODE (arg) == CONSTRUCTOR
	  && TREE_CONSTANT (arg)))
    return integer_one_node;
  if (TREE_CODE (arg) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (arg, 0);
      /* The address of a string literal, possibly reached through a
	 zero-index ARRAY_REF into it, is a constant as well.  */
      if (TREE_CODE (op) == STRING_CST
	  || (TREE_CODE (op) == ARRAY_REF
	      && integer_zerop (TREE_OPERAND (op, 1))
	      && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
	return integer_one_node;
    }

  /* If this expression has side effects, show we don't know it to be a
     constant.  Likewise if it's a pointer or aggregate type since in
     those case we only want literals, since those are only optimized
     when generating RTL, not later.
     And finally, if we are compiling an initializer, not code, we
     need to return a definite result now; there's not going to be any
     more optimization done.  */
  if (TREE_SIDE_EFFECTS (arg)
      || AGGREGATE_TYPE_P (TREE_TYPE (arg))
      || POINTER_TYPE_P (TREE_TYPE (arg))
      || cfun == 0
      || folding_initializer)
    return integer_zero_node;

  return NULL_TREE;
}
6955
/* Fold a call to __builtin_expect with argument ARG, if we expect that a
   comparison against the argument will fold to a constant.  In practice,
   this means a true constant or the address of a non-weak symbol.
   Returns ARG itself when folding is possible, NULL_TREE otherwise.  */

static tree
fold_builtin_expect (tree arg)
{
  tree inner;

  /* If the argument isn't invariant, then there's nothing we can do.  */
  if (!TREE_INVARIANT (arg))
    return NULL_TREE;

  /* If we're looking at an address of a weak decl, then do not fold;
     a weak symbol's address is not a compile-time constant.  */
  inner = arg;
  STRIP_NOPS (inner);
  if (TREE_CODE (inner) == ADDR_EXPR)
    {
      do
	{
	  /* Walk through COMPONENT_REFs/ARRAY_REFs down to the
	     underlying declaration.  */
	  inner = TREE_OPERAND (inner, 0);
	}
      while (TREE_CODE (inner) == COMPONENT_REF
	     || TREE_CODE (inner) == ARRAY_REF);
      if (DECL_P (inner) && DECL_WEAK (inner))
	return NULL_TREE;
    }

  /* Otherwise, ARG already has the proper type for the return value.  */
  return arg;
}
6987
6988 /* Fold a call to __builtin_classify_type with argument ARG. */
6989
6990 static tree
6991 fold_builtin_classify_type (tree arg)
6992 {
6993 if (arg == 0)
6994 return build_int_cst (NULL_TREE, no_type_class);
6995
6996 return build_int_cst (NULL_TREE, type_to_class (TREE_TYPE (arg)));
6997 }
6998
6999 /* Fold a call to __builtin_strlen with argument ARG. */
7000
7001 static tree
7002 fold_builtin_strlen (tree arg)
7003 {
7004 if (!validate_arg (arg, POINTER_TYPE))
7005 return NULL_TREE;
7006 else
7007 {
7008 tree len = c_strlen (arg, 0);
7009
7010 if (len)
7011 {
7012 /* Convert from the internal "sizetype" type to "size_t". */
7013 if (size_type_node)
7014 len = fold_convert (size_type_node, len);
7015 return len;
7016 }
7017
7018 return NULL_TREE;
7019 }
7020 }
7021
/* Fold a call to __builtin_inf or __builtin_huge_val.  TYPE is the
   return type of the built-in.  WARN is nonzero for the inf family,
   which must be diagnosed on targets without an infinity
   representation.  */

static tree
fold_builtin_inf (tree type, int warn)
{
  REAL_VALUE_TYPE real;

  /* __builtin_inff is intended to be usable to define INFINITY on all
     targets.  If an infinity is not available, INFINITY expands "to a
     positive constant of type float that overflows at translation
     time", footnote "In this case, using INFINITY will violate the
     constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
     Thus we pedwarn to ensure this constraint violation is
     diagnosed.  */
  if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
    pedwarn ("target format does not support infinity");

  /* Build a real constant representing infinity in TYPE.  */
  real_inf (&real);
  return build_real (type, real);
}
7042
7043 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7044
7045 static tree
7046 fold_builtin_nan (tree arg, tree type, int quiet)
7047 {
7048 REAL_VALUE_TYPE real;
7049 const char *str;
7050
7051 if (!validate_arg (arg, POINTER_TYPE))
7052 return NULL_TREE;
7053 str = c_getstr (arg);
7054 if (!str)
7055 return NULL_TREE;
7056
7057 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7058 return NULL_TREE;
7059
7060 return build_real (type, real);
7061 }
7062
/* Return true if the floating point expression T has an integer value.
   We also allow +Inf, -Inf and NaN to be considered integer values.  */

static bool
integer_valued_real_p (tree t)
{
  switch (TREE_CODE (t))
    {
    /* An integer converted to floating point is integral by
       construction.  */
    case FLOAT_EXPR:
      return true;

    /* These pass their operand's value through (possibly negated in
       magnitude), preserving integrality.  */
    case ABS_EXPR:
    case SAVE_EXPR:
    case NON_LVALUE_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 0));

    /* The value of these nodes is their second operand.  */
    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      return integer_valued_real_p (GENERIC_TREE_OPERAND (t, 1));

    /* Arithmetic on two integer-valued operands yields an integer
       value (+Inf/-Inf/NaN are allowed per the comment above).  */
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 0))
	     && integer_valued_real_p (TREE_OPERAND (t, 1));

    /* A conditional is integer valued if both of its arms are.  */
    case COND_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 1))
	     && integer_valued_real_p (TREE_OPERAND (t, 2));

    case REAL_CST:
      return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));

    case NOP_EXPR:
      {
	/* A conversion from an integer type is integral; a
	   float-to-float conversion preserves integrality.  */
	tree type = TREE_TYPE (TREE_OPERAND (t, 0));
	if (TREE_CODE (type) == INTEGER_TYPE)
	  return true;
	if (TREE_CODE (type) == REAL_TYPE)
	  return integer_valued_real_p (TREE_OPERAND (t, 0));
	break;
      }

    case CALL_EXPR:
      switch (builtin_mathfn_code (t))
	{
	/* Rounding built-ins always produce integral values.  */
	CASE_FLT_FN (BUILT_IN_CEIL):
	CASE_FLT_FN (BUILT_IN_FLOOR):
	CASE_FLT_FN (BUILT_IN_NEARBYINT):
	CASE_FLT_FN (BUILT_IN_RINT):
	CASE_FLT_FN (BUILT_IN_ROUND):
	CASE_FLT_FN (BUILT_IN_TRUNC):
	  return true;

	/* fmin/fmax of integer-valued arguments is integer valued.  */
	CASE_FLT_FN (BUILT_IN_FMIN):
	CASE_FLT_FN (BUILT_IN_FMAX):
	  return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
		 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));

	default:
	  break;
	}
      break;

    default:
      break;
    }
  return false;
}
7135
/* FNDECL is assumed to be a builtin where truncation can be propagated
   across (for instance floor((double)f) == (double)floorf (f).
   Do the transformation for a call with argument ARG.  Returns the
   folded tree or NULL_TREE if no simplification can be made.  */

static tree
fold_trunc_transparent_mathfn (tree fndecl, tree arg)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Integer rounding functions are idempotent.  */
  if (fcode == builtin_mathfn_code (arg))
    return arg;

  /* If argument is already integer valued, and we don't need to worry
     about setting errno, there's no need to perform rounding.  */
  if (! flag_errno_math && integer_valued_real_p (arg))
    return arg;

  if (optimize)
    {
      tree arg0 = strip_float_extensions (arg);
      tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
      tree newtype = TREE_TYPE (arg0);
      tree decl;

      /* If the argument was widened from a narrower float type and a
	 built-in exists for the narrower type, perform the operation
	 in the narrower type and widen the result afterwards.  */
      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
	  && (decl = mathfn_built_in (newtype, fcode)))
	return fold_convert (ftype,
			     build_call_expr (decl, 1,
					      fold_convert (newtype, arg0)));
    }
  return NULL_TREE;
}
7172
7173 /* FNDECL is assumed to be builtin which can narrow the FP type of
7174 the argument, for instance lround((double)f) -> lroundf (f).
7175 Do the transformation for a call with argument ARG. */
7176
7177 static tree
7178 fold_fixed_mathfn (tree fndecl, tree arg)
7179 {
7180 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7181
7182 if (!validate_arg (arg, REAL_TYPE))
7183 return NULL_TREE;
7184
7185 /* If argument is already integer valued, and we don't need to worry
7186 about setting errno, there's no need to perform rounding. */
7187 if (! flag_errno_math && integer_valued_real_p (arg))
7188 return fold_build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)), arg);
7189
7190 if (optimize)
7191 {
7192 tree ftype = TREE_TYPE (arg);
7193 tree arg0 = strip_float_extensions (arg);
7194 tree newtype = TREE_TYPE (arg0);
7195 tree decl;
7196
7197 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7198 && (decl = mathfn_built_in (newtype, fcode)))
7199 return build_call_expr (decl, 1, fold_convert (newtype, arg0));
7200 }
7201
7202 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7203 sizeof (long long) == sizeof (long). */
7204 if (TYPE_PRECISION (long_long_integer_type_node)
7205 == TYPE_PRECISION (long_integer_type_node))
7206 {
7207 tree newfn = NULL_TREE;
7208 switch (fcode)
7209 {
7210 CASE_FLT_FN (BUILT_IN_LLCEIL):
7211 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7212 break;
7213
7214 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7215 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7216 break;
7217
7218 CASE_FLT_FN (BUILT_IN_LLROUND):
7219 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7220 break;
7221
7222 CASE_FLT_FN (BUILT_IN_LLRINT):
7223 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7224 break;
7225
7226 default:
7227 break;
7228 }
7229
7230 if (newfn)
7231 {
7232 tree newcall = build_call_expr(newfn, 1, arg);
7233 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7234 }
7235 }
7236
7237 return NULL_TREE;
7238 }
7239
/* Fold call to builtin cabs, cabsf or cabsl with argument ARG.  TYPE is the
   return type.  FNDECL is the cabs function decl itself, used to rebuild
   the call when only its argument is simplified.  Return NULL_TREE if no
   simplification can be made.  */

static tree
fold_builtin_cabs (tree arg, tree type, tree fndecl)
{
  tree res;

  /* The argument must be a complex value with a real component type.  */
  if (TREE_CODE (TREE_TYPE (arg)) != COMPLEX_TYPE
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
    return NULL_TREE;

  /* Calculate the result when the argument is a constant:
     cabs (x + yi) == hypot (x, y).  */
  if (TREE_CODE (arg) == COMPLEX_CST
      && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
			      type, mpfr_hypot)))
    return res;

  if (TREE_CODE (arg) == COMPLEX_EXPR)
    {
      tree real = TREE_OPERAND (arg, 0);
      tree imag = TREE_OPERAND (arg, 1);

      /* If either part is zero, cabs is fabs of the other.  */
      if (real_zerop (real))
	return fold_build1 (ABS_EXPR, type, imag);
      if (real_zerop (imag))
	return fold_build1 (ABS_EXPR, type, real);

      /* cabs(x+xi) -> fabs(x)*sqrt(2).  */
      if (flag_unsafe_math_optimizations
	  && operand_equal_p (real, imag, OEP_PURE_SAME))
	{
	  const REAL_VALUE_TYPE sqrt2_trunc
	    = real_value_truncate (TYPE_MODE (type), dconstsqrt2);
	  STRIP_NOPS (real);
	  return fold_build2 (MULT_EXPR, type,
			      fold_build1 (ABS_EXPR, type, real),
			      build_real (type, sqrt2_trunc));
	}
    }

  /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z).  */
  if (TREE_CODE (arg) == NEGATE_EXPR
      || TREE_CODE (arg) == CONJ_EXPR)
    return build_call_expr (fndecl, 1, TREE_OPERAND (arg, 0));

  /* Don't do this when optimizing for size.  */
  if (flag_unsafe_math_optimizations
      && optimize && !optimize_size)
    {
      tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);

      /* Expand cabs (z) as sqrt (r*r + i*i).  */
      if (sqrtfn != NULL_TREE)
	{
	  tree rpart, ipart, result;

	  /* Wrap in save_exprs so ARG and its parts are each
	     evaluated only once.  */
	  arg = builtin_save_expr (arg);

	  rpart = fold_build1 (REALPART_EXPR, type, arg);
	  ipart = fold_build1 (IMAGPART_EXPR, type, arg);

	  rpart = builtin_save_expr (rpart);
	  ipart = builtin_save_expr (ipart);

	  result = fold_build2 (PLUS_EXPR, type,
				fold_build2 (MULT_EXPR, type,
					     rpart, rpart),
				fold_build2 (MULT_EXPR, type,
					     ipart, ipart));

	  return build_call_expr (sqrtfn, 1, result);
	}
    }

  return NULL_TREE;
}
7317
7318 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7319 Return NULL_TREE if no simplification can be made. */
7320
7321 static tree
7322 fold_builtin_sqrt (tree arg, tree type)
7323 {
7324
7325 enum built_in_function fcode;
7326 tree res;
7327
7328 if (!validate_arg (arg, REAL_TYPE))
7329 return NULL_TREE;
7330
7331 /* Calculate the result when the argument is a constant. */
7332 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7333 return res;
7334
7335 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7336 fcode = builtin_mathfn_code (arg);
7337 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7338 {
7339 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7340 arg = fold_build2 (MULT_EXPR, type,
7341 CALL_EXPR_ARG (arg, 0),
7342 build_real (type, dconsthalf));
7343 return build_call_expr (expfn, 1, arg);
7344 }
7345
7346 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7347 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7348 {
7349 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7350
7351 if (powfn)
7352 {
7353 tree arg0 = CALL_EXPR_ARG (arg, 0);
7354 tree tree_root;
7355 /* The inner root was either sqrt or cbrt. */
7356 REAL_VALUE_TYPE dconstroot =
7357 BUILTIN_SQRT_P (fcode) ? dconsthalf : dconstthird;
7358
7359 /* Adjust for the outer root. */
7360 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7361 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7362 tree_root = build_real (type, dconstroot);
7363 return build_call_expr (powfn, 2, arg0, tree_root);
7364 }
7365 }
7366
7367 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7368 if (flag_unsafe_math_optimizations
7369 && (fcode == BUILT_IN_POW
7370 || fcode == BUILT_IN_POWF
7371 || fcode == BUILT_IN_POWL))
7372 {
7373 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7374 tree arg0 = CALL_EXPR_ARG (arg, 0);
7375 tree arg1 = CALL_EXPR_ARG (arg, 1);
7376 tree narg1;
7377 if (!tree_expr_nonnegative_p (arg0))
7378 arg0 = build1 (ABS_EXPR, type, arg0);
7379 narg1 = fold_build2 (MULT_EXPR, type, arg1,
7380 build_real (type, dconsthalf));
7381 return build_call_expr (powfn, 2, arg0, narg1);
7382 }
7383
7384 return NULL_TREE;
7385 }
7386
/* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
   TYPE is the return type.  Return NULL_TREE if no simplification can
   be made.  */

static tree
fold_builtin_cbrt (tree arg, tree type)
{
  const enum built_in_function fcode = builtin_mathfn_code (arg);
  tree res;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
    return res;

  if (flag_unsafe_math_optimizations)
    {
      /* Optimize cbrt(expN(x)) -> expN(x/3).  */
      if (BUILTIN_EXPONENT_P (fcode))
	{
	  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
	  const REAL_VALUE_TYPE third_trunc =
	    real_value_truncate (TYPE_MODE (type), dconstthird);
	  arg = fold_build2 (MULT_EXPR, type,
			     CALL_EXPR_ARG (arg, 0),
			     build_real (type, third_trunc));
	  return build_call_expr (expfn, 1, arg);
	}

      /* Optimize cbrt(sqrt(x)) -> pow(x,1/6).  */
      if (BUILTIN_SQRT_P (fcode))
	{
	  tree powfn = mathfn_built_in (type, BUILT_IN_POW);

	  if (powfn)
	    {
	      tree arg0 = CALL_EXPR_ARG (arg, 0);
	      tree tree_root;
	      /* Start from 1/3 and halve it (decrement the binary
		 exponent) to obtain 1/6.  */
	      REAL_VALUE_TYPE dconstroot = dconstthird;

	      SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
	      dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
	      tree_root = build_real (type, dconstroot);
	      return build_call_expr (powfn, 2, arg0, tree_root);
	    }
	}

      /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative.  */
      if (BUILTIN_CBRT_P (fcode))
	{
	  tree arg0 = CALL_EXPR_ARG (arg, 0);
	  if (tree_expr_nonnegative_p (arg0))
	    {
	      tree powfn = mathfn_built_in (type, BUILT_IN_POW);

	      if (powfn)
		{
		  tree tree_root;
		  REAL_VALUE_TYPE dconstroot;

		  /* 1/9 == (1/3) * (1/3).  */
		  real_arithmetic (&dconstroot, MULT_EXPR, &dconstthird, &dconstthird);
		  dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
		  tree_root = build_real (type, dconstroot);
		  return build_call_expr (powfn, 2, arg0, tree_root);
		}
	    }
	}

      /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative.  */
      if (fcode == BUILT_IN_POW
	  || fcode == BUILT_IN_POWF
	  || fcode == BUILT_IN_POWL)
	{
	  tree arg00 = CALL_EXPR_ARG (arg, 0);
	  tree arg01 = CALL_EXPR_ARG (arg, 1);
	  if (tree_expr_nonnegative_p (arg00))
	    {
	      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
	      const REAL_VALUE_TYPE dconstroot
		= real_value_truncate (TYPE_MODE (type), dconstthird);
	      tree narg01 = fold_build2 (MULT_EXPR, type, arg01,
					 build_real (type, dconstroot));
	      return build_call_expr (powfn, 2, arg00, narg01);
	    }
	}
    }
  return NULL_TREE;
}
7476
7477 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7478 TYPE is the type of the return value. Return NULL_TREE if no
7479 simplification can be made. */
7480
7481 static tree
7482 fold_builtin_cos (tree arg, tree type, tree fndecl)
7483 {
7484 tree res, narg;
7485
7486 if (!validate_arg (arg, REAL_TYPE))
7487 return NULL_TREE;
7488
7489 /* Calculate the result when the argument is a constant. */
7490 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7491 return res;
7492
7493 /* Optimize cos(-x) into cos (x). */
7494 if ((narg = fold_strip_sign_ops (arg)))
7495 return build_call_expr (fndecl, 1, narg);
7496
7497 return NULL_TREE;
7498 }
7499
7500 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7501 Return NULL_TREE if no simplification can be made. */
7502
7503 static tree
7504 fold_builtin_cosh (tree arg, tree type, tree fndecl)
7505 {
7506 if (validate_arg (arg, REAL_TYPE))
7507 {
7508 tree res, narg;
7509
7510 /* Calculate the result when the argument is a constant. */
7511 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7512 return res;
7513
7514 /* Optimize cosh(-x) into cosh (x). */
7515 if ((narg = fold_strip_sign_ops (arg)))
7516 return build_call_expr (fndecl, 1, narg);
7517 }
7518
7519 return NULL_TREE;
7520 }
7521
7522 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7523 Return NULL_TREE if no simplification can be made. */
7524
7525 static tree
7526 fold_builtin_tan (tree arg, tree type)
7527 {
7528 enum built_in_function fcode;
7529 tree res;
7530
7531 if (!validate_arg (arg, REAL_TYPE))
7532 return NULL_TREE;
7533
7534 /* Calculate the result when the argument is a constant. */
7535 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7536 return res;
7537
7538 /* Optimize tan(atan(x)) = x. */
7539 fcode = builtin_mathfn_code (arg);
7540 if (flag_unsafe_math_optimizations
7541 && (fcode == BUILT_IN_ATAN
7542 || fcode == BUILT_IN_ATANF
7543 || fcode == BUILT_IN_ATANL))
7544 return CALL_EXPR_ARG (arg, 0);
7545
7546 return NULL_TREE;
7547 }
7548
/* Fold function call to builtin sincos, sincosf, or sincosl.  ARG0 is
   the angle; ARG1 and ARG2 are the pointers the results are stored
   through.  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_sincos (tree arg0, tree arg1, tree arg2)
{
  tree type;
  tree res, fn, call;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE))
    return NULL_TREE;

  type = TREE_TYPE (arg0);

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
    return res;

  /* Canonicalize sincos to cexpi.  */
  if (!TARGET_C99_FUNCTIONS)
    return NULL_TREE;
  fn = mathfn_built_in (type, BUILT_IN_CEXPI);
  if (!fn)
    return NULL_TREE;

  /* Wrap the cexpi call in a save_expr so it is evaluated once even
     though both stores below reference it.  */
  call = build_call_expr (fn, 1, arg0);
  call = builtin_save_expr (call);

  /* cexpi (x) == cos (x) + sin (x)*i, so *ARG1 receives the imaginary
     part (the sine) and *ARG2 the real part (the cosine).  */
  return build2 (COMPOUND_EXPR, type,
		 build2 (MODIFY_EXPR, void_type_node,
			 build_fold_indirect_ref (arg1),
			 build1 (IMAGPART_EXPR, type, call)),
		 build2 (MODIFY_EXPR, void_type_node,
			 build_fold_indirect_ref (arg2),
			 build1 (REALPART_EXPR, type, call)));
}
7587
/* Fold function call to builtin cexp, cexpf, or cexpl.  ARG0 is the
   complex argument and TYPE the (complex) return type.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cexp (tree arg0, tree type)
{
  tree rtype;
  tree realp, imagp, ifn;

  if (!validate_arg (arg0, COMPLEX_TYPE))
    return NULL_TREE;

  /* RTYPE is the component (scalar) type of the complex argument.  */
  rtype = TREE_TYPE (TREE_TYPE (arg0));

  /* In case we can figure out the real part of arg0 and it is constant zero
     fold to cexpi.  */
  if (!TARGET_C99_FUNCTIONS)
    return NULL_TREE;
  ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
  if (!ifn)
    return NULL_TREE;

  /* cexp (0 + yi) == cexpi (y).  */
  if ((realp = fold_unary (REALPART_EXPR, rtype, arg0))
      && real_zerop (realp))
    {
      tree narg = fold_build1 (IMAGPART_EXPR, rtype, arg0);
      return build_call_expr (ifn, 1, narg);
    }

  /* In case we can easily decompose real and imaginary parts split cexp
     to exp (r) * cexpi (i).  */
  if (flag_unsafe_math_optimizations
      && realp)
    {
      tree rfn, rcall, icall;

      rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
      if (!rfn)
	return NULL_TREE;

      imagp = fold_unary (IMAGPART_EXPR, rtype, arg0);
      if (!imagp)
	return NULL_TREE;

      /* Save both calls so each is evaluated only once, as each is
	 referenced twice below.  */
      icall = build_call_expr (ifn, 1, imagp);
      icall = builtin_save_expr (icall);
      rcall = build_call_expr (rfn, 1, realp);
      rcall = builtin_save_expr (rcall);
      /* Result is exp (r)*creal (cexpi (i))
	 + exp (r)*cimag (cexpi (i)) * i.  */
      return build2 (COMPLEX_EXPR, type,
		     build2 (MULT_EXPR, rtype,
			     rcall,
			     build1 (REALPART_EXPR, rtype, icall)),
		     build2 (MULT_EXPR, rtype,
			     rcall,
			     build1 (IMAGPART_EXPR, rtype, icall)));
    }

  return NULL_TREE;
}
7647
7648 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7649 Return NULL_TREE if no simplification can be made. */
7650
7651 static tree
7652 fold_builtin_trunc (tree fndecl, tree arg)
7653 {
7654 if (!validate_arg (arg, REAL_TYPE))
7655 return NULL_TREE;
7656
7657 /* Optimize trunc of constant value. */
7658 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7659 {
7660 REAL_VALUE_TYPE r, x;
7661 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7662
7663 x = TREE_REAL_CST (arg);
7664 real_trunc (&r, TYPE_MODE (type), &x);
7665 return build_real (type, r);
7666 }
7667
7668 return fold_trunc_transparent_mathfn (fndecl, arg);
7669 }
7670
/* Fold function call to builtin floor, floorf or floorl with argument ARG.
   FNDECL is the floor decl.  Return NULL_TREE if no simplification can
   be made.  */

static tree
fold_builtin_floor (tree fndecl, tree arg)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Optimize floor of constant value.  */
  if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      REAL_VALUE_TYPE x;

      x = TREE_REAL_CST (arg);
      /* A NaN argument is left alone when flag_errno_math is set --
	 NOTE(review): presumably to preserve run-time errno/exception
	 effects; confirm against the other rounding folders.  */
      if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
	{
	  tree type = TREE_TYPE (TREE_TYPE (fndecl));
	  REAL_VALUE_TYPE r;

	  real_floor (&r, TYPE_MODE (type), &x);
	  return build_real (type, r);
	}
    }

  /* Fold floor (x) where x is nonnegative to trunc (x).  */
  if (tree_expr_nonnegative_p (arg))
    {
      tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
      if (truncfn)
	return build_call_expr (truncfn, 1, arg);
    }

  return fold_trunc_transparent_mathfn (fndecl, arg);
}
7706
7707 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7708 Return NULL_TREE if no simplification can be made. */
7709
7710 static tree
7711 fold_builtin_ceil (tree fndecl, tree arg)
7712 {
7713 if (!validate_arg (arg, REAL_TYPE))
7714 return NULL_TREE;
7715
7716 /* Optimize ceil of constant value. */
7717 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7718 {
7719 REAL_VALUE_TYPE x;
7720
7721 x = TREE_REAL_CST (arg);
7722 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7723 {
7724 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7725 REAL_VALUE_TYPE r;
7726
7727 real_ceil (&r, TYPE_MODE (type), &x);
7728 return build_real (type, r);
7729 }
7730 }
7731
7732 return fold_trunc_transparent_mathfn (fndecl, arg);
7733 }
7734
/* Fold function call to builtin round, roundf or roundl with argument ARG.
   FNDECL is the round decl.  Return NULL_TREE if no simplification can
   be made.  */

static tree
fold_builtin_round (tree fndecl, tree arg)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Optimize round of constant value.  */
  if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      REAL_VALUE_TYPE x;

      x = TREE_REAL_CST (arg);
      /* A NaN argument is left alone when flag_errno_math is set --
	 NOTE(review): presumably to preserve run-time errno/exception
	 effects; confirm against the other rounding folders.  */
      if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
	{
	  tree type = TREE_TYPE (TREE_TYPE (fndecl));
	  REAL_VALUE_TYPE r;

	  real_round (&r, TYPE_MODE (type), &x);
	  return build_real (type, r);
	}
    }

  return fold_trunc_transparent_mathfn (fndecl, arg);
}
7762
/* Fold function call to builtin lround, lroundf or lroundl (or the
   corresponding long long versions) and other rounding functions.  ARG
   is the argument to the call.  FNDECL selects which rounding mode is
   applied.  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_int_roundingfn (tree fndecl, tree arg)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Optimize lround of constant value.  */
  if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);

      /* Only finite values can be converted to an integer constant.  */
      if (real_isfinite (&x))
	{
	  tree itype = TREE_TYPE (TREE_TYPE (fndecl));
	  tree ftype = TREE_TYPE (arg);
	  unsigned HOST_WIDE_INT lo2;
	  HOST_WIDE_INT hi, lo;
	  REAL_VALUE_TYPE r;

	  /* Round according to which built-in was called.  */
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    CASE_FLT_FN (BUILT_IN_LFLOOR):
	    CASE_FLT_FN (BUILT_IN_LLFLOOR):
	      real_floor (&r, TYPE_MODE (ftype), &x);
	      break;

	    CASE_FLT_FN (BUILT_IN_LCEIL):
	    CASE_FLT_FN (BUILT_IN_LLCEIL):
	      real_ceil (&r, TYPE_MODE (ftype), &x);
	      break;

	    CASE_FLT_FN (BUILT_IN_LROUND):
	    CASE_FLT_FN (BUILT_IN_LLROUND):
	      real_round (&r, TYPE_MODE (ftype), &x);
	      break;

	    default:
	      gcc_unreachable ();
	    }

	  /* Fold only when the rounded value fits in the integral
	     return type.  */
	  REAL_VALUE_TO_INT (&lo, &hi, r);
	  if (!fit_double_type (lo, hi, &lo2, &hi, itype))
	    return build_int_cst_wide (itype, lo2, hi);
	}
    }

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x).  */
      if (tree_expr_nonnegative_p (arg))
	return fold_build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)),
			    arg);
      break;
    default:;
    }

  return fold_fixed_mathfn (fndecl, arg);
}
7828
/* Fold function call to builtin ffs, clz, ctz, popcount and parity
   and their long and long long variants (i.e. ffsl and ffsll).  ARG is
   the argument to the call.  Return NULL_TREE if no simplification can
   be made.  */

static tree
fold_builtin_bitop (tree fndecl, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Optimize for constant argument.  */
  if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
    {
      HOST_WIDE_INT hi, width, result;
      unsigned HOST_WIDE_INT lo;
      tree type;

      /* The constant is held as a LO/HI pair of host words; WIDTH is
	 the precision of its type in bits.  */
      type = TREE_TYPE (arg);
      width = TYPE_PRECISION (type);
      lo = TREE_INT_CST_LOW (arg);

      /* Clear all the bits that are beyond the type's precision.  */
      if (width > HOST_BITS_PER_WIDE_INT)
	{
	  hi = TREE_INT_CST_HIGH (arg);
	  if (width < 2 * HOST_BITS_PER_WIDE_INT)
	    hi &= ~((HOST_WIDE_INT) (-1) >> (width - HOST_BITS_PER_WIDE_INT));
	}
      else
	{
	  hi = 0;
	  if (width < HOST_BITS_PER_WIDE_INT)
	    lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
	}

      switch (DECL_FUNCTION_CODE (fndecl))
	{
	CASE_INT_FN (BUILT_IN_FFS):
	  /* ffs: one plus the index of the least significant set bit,
	     or zero if the argument is zero.  LO & -LO isolates the
	     lowest set bit.  */
	  if (lo != 0)
	    result = exact_log2 (lo & -lo) + 1;
	  else if (hi != 0)
	    result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi) + 1;
	  else
	    result = 0;
	  break;

	CASE_INT_FN (BUILT_IN_CLZ):
	  /* clz: leading zeros.  If the argument is zero, fold only
	     when the target defines a value at zero (the macro then
	     stores it into RESULT).  */
	  if (hi != 0)
	    result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
	  else if (lo != 0)
	    result = width - floor_log2 (lo) - 1;
	  else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
	    result = width;
	  break;

	CASE_INT_FN (BUILT_IN_CTZ):
	  /* ctz: trailing zeros; same zero-argument handling as clz.  */
	  if (lo != 0)
	    result = exact_log2 (lo & -lo);
	  else if (hi != 0)
	    result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi);
	  else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
	    result = width;
	  break;

	CASE_INT_FN (BUILT_IN_POPCOUNT):
	  /* popcount: count set bits by repeatedly clearing the lowest
	     one (Kernighan's trick).  */
	  result = 0;
	  while (lo)
	    result++, lo &= lo - 1;
	  while (hi)
	    result++, hi &= hi - 1;
	  break;

	CASE_INT_FN (BUILT_IN_PARITY):
	  /* parity: popcount modulo 2.  */
	  result = 0;
	  while (lo)
	    result++, lo &= lo - 1;
	  while (hi)
	    result++, hi &= hi - 1;
	  result &= 1;
	  break;

	default:
	  gcc_unreachable ();
	}

      return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
    }

  return NULL_TREE;
}
7920
/* Fold function call to builtin_bswap and the long and long long
   variants.  Return NULL_TREE if no simplification can be made.  */
static tree
fold_builtin_bswap (tree fndecl, tree arg)
{
  if (! validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Optimize constant value.  */
  if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
    {
      /* The constant and the byte-swapped result are each held as a
	 LO/HI pair of host words.  */
      HOST_WIDE_INT hi, width, r_hi = 0;
      unsigned HOST_WIDE_INT lo, r_lo = 0;
      tree type;

      type = TREE_TYPE (arg);
      width = TYPE_PRECISION (type);
      lo = TREE_INT_CST_LOW (arg);
      hi = TREE_INT_CST_HIGH (arg);

      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_BSWAP32:
	case BUILT_IN_BSWAP64:
	  {
	    int s;

	    /* Move each byte from bit position S to the mirrored
	       position D, assembling the result in R_LO/R_HI.  */
	    for (s = 0; s < width; s += 8)
	      {
		int d = width - s - 8;
		unsigned HOST_WIDE_INT byte;

		/* Extract the byte at S from the LO or HI word.  */
		if (s < HOST_BITS_PER_WIDE_INT)
		  byte = (lo >> s) & 0xff;
		else
		  byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;

		/* Deposit it at D in the result.  */
		if (d < HOST_BITS_PER_WIDE_INT)
		  r_lo |= byte << d;
		else
		  r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
	      }
	  }

	  break;

	default:
	  gcc_unreachable ();
	}

      /* A single host word suffices when the precision fits in it.  */
      if (width < HOST_BITS_PER_WIDE_INT)
	return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), r_lo);
      else
	return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl)), r_lo, r_hi);
    }

  return NULL_TREE;
}
7979
7980 /* Return true if EXPR is the real constant contained in VALUE. */
7981
7982 static bool
7983 real_dconstp (tree expr, const REAL_VALUE_TYPE *value)
7984 {
7985 STRIP_NOPS (expr);
7986
7987 return ((TREE_CODE (expr) == REAL_CST
7988 && !TREE_OVERFLOW (expr)
7989 && REAL_VALUES_EQUAL (TREE_REAL_CST (expr), *value))
7990 || (TREE_CODE (expr) == COMPLEX_CST
7991 && real_dconstp (TREE_REALPART (expr), value)
7992 && real_zerop (TREE_IMAGPART (expr))));
7993 }
7994
/* A subroutine of fold_builtin to fold the various logarithmic
   functions.  Return NULL_TREE if no simplification can be made.
   FUNC is the corresponding MPFR logarithm function.  */

static tree
fold_builtin_logarithm (tree fndecl, tree arg,
			int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
{
  if (validate_arg (arg, REAL_TYPE))
    {
      tree type = TREE_TYPE (TREE_TYPE (fndecl));
      tree res;
      /* Builtin code of the call forming ARG, if any; used to spot
	 logN(expN(x)) and friends below.  */
      const enum built_in_function fcode = builtin_mathfn_code (arg);

      /* Optimize log(e) = 1.0.  We're never passed an exact 'e',
	 instead we'll look for 'e' truncated to MODE.  So only do
	 this if flag_unsafe_math_optimizations is set.  */
      if (flag_unsafe_math_optimizations && func == mpfr_log)
	{
	  const REAL_VALUE_TYPE e_truncated =
	    real_value_truncate (TYPE_MODE (type), dconste);
	  if (real_dconstp (arg, &e_truncated))
	    return build_real (type, dconst1);
	}

      /* Calculate the result when the argument is a constant.  */
      if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
	return res;

      /* Special case, optimize logN(expN(x)) = x.  */
      if (flag_unsafe_math_optimizations
	  && ((func == mpfr_log
	       && (fcode == BUILT_IN_EXP
		   || fcode == BUILT_IN_EXPF
		   || fcode == BUILT_IN_EXPL))
	      || (func == mpfr_log2
		  && (fcode == BUILT_IN_EXP2
		      || fcode == BUILT_IN_EXP2F
		      || fcode == BUILT_IN_EXP2L))
	      || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
	return fold_convert (type, CALL_EXPR_ARG (arg, 0));

      /* Optimize logN(func()) for various exponential functions.  We
	 want to determine the value "x" and the power "exponent" in
	 order to transform logN(x**exponent) into exponent*logN(x).  */
      if (flag_unsafe_math_optimizations)
	{
	  /* X and EXPONENT stay null unless a case below matches.  */
	  tree exponent = 0, x = 0;

	  switch (fcode)
	    {
	    CASE_FLT_FN (BUILT_IN_EXP):
	      /* Prepare to do logN(exp(exponent) -> exponent*logN(e).  */
	      x = build_real (type,
			      real_value_truncate (TYPE_MODE (type), dconste));
	      exponent = CALL_EXPR_ARG (arg, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_EXP2):
	      /* Prepare to do logN(exp2(exponent) -> exponent*logN(2).  */
	      x = build_real (type, dconst2);
	      exponent = CALL_EXPR_ARG (arg, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_EXP10):
	    CASE_FLT_FN (BUILT_IN_POW10):
	      /* Prepare to do logN(exp10(exponent) -> exponent*logN(10).  */
	      x = build_real (type, dconst10);
	      exponent = CALL_EXPR_ARG (arg, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_SQRT):
	      /* Prepare to do logN(sqrt(x) -> 0.5*logN(x).  */
	      x = CALL_EXPR_ARG (arg, 0);
	      exponent = build_real (type, dconsthalf);
	      break;
	    CASE_FLT_FN (BUILT_IN_CBRT):
	      /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x).  */
	      x = CALL_EXPR_ARG (arg, 0);
	      exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
								dconstthird));
	      break;
	    CASE_FLT_FN (BUILT_IN_POW):
	      /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x).  */
	      x = CALL_EXPR_ARG (arg, 0);
	      exponent = CALL_EXPR_ARG (arg, 1);
	      break;
	    default:
	      break;
	    }

	  /* Now perform the optimization.  */
	  if (x && exponent)
	    {
	      tree logfn = build_call_expr (fndecl, 1, x);
	      return fold_build2 (MULT_EXPR, type, exponent, logfn);
	    }
	}
    }

  return NULL_TREE;
}
8094
8095 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8096 NULL_TREE if no simplification can be made. */
8097
8098 static tree
8099 fold_builtin_hypot (tree fndecl, tree arg0, tree arg1, tree type)
8100 {
8101 tree res, narg0, narg1;
8102
8103 if (!validate_arg (arg0, REAL_TYPE)
8104 || !validate_arg (arg1, REAL_TYPE))
8105 return NULL_TREE;
8106
8107 /* Calculate the result when the argument is a constant. */
8108 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8109 return res;
8110
8111 /* If either argument to hypot has a negate or abs, strip that off.
8112 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8113 narg0 = fold_strip_sign_ops (arg0);
8114 narg1 = fold_strip_sign_ops (arg1);
8115 if (narg0 || narg1)
8116 {
8117 return build_call_expr (fndecl, 2, narg0 ? narg0 : arg0,
8118 narg1 ? narg1 : arg1);
8119 }
8120
8121 /* If either argument is zero, hypot is fabs of the other. */
8122 if (real_zerop (arg0))
8123 return fold_build1 (ABS_EXPR, type, arg1);
8124 else if (real_zerop (arg1))
8125 return fold_build1 (ABS_EXPR, type, arg0);
8126
8127 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8128 if (flag_unsafe_math_optimizations
8129 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8130 {
8131 const REAL_VALUE_TYPE sqrt2_trunc
8132 = real_value_truncate (TYPE_MODE (type), dconstsqrt2);
8133 return fold_build2 (MULT_EXPR, type,
8134 fold_build1 (ABS_EXPR, type, arg0),
8135 build_real (type, sqrt2_trunc));
8136 }
8137
8138 return NULL_TREE;
8139 }
8140
8141
/* Fold a builtin function call to pow, powf, or powl.  Return
   NULL_TREE if no simplification can be made.  */
static tree
fold_builtin_pow (tree fndecl, tree arg0, tree arg1, tree type)
{
  tree res;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
    return res;

  /* Optimize pow(1.0,y) = 1.0.  */
  if (real_onep (arg0))
    return omit_one_operand (type, build_real (type, dconst1), arg1);

  /* Folds keyed on a constant exponent.  */
  if (TREE_CODE (arg1) == REAL_CST
      && !TREE_OVERFLOW (arg1))
    {
      REAL_VALUE_TYPE cint;
      REAL_VALUE_TYPE c;
      HOST_WIDE_INT n;

      c = TREE_REAL_CST (arg1);

      /* Optimize pow(x,0.0) = 1.0.  */
      if (REAL_VALUES_EQUAL (c, dconst0))
	return omit_one_operand (type, build_real (type, dconst1),
				 arg0);

      /* Optimize pow(x,1.0) = x.  */
      if (REAL_VALUES_EQUAL (c, dconst1))
	return arg0;

      /* Optimize pow(x,-1.0) = 1.0/x.  */
      if (REAL_VALUES_EQUAL (c, dconstm1))
	return fold_build2 (RDIV_EXPR, type,
			    build_real (type, dconst1), arg0);

      /* Optimize pow(x,0.5) = sqrt(x).  */
      if (flag_unsafe_math_optimizations
	  && REAL_VALUES_EQUAL (c, dconsthalf))
	{
	  tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);

	  if (sqrtfn != NULL_TREE)
	    return build_call_expr (sqrtfn, 1, arg0);
	}

      /* Optimize pow(x,1.0/3.0) = cbrt(x).  */
      if (flag_unsafe_math_optimizations)
	{
	  /* 1/3 is irrational in binary; compare against it truncated
	     to the type's mode.  */
	  const REAL_VALUE_TYPE dconstroot
	    = real_value_truncate (TYPE_MODE (type), dconstthird);

	  if (REAL_VALUES_EQUAL (c, dconstroot))
	    {
	      tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
	      if (cbrtfn != NULL_TREE)
		return build_call_expr (cbrtfn, 1, arg0);
	    }
	}

      /* Check for an integer exponent.  */
      n = real_to_integer (&c);
      real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
      if (real_identical (&c, &cint))
	{
	  /* Attempt to evaluate pow at compile-time.  */
	  if (TREE_CODE (arg0) == REAL_CST
	      && !TREE_OVERFLOW (arg0))
	    {
	      REAL_VALUE_TYPE x;
	      bool inexact;

	      /* Only use an inexact compile-time result under
		 -funsafe-math-optimizations.  */
	      x = TREE_REAL_CST (arg0);
	      inexact = real_powi (&x, TYPE_MODE (type), &x, n);
	      if (flag_unsafe_math_optimizations || !inexact)
		return build_real (type, x);
	    }

	  /* Strip sign ops from even integer powers.  */
	  if ((n & 1) == 0 && flag_unsafe_math_optimizations)
	    {
	      tree narg0 = fold_strip_sign_ops (arg0);
	      if (narg0)
		return build_call_expr (fndecl, 2, narg0, arg1);
	    }
	}
    }

  /* Folds keyed on the builtin forming the base ARG0.  */
  if (flag_unsafe_math_optimizations)
    {
      const enum built_in_function fcode = builtin_mathfn_code (arg0);

      /* Optimize pow(expN(x),y) = expN(x*y).  */
      if (BUILTIN_EXPONENT_P (fcode))
	{
	  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
	  tree arg = CALL_EXPR_ARG (arg0, 0);
	  arg = fold_build2 (MULT_EXPR, type, arg, arg1);
	  return build_call_expr (expfn, 1, arg);
	}

      /* Optimize pow(sqrt(x),y) = pow(x,y*0.5).  */
      if (BUILTIN_SQRT_P (fcode))
	{
	  tree narg0 = CALL_EXPR_ARG (arg0, 0);
	  tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
				    build_real (type, dconsthalf));
	  return build_call_expr (fndecl, 2, narg0, narg1);
	}

      /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative.  */
      if (BUILTIN_CBRT_P (fcode))
	{
	  tree arg = CALL_EXPR_ARG (arg0, 0);
	  if (tree_expr_nonnegative_p (arg))
	    {
	      const REAL_VALUE_TYPE dconstroot
		= real_value_truncate (TYPE_MODE (type), dconstthird);
	      tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
					build_real (type, dconstroot));
	      return build_call_expr (fndecl, 2, arg, narg1);
	    }
	}

      /* Optimize pow(pow(x,y),z) = pow(x,y*z).  */
      if (fcode == BUILT_IN_POW
	  || fcode == BUILT_IN_POWF
	  || fcode == BUILT_IN_POWL)
	{
	  tree arg00 = CALL_EXPR_ARG (arg0, 0);
	  tree arg01 = CALL_EXPR_ARG (arg0, 1);
	  tree narg1 = fold_build2 (MULT_EXPR, type, arg01, arg1);
	  return build_call_expr (fndecl, 2, arg00, narg1);
	}
    }

  return NULL_TREE;
}
8286
8287 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8288 Return NULL_TREE if no simplification can be made. */
8289 static tree
8290 fold_builtin_powi (tree fndecl ATTRIBUTE_UNUSED,
8291 tree arg0, tree arg1, tree type)
8292 {
8293 if (!validate_arg (arg0, REAL_TYPE)
8294 || !validate_arg (arg1, INTEGER_TYPE))
8295 return NULL_TREE;
8296
8297 /* Optimize pow(1.0,y) = 1.0. */
8298 if (real_onep (arg0))
8299 return omit_one_operand (type, build_real (type, dconst1), arg1);
8300
8301 if (host_integerp (arg1, 0))
8302 {
8303 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
8304
8305 /* Evaluate powi at compile-time. */
8306 if (TREE_CODE (arg0) == REAL_CST
8307 && !TREE_OVERFLOW (arg0))
8308 {
8309 REAL_VALUE_TYPE x;
8310 x = TREE_REAL_CST (arg0);
8311 real_powi (&x, TYPE_MODE (type), &x, c);
8312 return build_real (type, x);
8313 }
8314
8315 /* Optimize pow(x,0) = 1.0. */
8316 if (c == 0)
8317 return omit_one_operand (type, build_real (type, dconst1),
8318 arg0);
8319
8320 /* Optimize pow(x,1) = x. */
8321 if (c == 1)
8322 return arg0;
8323
8324 /* Optimize pow(x,-1) = 1.0/x. */
8325 if (c == -1)
8326 return fold_build2 (RDIV_EXPR, type,
8327 build_real (type, dconst1), arg0);
8328 }
8329
8330 return NULL_TREE;
8331 }
8332
/* A subroutine of fold_builtin to fold the various exponent
   functions.  Return NULL_TREE if no simplification can be made.
   FUNC is the corresponding MPFR exponent function.  */

static tree
fold_builtin_exponent (tree fndecl, tree arg,
		       int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
{
  if (validate_arg (arg, REAL_TYPE))
    {
      tree type = TREE_TYPE (TREE_TYPE (fndecl));
      tree res;

      /* Calculate the result when the argument is a constant.  */
      if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
	return res;

      /* Optimize expN(logN(x)) = x.  */
      if (flag_unsafe_math_optimizations)
	{
	  const enum built_in_function fcode = builtin_mathfn_code (arg);

	  /* Only fold when the inner log's base matches this exp's
	     base (identified by FUNC).  */
	  if ((func == mpfr_exp
	       && (fcode == BUILT_IN_LOG
		   || fcode == BUILT_IN_LOGF
		   || fcode == BUILT_IN_LOGL))
	      || (func == mpfr_exp2
		  && (fcode == BUILT_IN_LOG2
		      || fcode == BUILT_IN_LOG2F
		      || fcode == BUILT_IN_LOG2L))
	      || (func == mpfr_exp10
		  && (fcode == BUILT_IN_LOG10
		      || fcode == BUILT_IN_LOG10F
		      || fcode == BUILT_IN_LOG10L)))
	    return fold_convert (type, CALL_EXPR_ARG (arg, 0));
	}
    }

  return NULL_TREE;
}
8373
8374 /* Return true if VAR is a VAR_DECL or a component thereof. */
8375
8376 static bool
8377 var_decl_component_p (tree var)
8378 {
8379 tree inner = var;
8380 while (handled_component_p (inner))
8381 inner = TREE_OPERAND (inner, 0);
8382 return SSA_VAR_P (inner);
8383 }
8384
/* Fold function call to builtin memset.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_memset (tree dest, tree c, tree len, tree type, bool ignore)
{
  tree var, ret;
  unsigned HOST_WIDE_INT length, cval;

  if (! validate_arg (dest, POINTER_TYPE)
      || ! validate_arg (c, INTEGER_TYPE)
      || ! validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  if (! host_integerp (len, 1))
    return NULL_TREE;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    return omit_one_operand (type, dest, c);

  if (! host_integerp (c, 1) || TREE_SIDE_EFFECTS (dest))
    return NULL_TREE;

  /* DEST must be the address of a non-volatile object so the memset
     can be turned into a plain store.  */
  var = dest;
  STRIP_NOPS (var);
  if (TREE_CODE (var) != ADDR_EXPR)
    return NULL_TREE;

  var = TREE_OPERAND (var, 0);
  if (TREE_THIS_VOLATILE (var))
    return NULL_TREE;

  if (!INTEGRAL_TYPE_P (TREE_TYPE (var))
      && !POINTER_TYPE_P (TREE_TYPE (var)))
    return NULL_TREE;

  if (! var_decl_component_p (var))
    return NULL_TREE;

  /* The store is only valid if LEN covers exactly the object and the
     destination is sufficiently aligned.  */
  length = tree_low_cst (len, 1);
  if (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (var))) != length
      || get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT
	 < (int) length)
    return NULL_TREE;

  if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
    return NULL_TREE;

  if (integer_zerop (c))
    cval = 0;
  else
    {
      if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
	return NULL_TREE;

      /* Replicate the byte into every byte of CVAL.  The last step is
	 written as two shifts so the shift count never reaches the
	 width of HOST_WIDE_INT when it is only 32 bits.  */
      cval = tree_low_cst (c, 1);
      cval &= 0xff;
      cval |= cval << 8;
      cval |= cval << 16;
      cval |= (cval << 31) << 1;
    }

  /* Emit *VAR = CVAL; keep DEST as the value when the result is used.  */
  ret = build_int_cst_type (TREE_TYPE (var), cval);
  ret = build2 (MODIFY_EXPR, TREE_TYPE (var), var, ret);
  if (ignore)
    return ret;

  return omit_one_operand (type, dest, ret);
}
8455
/* Fold function call to builtin bzero.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_bzero (tree dest, tree size, bool ignore)
{
  if (! validate_arg (dest, POINTER_TYPE)
      || ! validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;

  if (!ignore)
    return NULL_TREE;

  /* New argument list transforming bzero(ptr x, int y) to
     memset(ptr x, int 0, size_t y).  This is done this way
     so that if it isn't expanded inline, we fallback to
     calling bzero instead of memset.  */

  return fold_builtin_memset (dest, integer_zero_node,
			      fold_convert (sizetype, size),
			      void_type_node, ignore);
}
8478
8479 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8480 NULL_TREE if no simplification can be made.
8481 If ENDP is 0, return DEST (like memcpy).
8482 If ENDP is 1, return DEST+LEN (like mempcpy).
8483 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8484 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8485 (memmove). */
8486
8487 static tree
8488 fold_builtin_memory_op (tree dest, tree src, tree len, tree type, bool ignore, int endp)
8489 {
8490 tree destvar, srcvar, expr;
8491
8492 if (! validate_arg (dest, POINTER_TYPE)
8493 || ! validate_arg (src, POINTER_TYPE)
8494 || ! validate_arg (len, INTEGER_TYPE))
8495 return NULL_TREE;
8496
8497 /* If the LEN parameter is zero, return DEST. */
8498 if (integer_zerop (len))
8499 return omit_one_operand (type, dest, src);
8500
8501 /* If SRC and DEST are the same (and not volatile), return
8502 DEST{,+LEN,+LEN-1}. */
8503 if (operand_equal_p (src, dest, 0))
8504 expr = len;
8505 else
8506 {
8507 tree srctype, desttype;
8508 if (endp == 3)
8509 {
8510 int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8511 int dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8512
8513 /* Both DEST and SRC must be pointer types.
8514 ??? This is what old code did. Is the testing for pointer types
8515 really mandatory?
8516
8517 If either SRC is readonly or length is 1, we can use memcpy. */
8518 if (dest_align && src_align
8519 && (readonly_data_expr (src)
8520 || (host_integerp (len, 1)
8521 && (MIN (src_align, dest_align) / BITS_PER_UNIT >=
8522 tree_low_cst (len, 1)))))
8523 {
8524 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8525 if (!fn)
8526 return NULL_TREE;
8527 return build_call_expr (fn, 3, dest, src, len);
8528 }
8529 return NULL_TREE;
8530 }
8531
8532 if (!host_integerp (len, 0))
8533 return NULL_TREE;
8534 /* FIXME:
8535 This logic lose for arguments like (type *)malloc (sizeof (type)),
8536 since we strip the casts of up to VOID return value from malloc.
8537 Perhaps we ought to inherit type from non-VOID argument here? */
8538 STRIP_NOPS (src);
8539 STRIP_NOPS (dest);
8540 srctype = TREE_TYPE (TREE_TYPE (src));
8541 desttype = TREE_TYPE (TREE_TYPE (dest));
8542 if (!srctype || !desttype
8543 || !TYPE_SIZE_UNIT (srctype)
8544 || !TYPE_SIZE_UNIT (desttype)
8545 || TREE_CODE (TYPE_SIZE_UNIT (srctype)) != INTEGER_CST
8546 || TREE_CODE (TYPE_SIZE_UNIT (desttype)) != INTEGER_CST
8547 || !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len)
8548 || !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8549 return NULL_TREE;
8550
8551 if (get_pointer_alignment (dest, BIGGEST_ALIGNMENT)
8552 < (int) TYPE_ALIGN (desttype)
8553 || (get_pointer_alignment (src, BIGGEST_ALIGNMENT)
8554 < (int) TYPE_ALIGN (srctype)))
8555 return NULL_TREE;
8556
8557 if (!ignore)
8558 dest = builtin_save_expr (dest);
8559
8560 srcvar = build_fold_indirect_ref (src);
8561 if (TREE_THIS_VOLATILE (srcvar))
8562 return NULL_TREE;
8563 if (!tree_int_cst_equal (lang_hooks.expr_size (srcvar), len))
8564 return NULL_TREE;
8565 /* With memcpy, it is possible to bypass aliasing rules, so without
8566 this check i. e. execute/20060930-2.c would be misoptimized, because
8567 it use conflicting alias set to hold argument for the memcpy call.
8568 This check is probably unnecesary with -fno-strict-aliasing.
8569 Similarly for destvar. See also PR29286. */
8570 if (!var_decl_component_p (srcvar)
8571 /* Accept: memcpy (*char_var, "test", 1); that simplify
8572 to char_var='t'; */
8573 || is_gimple_min_invariant (srcvar)
8574 || readonly_data_expr (src))
8575 return NULL_TREE;
8576
8577 destvar = build_fold_indirect_ref (dest);
8578 if (TREE_THIS_VOLATILE (destvar))
8579 return NULL_TREE;
8580 if (!tree_int_cst_equal (lang_hooks.expr_size (destvar), len))
8581 return NULL_TREE;
8582 if (!var_decl_component_p (destvar))
8583 return NULL_TREE;
8584
8585 if (srctype == desttype
8586 || (gimple_in_ssa_p (cfun)
8587 && useless_type_conversion_p (desttype, srctype)))
8588 expr = srcvar;
8589 else if ((INTEGRAL_TYPE_P (TREE_TYPE (srcvar))
8590 || POINTER_TYPE_P (TREE_TYPE (srcvar)))
8591 && (INTEGRAL_TYPE_P (TREE_TYPE (destvar))
8592 || POINTER_TYPE_P (TREE_TYPE (destvar))))
8593 expr = fold_convert (TREE_TYPE (destvar), srcvar);
8594 else
8595 expr = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (destvar), srcvar);
8596 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, expr);
8597 }
8598
8599 if (ignore)
8600 return expr;
8601
8602 if (endp == 0 || endp == 3)
8603 return omit_one_operand (type, dest, expr);
8604
8605 if (expr == len)
8606 expr = NULL_TREE;
8607
8608 if (endp == 2)
8609 len = fold_build2 (MINUS_EXPR, TREE_TYPE (len), len,
8610 ssize_int (1));
8611
8612 dest = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
8613 dest = fold_convert (type, dest);
8614 if (expr)
8615 dest = omit_one_operand (type, dest, expr);
8616 return dest;
8617 }
8618
/* Fold function call to builtin strcpy with arguments DEST and SRC.
   If LEN is not NULL, it represents the length of the string to be
   copied.  Return NULL_TREE if no simplification can be made.  */

tree
fold_builtin_strcpy (tree fndecl, tree dest, tree src, tree len)
{
  tree fn;

  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (src, POINTER_TYPE))
    return NULL_TREE;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (operand_equal_p (src, dest, 0))
    return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);

  /* The memcpy transformation below can grow code; skip it when
     optimizing for size.  */
  if (optimize_size)
    return NULL_TREE;

  fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
  if (!fn)
    return NULL_TREE;

  /* Without a caller-provided length we need a constant, side-effect
     free strlen of SRC.  */
  if (!len)
    {
      len = c_strlen (src, 1);
      if (! len || TREE_SIDE_EFFECTS (len))
	return NULL_TREE;
    }

  /* Copy LEN + 1 bytes to include the terminating NUL.  */
  len = size_binop (PLUS_EXPR, len, ssize_int (1));
  return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
		       build_call_expr (fn, 3, dest, src, len));
}
8654
/* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
   If SLEN is not NULL, it represents the length of the source string.
   Return NULL_TREE if no simplification can be made.  */

tree
fold_builtin_strncpy (tree fndecl, tree dest, tree src, tree len, tree slen)
{
  tree fn;

  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (src, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);

  /* We can't compare slen with len as constants below if len is not a
     constant.  */
  if (len == 0 || TREE_CODE (len) != INTEGER_CST)
    return NULL_TREE;

  if (!slen)
    slen = c_strlen (src, 1);

  /* Now, we must be passed a constant src ptr parameter.  */
  if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
    return NULL_TREE;

  /* Account for the terminating NUL in the source.  */
  slen = size_binop (PLUS_EXPR, slen, ssize_int (1));

  /* We do not support simplification of this case, though we do
     support it when expanding trees into RTL.  */
  /* FIXME: generate a call to __builtin_memset.  */
  if (tree_int_cst_lt (slen, len))
    return NULL_TREE;

  /* OK transform into builtin memcpy.  */
  fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
  if (!fn)
    return NULL_TREE;
  return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
		       build_call_expr (fn, 3, dest, src, len));
}
8700
8701 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8702 arguments to the call, and TYPE is its return type.
8703 Return NULL_TREE if no simplification can be made. */
8704
8705 static tree
8706 fold_builtin_memchr (tree arg1, tree arg2, tree len, tree type)
8707 {
8708 if (!validate_arg (arg1, POINTER_TYPE)
8709 || !validate_arg (arg2, INTEGER_TYPE)
8710 || !validate_arg (len, INTEGER_TYPE))
8711 return NULL_TREE;
8712 else
8713 {
8714 const char *p1;
8715
8716 if (TREE_CODE (arg2) != INTEGER_CST
8717 || !host_integerp (len, 1))
8718 return NULL_TREE;
8719
8720 p1 = c_getstr (arg1);
8721 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
8722 {
8723 char c;
8724 const char *r;
8725 tree tem;
8726
8727 if (target_char_cast (arg2, &c))
8728 return NULL_TREE;
8729
8730 r = memchr (p1, c, tree_low_cst (len, 1));
8731
8732 if (r == NULL)
8733 return build_int_cst (TREE_TYPE (arg1), 0);
8734
8735 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (arg1), arg1,
8736 size_int (r - p1));
8737 return fold_convert (type, tem);
8738 }
8739 return NULL_TREE;
8740 }
8741 }
8742
/* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_memcmp (tree arg1, tree arg2, tree len)
{
  const char *p1, *p2;

  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return zero.  */
  if (integer_zerop (len))
    return omit_two_operands (integer_type_node, integer_zero_node,
			      arg1, arg2);

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return omit_one_operand (integer_type_node, integer_zero_node, len);

  p1 = c_getstr (arg1);
  p2 = c_getstr (arg2);

  /* If all arguments are constant, and the value of len is not greater
     than the lengths of arg1 and arg2, evaluate at compile-time.  */
  if (host_integerp (len, 1) && p1 && p2
      && compare_tree_int (len, strlen (p1) + 1) <= 0
      && compare_tree_int (len, strlen (p2) + 1) <= 0)
    {
      /* Normalize the host memcmp result to -1/0/1.  */
      const int r = memcmp (p1, p2, tree_low_cst (len, 1));

      if (r > 0)
	return integer_one_node;
      else if (r < 0)
	return integer_minus_one_node;
      else
	return integer_zero_node;
    }

  /* If len parameter is one, return an expression corresponding to
     (*(const unsigned char*)arg1 - (const unsigned char*)arg2).  */
  if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree ind1 = fold_convert (integer_type_node,
				build1 (INDIRECT_REF, cst_uchar_node,
					fold_convert (cst_uchar_ptr_node,
						      arg1)));
      tree ind2 = fold_convert (integer_type_node,
				build1 (INDIRECT_REF, cst_uchar_node,
					fold_convert (cst_uchar_ptr_node,
						      arg2)));
      return fold_build2 (MINUS_EXPR, integer_type_node, ind1, ind2);
    }

  return NULL_TREE;
}
8805
8806 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
8807 Return NULL_TREE if no simplification can be made. */
8808
8809 static tree
8810 fold_builtin_strcmp (tree arg1, tree arg2)
8811 {
8812 const char *p1, *p2;
8813
8814 if (!validate_arg (arg1, POINTER_TYPE)
8815 || !validate_arg (arg2, POINTER_TYPE))
8816 return NULL_TREE;
8817
8818 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8819 if (operand_equal_p (arg1, arg2, 0))
8820 return integer_zero_node;
8821
8822 p1 = c_getstr (arg1);
8823 p2 = c_getstr (arg2);
8824
8825 if (p1 && p2)
8826 {
8827 const int i = strcmp (p1, p2);
8828 if (i < 0)
8829 return integer_minus_one_node;
8830 else if (i > 0)
8831 return integer_one_node;
8832 else
8833 return integer_zero_node;
8834 }
8835
8836 /* If the second arg is "", return *(const unsigned char*)arg1. */
8837 if (p2 && *p2 == '\0')
8838 {
8839 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8840 tree cst_uchar_ptr_node
8841 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8842
8843 return fold_convert (integer_type_node,
8844 build1 (INDIRECT_REF, cst_uchar_node,
8845 fold_convert (cst_uchar_ptr_node,
8846 arg1)));
8847 }
8848
8849 /* If the first arg is "", return -*(const unsigned char*)arg2. */
8850 if (p1 && *p1 == '\0')
8851 {
8852 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8853 tree cst_uchar_ptr_node
8854 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8855
8856 tree temp = fold_convert (integer_type_node,
8857 build1 (INDIRECT_REF, cst_uchar_node,
8858 fold_convert (cst_uchar_ptr_node,
8859 arg2)));
8860 return fold_build1 (NEGATE_EXPR, integer_type_node, temp);
8861 }
8862
8863 return NULL_TREE;
8864 }
8865
/* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_strncmp (tree arg1, tree arg2, tree len)
{
  const char *p1, *p2;

  /* Both string arguments must be pointers and LEN an integer.  */
  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return zero.  */
  if (integer_zerop (len))
    return omit_two_operands (integer_type_node, integer_zero_node,
			      arg1, arg2);

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return omit_one_operand (integer_type_node, integer_zero_node, len);

  /* Try to extract both arguments as constant C strings.  */
  p1 = c_getstr (arg1);
  p2 = c_getstr (arg2);

  /* If everything is constant, compute the result at compile time with
     the host strncmp and fold to -1/0/1.  */
  if (host_integerp (len, 1) && p1 && p2)
    {
      const int i = strncmp (p1, p2, tree_low_cst (len, 1));
      if (i > 0)
	return integer_one_node;
      else if (i < 0)
	return integer_minus_one_node;
      else
	return integer_zero_node;
    }

  /* If the second arg is "", and the length is greater than zero,
     return *(const unsigned char*)arg1.  */
  if (p2 && *p2 == '\0'
      && TREE_CODE (len) == INTEGER_CST
      && tree_int_cst_sgn (len) == 1)
    {
      /* strncmp compares unsigned chars, hence the const unsigned char
	 pointer type used for the dereference.  */
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      return fold_convert (integer_type_node,
			   build1 (INDIRECT_REF, cst_uchar_node,
				   fold_convert (cst_uchar_ptr_node,
						 arg1)));
    }

  /* If the first arg is "", and the length is greater than zero,
     return -*(const unsigned char*)arg2.  */
  if (p1 && *p1 == '\0'
      && TREE_CODE (len) == INTEGER_CST
      && tree_int_cst_sgn (len) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree temp = fold_convert (integer_type_node,
				build1 (INDIRECT_REF, cst_uchar_node,
					fold_convert (cst_uchar_ptr_node,
						      arg2)));
      return fold_build1 (NEGATE_EXPR, integer_type_node, temp);
    }

  /* If len parameter is one, return an expression corresponding to
     (*(const unsigned char*)arg1 - (const unsigned char*)arg2).  */
  if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree ind1 = fold_convert (integer_type_node,
				build1 (INDIRECT_REF, cst_uchar_node,
					fold_convert (cst_uchar_ptr_node,
						      arg1)));
      tree ind2 = fold_convert (integer_type_node,
				build1 (INDIRECT_REF, cst_uchar_node,
					fold_convert (cst_uchar_ptr_node,
						      arg2)));
      return fold_build2 (MINUS_EXPR, integer_type_node, ind1, ind2);
    }

  return NULL_TREE;
}
8956
8957 /* Fold function call to builtin signbit, signbitf or signbitl with argument
8958 ARG. Return NULL_TREE if no simplification can be made. */
8959
8960 static tree
8961 fold_builtin_signbit (tree arg, tree type)
8962 {
8963 tree temp;
8964
8965 if (!validate_arg (arg, REAL_TYPE))
8966 return NULL_TREE;
8967
8968 /* If ARG is a compile-time constant, determine the result. */
8969 if (TREE_CODE (arg) == REAL_CST
8970 && !TREE_OVERFLOW (arg))
8971 {
8972 REAL_VALUE_TYPE c;
8973
8974 c = TREE_REAL_CST (arg);
8975 temp = REAL_VALUE_NEGATIVE (c) ? integer_one_node : integer_zero_node;
8976 return fold_convert (type, temp);
8977 }
8978
8979 /* If ARG is non-negative, the result is always zero. */
8980 if (tree_expr_nonnegative_p (arg))
8981 return omit_one_operand (type, integer_zero_node, arg);
8982
8983 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
8984 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
8985 return fold_build2 (LT_EXPR, type, arg,
8986 build_real (TREE_TYPE (arg), dconst0));
8987
8988 return NULL_TREE;
8989 }
8990
8991 /* Fold function call to builtin copysign, copysignf or copysignl with
8992 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
8993 be made. */
8994
8995 static tree
8996 fold_builtin_copysign (tree fndecl, tree arg1, tree arg2, tree type)
8997 {
8998 tree tem;
8999
9000 if (!validate_arg (arg1, REAL_TYPE)
9001 || !validate_arg (arg2, REAL_TYPE))
9002 return NULL_TREE;
9003
9004 /* copysign(X,X) is X. */
9005 if (operand_equal_p (arg1, arg2, 0))
9006 return fold_convert (type, arg1);
9007
9008 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9009 if (TREE_CODE (arg1) == REAL_CST
9010 && TREE_CODE (arg2) == REAL_CST
9011 && !TREE_OVERFLOW (arg1)
9012 && !TREE_OVERFLOW (arg2))
9013 {
9014 REAL_VALUE_TYPE c1, c2;
9015
9016 c1 = TREE_REAL_CST (arg1);
9017 c2 = TREE_REAL_CST (arg2);
9018 /* c1.sign := c2.sign. */
9019 real_copysign (&c1, &c2);
9020 return build_real (type, c1);
9021 }
9022
9023 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9024 Remember to evaluate Y for side-effects. */
9025 if (tree_expr_nonnegative_p (arg2))
9026 return omit_one_operand (type,
9027 fold_build1 (ABS_EXPR, type, arg1),
9028 arg2);
9029
9030 /* Strip sign changing operations for the first argument. */
9031 tem = fold_strip_sign_ops (arg1);
9032 if (tem)
9033 return build_call_expr (fndecl, 2, tem, arg2);
9034
9035 return NULL_TREE;
9036 }
9037
9038 /* Fold a call to builtin isascii with argument ARG. */
9039
9040 static tree
9041 fold_builtin_isascii (tree arg)
9042 {
9043 if (!validate_arg (arg, INTEGER_TYPE))
9044 return NULL_TREE;
9045 else
9046 {
9047 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9048 arg = build2 (BIT_AND_EXPR, integer_type_node, arg,
9049 build_int_cst (NULL_TREE,
9050 ~ (unsigned HOST_WIDE_INT) 0x7f));
9051 return fold_build2 (EQ_EXPR, integer_type_node,
9052 arg, integer_zero_node);
9053 }
9054 }
9055
/* Fold a call to builtin toascii with argument ARG.
   Return NULL_TREE when ARG is not an integer.  */

static tree
fold_builtin_toascii (tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Transform toascii(c) -> (c & 0x7f).  */
  return fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
		      build_int_cst (NULL_TREE, 0x7f));
}
9068
9069 /* Fold a call to builtin isdigit with argument ARG. */
9070
9071 static tree
9072 fold_builtin_isdigit (tree arg)
9073 {
9074 if (!validate_arg (arg, INTEGER_TYPE))
9075 return NULL_TREE;
9076 else
9077 {
9078 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9079 /* According to the C standard, isdigit is unaffected by locale.
9080 However, it definitely is affected by the target character set. */
9081 unsigned HOST_WIDE_INT target_digit0
9082 = lang_hooks.to_target_charset ('0');
9083
9084 if (target_digit0 == 0)
9085 return NULL_TREE;
9086
9087 arg = fold_convert (unsigned_type_node, arg);
9088 arg = build2 (MINUS_EXPR, unsigned_type_node, arg,
9089 build_int_cst (unsigned_type_node, target_digit0));
9090 return fold_build2 (LE_EXPR, integer_type_node, arg,
9091 build_int_cst (unsigned_type_node, 9));
9092 }
9093 }
9094
9095 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9096
9097 static tree
9098 fold_builtin_fabs (tree arg, tree type)
9099 {
9100 if (!validate_arg (arg, REAL_TYPE))
9101 return NULL_TREE;
9102
9103 arg = fold_convert (type, arg);
9104 if (TREE_CODE (arg) == REAL_CST)
9105 return fold_abs_const (arg, type);
9106 return fold_build1 (ABS_EXPR, type, arg);
9107 }
9108
9109 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9110
9111 static tree
9112 fold_builtin_abs (tree arg, tree type)
9113 {
9114 if (!validate_arg (arg, INTEGER_TYPE))
9115 return NULL_TREE;
9116
9117 arg = fold_convert (type, arg);
9118 if (TREE_CODE (arg) == INTEGER_CST)
9119 return fold_abs_const (arg, type);
9120 return fold_build1 (ABS_EXPR, type, arg);
9121 }
9122
/* Fold a call to builtin fmin or fmax.  MAX selects between the two.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_fmin_fmax (tree arg0, tree arg1, tree type, bool max)
{
  if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
    {
      /* Calculate the result when the argument is a constant.  */
      tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));

      if (res)
	return res;

      /* If either argument is NaN, return the other one.  Avoid the
	 transformation if we get (and honor) a signalling NaN.  Using
	 omit_one_operand() ensures we create a non-lvalue.  */
      if (TREE_CODE (arg0) == REAL_CST
	  && real_isnan (&TREE_REAL_CST (arg0))
	  && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	      || ! TREE_REAL_CST (arg0).signalling))
	return omit_one_operand (type, arg1, arg0);
      if (TREE_CODE (arg1) == REAL_CST
	  && real_isnan (&TREE_REAL_CST (arg1))
	  && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
	      || ! TREE_REAL_CST (arg1).signalling))
	return omit_one_operand (type, arg0, arg1);

      /* Transform fmin/fmax(x,x) -> x.  */
      if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
	return omit_one_operand (type, arg0, arg1);

      /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR.  C99 requires these
	 functions to return the numeric arg if the other one is NaN.
	 These tree codes don't honor that, so only transform if
	 -ffinite-math-only is set.  C99 doesn't require -0.0 to be
	 handled, so we don't have to worry about it either.  */
      if (flag_finite_math_only)
	return fold_build2 ((max ? MAX_EXPR : MIN_EXPR), type,
			    fold_convert (type, arg0),
			    fold_convert (type, arg1));
    }
  return NULL_TREE;
}
9166
9167 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9168
9169 static tree
9170 fold_builtin_carg (tree arg, tree type)
9171 {
9172 if (validate_arg (arg, COMPLEX_TYPE))
9173 {
9174 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9175
9176 if (atan2_fn)
9177 {
9178 tree new_arg = builtin_save_expr (arg);
9179 tree r_arg = fold_build1 (REALPART_EXPR, type, new_arg);
9180 tree i_arg = fold_build1 (IMAGPART_EXPR, type, new_arg);
9181 return build_call_expr (atan2_fn, 2, i_arg, r_arg);
9182 }
9183 }
9184
9185 return NULL_TREE;
9186 }
9187
/* Fold a call to builtin logb/ilogb.  ARG is the argument; RETTYPE is
   the result type (real for logb, integer for ilogb, which is how the
   two are distinguished below).  Return NULL_TREE if no simplification
   can be made.  */

static tree
fold_builtin_logb (tree arg, tree rettype)
{
  if (! validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg);

  /* Only constant, non-overflowed arguments are folded here.  */
  if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);

      switch (value->cl)
	{
	case rvc_nan:
	case rvc_inf:
	  /* If arg is Inf or NaN and we're logb, return it.  */
	  if (TREE_CODE (rettype) == REAL_TYPE)
	    return fold_convert (rettype, arg);
	  /* Fall through... (ilogb of Inf/NaN is left unfolded).  */
	case rvc_zero:
	  /* Zero may set errno and/or raise an exception for logb, also
	     for ilogb we don't know FP_ILOGB0.  */
	  return NULL_TREE;
	case rvc_normal:
	  /* For normal numbers, proceed iff radix == 2.  In GCC,
	     normalized significands are in the range [0.5, 1.0).  We
	     want the exponent as if they were [1.0, 2.0) so get the
	     exponent and subtract 1.  */
	  if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
	    return fold_convert (rettype, build_int_cst (NULL_TREE,
							 REAL_EXP (value)-1));
	  break;
	}
    }

  return NULL_TREE;
}
9228
/* Fold a call to builtin significand, if radix == 2.  ARG is the
   argument; RETTYPE the result type.  Return NULL_TREE if no
   simplification can be made.  */

static tree
fold_builtin_significand (tree arg, tree rettype)
{
  if (! validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg);

  /* Only constant, non-overflowed arguments are folded here.  */
  if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);

      switch (value->cl)
	{
	case rvc_zero:
	case rvc_nan:
	case rvc_inf:
	  /* If arg is +-0, +-Inf or +-NaN, then return it.  */
	  return fold_convert (rettype, arg);
	case rvc_normal:
	  /* For normal numbers, proceed iff radix == 2.  */
	  if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
	    {
	      REAL_VALUE_TYPE result = *value;
	      /* In GCC, normalized significands are in the range [0.5,
		 1.0).  We want them to be [1.0, 2.0) so set the
		 exponent to 1.  */
	      SET_REAL_EXP (&result, 1);
	      return build_real (rettype, result);
	    }
	  break;
	}
    }

  return NULL_TREE;
}
9267
/* Fold a call to builtin frexp, we can assume the base is 2.  ARG0 is
   the value, ARG1 the int* exponent out-parameter, RETTYPE the result
   type.  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_frexp (tree arg0, tree arg1, tree rettype)
{
  if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg0);

  /* Only fold when the value operand is a usable real constant.  */
  if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
    return NULL_TREE;

  arg1 = build_fold_indirect_ref (arg1);

  /* Proceed if a valid pointer type was passed in.  */
  if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
      tree frac, exp;

      switch (value->cl)
	{
	case rvc_zero:
	  /* For +-0, return (*exp = 0, +-0).  */
	  exp = integer_zero_node;
	  frac = arg0;
	  break;
	case rvc_nan:
	case rvc_inf:
	  /* For +-NaN or +-Inf, *exp is unspecified, return arg0.  */
	  return omit_one_operand (rettype, arg0, arg1);
	case rvc_normal:
	  {
	    /* Since the frexp function always expects base 2, and in
	       GCC normalized significands are already in the range
	       [0.5, 1.0), we have exactly what frexp wants.  */
	    REAL_VALUE_TYPE frac_rvt = *value;
	    SET_REAL_EXP (&frac_rvt, 0);
	    frac = build_real (rettype, frac_rvt);
	    exp = build_int_cst (NULL_TREE, REAL_EXP (value));
	  }
	  break;
	default:
	  gcc_unreachable ();
	}

      /* Create the COMPOUND_EXPR (*arg1 = exp, frac).  */
      arg1 = fold_build2 (MODIFY_EXPR, rettype, arg1, exp);
      TREE_SIDE_EFFECTS (arg1) = 1;
      return fold_build2 (COMPOUND_EXPR, rettype, arg1, frac);
    }

  return NULL_TREE;
}
9323
/* Fold a call to builtin ldexp or scalbn/scalbln.  If LDEXP is true
   then we can assume the base is two.  If it's false, then we have to
   check the mode of the TYPE parameter in certain cases.  */

static tree
fold_builtin_load_exponent (tree arg0, tree arg1, tree type, bool ldexp)
{
  if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
    {
      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0.  */
      if (real_zerop (arg0) || integer_zerop (arg1)
	  || (TREE_CODE (arg0) == REAL_CST
	      && !real_isfinite (&TREE_REAL_CST (arg0))))
	return omit_one_operand (type, arg0, arg1);

      /* If both arguments are constant, then try to evaluate it.
	 For scalbn/scalbln this additionally requires a radix-2
	 target format.  */
      if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
	  && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
	  && host_integerp (arg1, 0))
	{
	  /* Bound the maximum adjustment to twice the range of the
	     mode's valid exponents.  Use abs to ensure the range is
	     positive as a sanity check.  */
	  const long max_exp_adj = 2 *
	    labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
		  - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);

	  /* Get the user-requested adjustment.  */
	  const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);

	  /* The requested adjustment must be inside this range.  This
	     is a preliminary cap to avoid things like overflow, we
	     may still fail to compute the result for other reasons.  */
	  if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
	    {
	      REAL_VALUE_TYPE initial_result;

	      real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);

	      /* Ensure we didn't overflow.  */
	      if (! real_isinf (&initial_result))
		{
		  const REAL_VALUE_TYPE trunc_result
		    = real_value_truncate (TYPE_MODE (type), initial_result);

		  /* Only proceed if the target mode can hold the
		     resulting value.  */
		  if (REAL_VALUES_EQUAL (initial_result, trunc_result))
		    return build_real (type, trunc_result);
		}
	    }
	}
    }

  return NULL_TREE;
}
9383
/* Fold a call to builtin modf.  ARG0 is the value, ARG1 the pointer
   out-parameter for the integral part, RETTYPE the result type.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_modf (tree arg0, tree arg1, tree rettype)
{
  if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg0);

  /* Only fold when the value operand is a usable real constant.  */
  if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
    return NULL_TREE;

  arg1 = build_fold_indirect_ref (arg1);

  /* Proceed if a valid pointer type was passed in.  */
  if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
      REAL_VALUE_TYPE trunc, frac;

      /* Every real value class is handled below, so TRUNC and FRAC are
	 always set before use.  */
      switch (value->cl)
	{
	case rvc_nan:
	case rvc_zero:
	  /* For +-NaN or +-0, return (*arg1 = arg0, arg0).  */
	  trunc = frac = *value;
	  break;
	case rvc_inf:
	  /* For +-Inf, return (*arg1 = arg0, +-0).  */
	  frac = dconst0;
	  frac.sign = value->sign;
	  trunc = *value;
	  break;
	case rvc_normal:
	  /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)).  */
	  real_trunc (&trunc, VOIDmode, value);
	  real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
	  /* If the original number was negative and already
	     integral, then the fractional part is -0.0.  */
	  if (value->sign && frac.cl == rvc_zero)
	    frac.sign = value->sign;
	  break;
	}

      /* Create the COMPOUND_EXPR (*arg1 = trunc, frac).  */
      arg1 = fold_build2 (MODIFY_EXPR, rettype, arg1,
			  build_real (rettype, trunc));
      TREE_SIDE_EFFECTS (arg1) = 1;
      return fold_build2 (COMPOUND_EXPR, rettype, arg1,
			  build_real (rettype, frac));
    }

  return NULL_TREE;
}
9439
/* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
   ARG is the argument for the call; BUILTIN_INDEX selects which of the
   three classifications to fold.  Returns error_mark_node (after
   diagnosing) for a non-float argument, NULL_TREE when no folding is
   possible.  */

static tree
fold_builtin_classify (tree fndecl, tree arg, int builtin_index)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  REAL_VALUE_TYPE r;

  /* These builtins require a floating-point argument; diagnose misuse
     instead of silently declining to fold.  */
  if (!validate_arg (arg, REAL_TYPE))
    {
      error ("non-floating-point argument to function %qs",
	     IDENTIFIER_POINTER (DECL_NAME (fndecl)));
      return error_mark_node;
    }

  switch (builtin_index)
    {
    case BUILT_IN_ISINF:
      /* If the mode has no infinities, isinf is always false.  */
      if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
	return omit_one_operand (type, integer_zero_node, arg);

      if (TREE_CODE (arg) == REAL_CST)
	{
	  r = TREE_REAL_CST (arg);
	  if (real_isinf (&r))
	    return real_compare (GT_EXPR, &r, &dconst0)
		   ? integer_one_node : integer_minus_one_node;
	  else
	    return integer_zero_node;
	}

      return NULL_TREE;

    case BUILT_IN_FINITE:
      /* Without NaNs or infinities every value is finite.  */
      if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
	  && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
	return omit_one_operand (type, integer_one_node, arg);

      if (TREE_CODE (arg) == REAL_CST)
	{
	  r = TREE_REAL_CST (arg);
	  return real_isfinite (&r) ? integer_one_node : integer_zero_node;
	}

      return NULL_TREE;

    case BUILT_IN_ISNAN:
      /* If the mode has no NaNs, isnan is always false.  */
      if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
	return omit_one_operand (type, integer_zero_node, arg);

      if (TREE_CODE (arg) == REAL_CST)
	{
	  r = TREE_REAL_CST (arg);
	  return real_isnan (&r) ? integer_one_node : integer_zero_node;
	}

      /* isnan(x) folds to an unordered self-comparison; save ARG so it
	 is only evaluated once.  */
      arg = builtin_save_expr (arg);
      return fold_build2 (UNORDERED_EXPR, type, arg, arg);

    default:
      gcc_unreachable ();
    }
}
9504
/* Fold a call to an unordered comparison function such as
   __builtin_isgreater().  FNDECL is the FUNCTION_DECL for the function
   being called and ARG0 and ARG1 are the arguments for the call.
   UNORDERED_CODE and ORDERED_CODE are comparison codes that give
   the opposite of the desired result.  UNORDERED_CODE is used
   for modes that can hold NaNs and ORDERED_CODE is used for
   the rest.  */

static tree
fold_builtin_unordered_cmp (tree fndecl, tree arg0, tree arg1,
			    enum tree_code unordered_code,
			    enum tree_code ordered_code)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum tree_code code;
  tree type0, type1;
  enum tree_code code0, code1;
  tree cmp_type = NULL_TREE;

  type0 = TREE_TYPE (arg0);
  type1 = TREE_TYPE (arg1);

  code0 = TREE_CODE (type0);
  code1 = TREE_CODE (type1);

  /* Pick the common type to compare in: at least one argument must be
     real, and an integer argument is converted to the real type.  */
  if (code0 == REAL_TYPE && code1 == REAL_TYPE)
    /* Choose the wider of two real types.  */
    cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
      ? type0 : type1;
  else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
    cmp_type = type0;
  else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
    cmp_type = type1;
  else
    {
      error ("non-floating-point argument to function %qs",
	     IDENTIFIER_POINTER (DECL_NAME (fndecl)));
      return error_mark_node;
    }

  arg0 = fold_convert (cmp_type, arg0);
  arg1 = fold_convert (cmp_type, arg1);

  if (unordered_code == UNORDERED_EXPR)
    {
      /* __builtin_isunordered itself: false when NaNs can't occur.  */
      if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
	return omit_two_operands (type, integer_zero_node, arg0, arg1);
      return fold_build2 (UNORDERED_EXPR, type, arg0, arg1);
    }

  /* The codes passed in give the opposite of the desired result, so
     build that comparison and negate it.  */
  code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
						   : ordered_code;
  return fold_build1 (TRUTH_NOT_EXPR, type,
		      fold_build2 (code, type, arg0, arg1));
}
9560
/* Fold a call to built-in function FNDECL with 0 arguments.
   IGNORE is true if the result of the function call is ignored.  This
   function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_0 (tree fndecl, bool ignore ATTRIBUTE_UNUSED)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  switch (fcode)
    {
    CASE_FLT_FN (BUILT_IN_INF):
    case BUILT_IN_INFD32:
    case BUILT_IN_INFD64:
    case BUILT_IN_INFD128:
      /* The boolean distinguishes the inf family from HUGE_VAL; see
	 fold_builtin_inf for its exact meaning.  */
      return fold_builtin_inf (type, true);

    CASE_FLT_FN (BUILT_IN_HUGE_VAL):
      return fold_builtin_inf (type, false);

    case BUILT_IN_CLASSIFY_TYPE:
      /* With no argument, classify the (absent) NULL_TREE argument.  */
      return fold_builtin_classify_type (NULL_TREE);

    default:
      break;
    }
  return NULL_TREE;
}
9589
9590 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9591 IGNORE is true if the result of the function call is ignored. This
9592 function returns NULL_TREE if no simplification was possible. */
9593
9594 static tree
9595 fold_builtin_1 (tree fndecl, tree arg0, bool ignore)
9596 {
9597 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9598 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9599 switch (fcode)
9600 {
9601
9602 case BUILT_IN_CONSTANT_P:
9603 {
9604 tree val = fold_builtin_constant_p (arg0);
9605
9606 /* Gimplification will pull the CALL_EXPR for the builtin out of
9607 an if condition. When not optimizing, we'll not CSE it back.
9608 To avoid link error types of regressions, return false now. */
9609 if (!val && !optimize)
9610 val = integer_zero_node;
9611
9612 return val;
9613 }
9614
9615 case BUILT_IN_CLASSIFY_TYPE:
9616 return fold_builtin_classify_type (arg0);
9617
9618 case BUILT_IN_STRLEN:
9619 return fold_builtin_strlen (arg0);
9620
9621 CASE_FLT_FN (BUILT_IN_FABS):
9622 return fold_builtin_fabs (arg0, type);
9623
9624 case BUILT_IN_ABS:
9625 case BUILT_IN_LABS:
9626 case BUILT_IN_LLABS:
9627 case BUILT_IN_IMAXABS:
9628 return fold_builtin_abs (arg0, type);
9629
9630 CASE_FLT_FN (BUILT_IN_CONJ):
9631 if (validate_arg (arg0, COMPLEX_TYPE))
9632 return fold_build1 (CONJ_EXPR, type, arg0);
9633 break;
9634
9635 CASE_FLT_FN (BUILT_IN_CREAL):
9636 if (validate_arg (arg0, COMPLEX_TYPE))
9637 return non_lvalue (fold_build1 (REALPART_EXPR, type, arg0));;
9638 break;
9639
9640 CASE_FLT_FN (BUILT_IN_CIMAG):
9641 if (validate_arg (arg0, COMPLEX_TYPE))
9642 return non_lvalue (fold_build1 (IMAGPART_EXPR, type, arg0));
9643 break;
9644
9645 CASE_FLT_FN (BUILT_IN_CCOS):
9646 CASE_FLT_FN (BUILT_IN_CCOSH):
9647 /* These functions are "even", i.e. f(x) == f(-x). */
9648 if (validate_arg (arg0, COMPLEX_TYPE))
9649 {
9650 tree narg = fold_strip_sign_ops (arg0);
9651 if (narg)
9652 return build_call_expr (fndecl, 1, narg);
9653 }
9654 break;
9655
9656 CASE_FLT_FN (BUILT_IN_CABS):
9657 return fold_builtin_cabs (arg0, type, fndecl);
9658
9659 CASE_FLT_FN (BUILT_IN_CARG):
9660 return fold_builtin_carg (arg0, type);
9661
9662 CASE_FLT_FN (BUILT_IN_SQRT):
9663 return fold_builtin_sqrt (arg0, type);
9664
9665 CASE_FLT_FN (BUILT_IN_CBRT):
9666 return fold_builtin_cbrt (arg0, type);
9667
9668 CASE_FLT_FN (BUILT_IN_ASIN):
9669 if (validate_arg (arg0, REAL_TYPE))
9670 return do_mpfr_arg1 (arg0, type, mpfr_asin,
9671 &dconstm1, &dconst1, true);
9672 break;
9673
9674 CASE_FLT_FN (BUILT_IN_ACOS):
9675 if (validate_arg (arg0, REAL_TYPE))
9676 return do_mpfr_arg1 (arg0, type, mpfr_acos,
9677 &dconstm1, &dconst1, true);
9678 break;
9679
9680 CASE_FLT_FN (BUILT_IN_ATAN):
9681 if (validate_arg (arg0, REAL_TYPE))
9682 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
9683 break;
9684
9685 CASE_FLT_FN (BUILT_IN_ASINH):
9686 if (validate_arg (arg0, REAL_TYPE))
9687 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
9688 break;
9689
9690 CASE_FLT_FN (BUILT_IN_ACOSH):
9691 if (validate_arg (arg0, REAL_TYPE))
9692 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
9693 &dconst1, NULL, true);
9694 break;
9695
9696 CASE_FLT_FN (BUILT_IN_ATANH):
9697 if (validate_arg (arg0, REAL_TYPE))
9698 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
9699 &dconstm1, &dconst1, false);
9700 break;
9701
9702 CASE_FLT_FN (BUILT_IN_SIN):
9703 if (validate_arg (arg0, REAL_TYPE))
9704 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
9705 break;
9706
9707 CASE_FLT_FN (BUILT_IN_COS):
9708 return fold_builtin_cos (arg0, type, fndecl);
9709 break;
9710
9711 CASE_FLT_FN (BUILT_IN_TAN):
9712 return fold_builtin_tan (arg0, type);
9713
9714 CASE_FLT_FN (BUILT_IN_CEXP):
9715 return fold_builtin_cexp (arg0, type);
9716
9717 CASE_FLT_FN (BUILT_IN_CEXPI):
9718 if (validate_arg (arg0, REAL_TYPE))
9719 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
9720 break;
9721
9722 CASE_FLT_FN (BUILT_IN_SINH):
9723 if (validate_arg (arg0, REAL_TYPE))
9724 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
9725 break;
9726
9727 CASE_FLT_FN (BUILT_IN_COSH):
9728 return fold_builtin_cosh (arg0, type, fndecl);
9729
9730 CASE_FLT_FN (BUILT_IN_TANH):
9731 if (validate_arg (arg0, REAL_TYPE))
9732 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
9733 break;
9734
9735 CASE_FLT_FN (BUILT_IN_ERF):
9736 if (validate_arg (arg0, REAL_TYPE))
9737 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
9738 break;
9739
9740 CASE_FLT_FN (BUILT_IN_ERFC):
9741 if (validate_arg (arg0, REAL_TYPE))
9742 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
9743 break;
9744
9745 CASE_FLT_FN (BUILT_IN_TGAMMA):
9746 if (validate_arg (arg0, REAL_TYPE))
9747 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
9748 break;
9749
9750 CASE_FLT_FN (BUILT_IN_EXP):
9751 return fold_builtin_exponent (fndecl, arg0, mpfr_exp);
9752
9753 CASE_FLT_FN (BUILT_IN_EXP2):
9754 return fold_builtin_exponent (fndecl, arg0, mpfr_exp2);
9755
9756 CASE_FLT_FN (BUILT_IN_EXP10):
9757 CASE_FLT_FN (BUILT_IN_POW10):
9758 return fold_builtin_exponent (fndecl, arg0, mpfr_exp10);
9759
9760 CASE_FLT_FN (BUILT_IN_EXPM1):
9761 if (validate_arg (arg0, REAL_TYPE))
9762 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
9763 break;
9764
9765 CASE_FLT_FN (BUILT_IN_LOG):
9766 return fold_builtin_logarithm (fndecl, arg0, mpfr_log);
9767
9768 CASE_FLT_FN (BUILT_IN_LOG2):
9769 return fold_builtin_logarithm (fndecl, arg0, mpfr_log2);
9770
9771 CASE_FLT_FN (BUILT_IN_LOG10):
9772 return fold_builtin_logarithm (fndecl, arg0, mpfr_log10);
9773
9774 CASE_FLT_FN (BUILT_IN_LOG1P):
9775 if (validate_arg (arg0, REAL_TYPE))
9776 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
9777 &dconstm1, NULL, false);
9778 break;
9779
9780 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
9781 CASE_FLT_FN (BUILT_IN_J0):
9782 if (validate_arg (arg0, REAL_TYPE))
9783 return do_mpfr_arg1 (arg0, type, mpfr_j0,
9784 NULL, NULL, 0);
9785 break;
9786
9787 CASE_FLT_FN (BUILT_IN_J1):
9788 if (validate_arg (arg0, REAL_TYPE))
9789 return do_mpfr_arg1 (arg0, type, mpfr_j1,
9790 NULL, NULL, 0);
9791 break;
9792
9793 CASE_FLT_FN (BUILT_IN_Y0):
9794 if (validate_arg (arg0, REAL_TYPE))
9795 return do_mpfr_arg1 (arg0, type, mpfr_y0,
9796 &dconst0, NULL, false);
9797 break;
9798
9799 CASE_FLT_FN (BUILT_IN_Y1):
9800 if (validate_arg (arg0, REAL_TYPE))
9801 return do_mpfr_arg1 (arg0, type, mpfr_y1,
9802 &dconst0, NULL, false);
9803 break;
9804 #endif
9805
9806 CASE_FLT_FN (BUILT_IN_NAN):
9807 case BUILT_IN_NAND32:
9808 case BUILT_IN_NAND64:
9809 case BUILT_IN_NAND128:
9810 return fold_builtin_nan (arg0, type, true);
9811
9812 CASE_FLT_FN (BUILT_IN_NANS):
9813 return fold_builtin_nan (arg0, type, false);
9814
9815 CASE_FLT_FN (BUILT_IN_FLOOR):
9816 return fold_builtin_floor (fndecl, arg0);
9817
9818 CASE_FLT_FN (BUILT_IN_CEIL):
9819 return fold_builtin_ceil (fndecl, arg0);
9820
9821 CASE_FLT_FN (BUILT_IN_TRUNC):
9822 return fold_builtin_trunc (fndecl, arg0);
9823
9824 CASE_FLT_FN (BUILT_IN_ROUND):
9825 return fold_builtin_round (fndecl, arg0);
9826
9827 CASE_FLT_FN (BUILT_IN_NEARBYINT):
9828 CASE_FLT_FN (BUILT_IN_RINT):
9829 return fold_trunc_transparent_mathfn (fndecl, arg0);
9830
9831 CASE_FLT_FN (BUILT_IN_LCEIL):
9832 CASE_FLT_FN (BUILT_IN_LLCEIL):
9833 CASE_FLT_FN (BUILT_IN_LFLOOR):
9834 CASE_FLT_FN (BUILT_IN_LLFLOOR):
9835 CASE_FLT_FN (BUILT_IN_LROUND):
9836 CASE_FLT_FN (BUILT_IN_LLROUND):
9837 return fold_builtin_int_roundingfn (fndecl, arg0);
9838
9839 CASE_FLT_FN (BUILT_IN_LRINT):
9840 CASE_FLT_FN (BUILT_IN_LLRINT):
9841 return fold_fixed_mathfn (fndecl, arg0);
9842
9843 case BUILT_IN_BSWAP32:
9844 case BUILT_IN_BSWAP64:
9845 return fold_builtin_bswap (fndecl, arg0);
9846
9847 CASE_INT_FN (BUILT_IN_FFS):
9848 CASE_INT_FN (BUILT_IN_CLZ):
9849 CASE_INT_FN (BUILT_IN_CTZ):
9850 CASE_INT_FN (BUILT_IN_POPCOUNT):
9851 CASE_INT_FN (BUILT_IN_PARITY):
9852 return fold_builtin_bitop (fndecl, arg0);
9853
9854 CASE_FLT_FN (BUILT_IN_SIGNBIT):
9855 return fold_builtin_signbit (arg0, type);
9856
9857 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
9858 return fold_builtin_significand (arg0, type);
9859
9860 CASE_FLT_FN (BUILT_IN_ILOGB):
9861 CASE_FLT_FN (BUILT_IN_LOGB):
9862 return fold_builtin_logb (arg0, type);
9863
9864 case BUILT_IN_ISASCII:
9865 return fold_builtin_isascii (arg0);
9866
9867 case BUILT_IN_TOASCII:
9868 return fold_builtin_toascii (arg0);
9869
9870 case BUILT_IN_ISDIGIT:
9871 return fold_builtin_isdigit (arg0);
9872
9873 CASE_FLT_FN (BUILT_IN_FINITE):
9874 case BUILT_IN_FINITED32:
9875 case BUILT_IN_FINITED64:
9876 case BUILT_IN_FINITED128:
9877 return fold_builtin_classify (fndecl, arg0, BUILT_IN_FINITE);
9878
9879 CASE_FLT_FN (BUILT_IN_ISINF):
9880 case BUILT_IN_ISINFD32:
9881 case BUILT_IN_ISINFD64:
9882 case BUILT_IN_ISINFD128:
9883 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISINF);
9884
9885 CASE_FLT_FN (BUILT_IN_ISNAN):
9886 case BUILT_IN_ISNAND32:
9887 case BUILT_IN_ISNAND64:
9888 case BUILT_IN_ISNAND128:
9889 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISNAN);
9890
9891 case BUILT_IN_PRINTF:
9892 case BUILT_IN_PRINTF_UNLOCKED:
9893 case BUILT_IN_VPRINTF:
9894 return fold_builtin_printf (fndecl, arg0, NULL_TREE, ignore, fcode);
9895
9896 default:
9897 break;
9898 }
9899
9900 return NULL_TREE;
9901
9902 }
9903
/* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
   IGNORE is true if the result of the function call is ignored.  This
   function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_2 (tree fndecl, tree arg0, tree arg1, bool ignore)
{
  /* TYPE is the return type of the builtin being folded.  */
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  switch (fcode)
    {
    /* The MPFR-based folders evaluate math builtins at compile time
       when their arguments are constants; the bessel/gamma entries
       need MPFR >= 2.3.0.  */
#if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
    CASE_FLT_FN (BUILT_IN_JN):
      if (validate_arg (arg0, INTEGER_TYPE)
	  && validate_arg (arg1, REAL_TYPE))
	return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
    break;

    CASE_FLT_FN (BUILT_IN_YN):
      if (validate_arg (arg0, INTEGER_TYPE)
	  && validate_arg (arg1, REAL_TYPE))
	return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
				 &dconst0, false);
    break;

    CASE_FLT_FN (BUILT_IN_DREM):
    CASE_FLT_FN (BUILT_IN_REMAINDER):
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg(arg1, REAL_TYPE))
	return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
    break;

    CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
    CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg(arg1, POINTER_TYPE))
	return do_mpfr_lgamma_r (arg0, arg1, type);
    break;
#endif

    CASE_FLT_FN (BUILT_IN_ATAN2):
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg(arg1, REAL_TYPE))
	return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
    break;

    CASE_FLT_FN (BUILT_IN_FDIM):
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg(arg1, REAL_TYPE))
	return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
    break;

    CASE_FLT_FN (BUILT_IN_HYPOT):
      return fold_builtin_hypot (fndecl, arg0, arg1, type);

    /* ldexp scales by a power of 2; scalbn/scalbln scale by a power of
       FLT_RADIX — the boolean distinguishes the two.  */
    CASE_FLT_FN (BUILT_IN_LDEXP):
      return fold_builtin_load_exponent (arg0, arg1, type, /*ldexp=*/true);
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
      return fold_builtin_load_exponent (arg0, arg1, type, /*ldexp=*/false);

    CASE_FLT_FN (BUILT_IN_FREXP):
      return fold_builtin_frexp (arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_MODF):
      return fold_builtin_modf (arg0, arg1, type);

    case BUILT_IN_BZERO:
      return fold_builtin_bzero (arg0, arg1, ignore);

    case BUILT_IN_FPUTS:
      return fold_builtin_fputs (arg0, arg1, ignore, false, NULL_TREE);

    case BUILT_IN_FPUTS_UNLOCKED:
      return fold_builtin_fputs (arg0, arg1, ignore, true, NULL_TREE);

    case BUILT_IN_STRSTR:
      return fold_builtin_strstr (arg0, arg1, type);

    case BUILT_IN_STRCAT:
      return fold_builtin_strcat (arg0, arg1);

    case BUILT_IN_STRSPN:
      return fold_builtin_strspn (arg0, arg1);

    case BUILT_IN_STRCSPN:
      return fold_builtin_strcspn (arg0, arg1);

    /* index/rindex are the BSD spellings of strchr/strrchr.  */
    case BUILT_IN_STRCHR:
    case BUILT_IN_INDEX:
      return fold_builtin_strchr (arg0, arg1, type);

    case BUILT_IN_STRRCHR:
    case BUILT_IN_RINDEX:
      return fold_builtin_strrchr (arg0, arg1, type);

    case BUILT_IN_STRCPY:
      return fold_builtin_strcpy (fndecl, arg0, arg1, NULL_TREE);

    case BUILT_IN_STRCMP:
      return fold_builtin_strcmp (arg0, arg1);

    case BUILT_IN_STRPBRK:
      return fold_builtin_strpbrk (arg0, arg1, type);

    case BUILT_IN_EXPECT:
      return fold_builtin_expect (arg0);

    CASE_FLT_FN (BUILT_IN_POW):
      return fold_builtin_pow (fndecl, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_POWI):
      return fold_builtin_powi (fndecl, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_COPYSIGN):
      return fold_builtin_copysign (fndecl, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_FMIN):
      return fold_builtin_fmin_fmax (arg0, arg1, type, /*max=*/false);

    CASE_FLT_FN (BUILT_IN_FMAX):
      return fold_builtin_fmin_fmax (arg0, arg1, type, /*max=*/true);

    /* The isgreater/isless family is folded to the inverse of the
       corresponding unordered comparison; the second tree code is the
       ordered comparison used when neither operand can be a NaN.  */
    case BUILT_IN_ISGREATER:
      return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNLE_EXPR, LE_EXPR);
    case BUILT_IN_ISGREATEREQUAL:
      return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNLT_EXPR, LT_EXPR);
    case BUILT_IN_ISLESS:
      return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNGE_EXPR, GE_EXPR);
    case BUILT_IN_ISLESSEQUAL:
      return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNGT_EXPR, GT_EXPR);
    case BUILT_IN_ISLESSGREATER:
      return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNEQ_EXPR, EQ_EXPR);
    case BUILT_IN_ISUNORDERED:
      return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNORDERED_EXPR,
					 NOP_EXPR);

    /* We do the folding for va_start in the expander.  */
    case BUILT_IN_VA_START:
      break;

    case BUILT_IN_SPRINTF:
      return fold_builtin_sprintf (arg0, arg1, NULL_TREE, ignore);

    case BUILT_IN_OBJECT_SIZE:
      return fold_builtin_object_size (arg0, arg1);

    case BUILT_IN_PRINTF:
    case BUILT_IN_PRINTF_UNLOCKED:
    case BUILT_IN_VPRINTF:
      return fold_builtin_printf (fndecl, arg0, arg1, ignore, fcode);

    case BUILT_IN_PRINTF_CHK:
    case BUILT_IN_VPRINTF_CHK:
      /* For the _chk variants ARG0 is the object-size checking flag;
	 it must be a side-effect-free integer before we can drop it
	 and fold as the plain printf with ARG1 as the format.  */
      if (!validate_arg (arg0, INTEGER_TYPE)
	  || TREE_SIDE_EFFECTS (arg0))
	return NULL_TREE;
      else
	return fold_builtin_printf (fndecl, arg1, NULL_TREE, ignore, fcode);
    break;

    case BUILT_IN_FPRINTF:
    case BUILT_IN_FPRINTF_UNLOCKED:
    case BUILT_IN_VFPRINTF:
      return fold_builtin_fprintf (fndecl, arg0, arg1, NULL_TREE,
				   ignore, fcode);

    default:
      break;
    }
  return NULL_TREE;
}
10077
10078 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10079 and ARG2. IGNORE is true if the result of the function call is ignored.
10080 This function returns NULL_TREE if no simplification was possible. */
10081
10082 static tree
10083 fold_builtin_3 (tree fndecl, tree arg0, tree arg1, tree arg2, bool ignore)
10084 {
10085 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10086 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10087 switch (fcode)
10088 {
10089
10090 CASE_FLT_FN (BUILT_IN_SINCOS):
10091 return fold_builtin_sincos (arg0, arg1, arg2);
10092
10093 CASE_FLT_FN (BUILT_IN_FMA):
10094 if (validate_arg (arg0, REAL_TYPE)
10095 && validate_arg(arg1, REAL_TYPE)
10096 && validate_arg(arg2, REAL_TYPE))
10097 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
10098 break;
10099
10100 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
10101 CASE_FLT_FN (BUILT_IN_REMQUO):
10102 if (validate_arg (arg0, REAL_TYPE)
10103 && validate_arg(arg1, REAL_TYPE)
10104 && validate_arg(arg2, POINTER_TYPE))
10105 return do_mpfr_remquo (arg0, arg1, arg2);
10106 break;
10107 #endif
10108
10109 case BUILT_IN_MEMSET:
10110 return fold_builtin_memset (arg0, arg1, arg2, type, ignore);
10111
10112 case BUILT_IN_BCOPY:
10113 return fold_builtin_memory_op (arg1, arg0, arg2, void_type_node, true, /*endp=*/3);
10114
10115 case BUILT_IN_MEMCPY:
10116 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/0);
10117
10118 case BUILT_IN_MEMPCPY:
10119 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/1);
10120
10121 case BUILT_IN_MEMMOVE:
10122 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/3);
10123
10124 case BUILT_IN_STRNCAT:
10125 return fold_builtin_strncat (arg0, arg1, arg2);
10126
10127 case BUILT_IN_STRNCPY:
10128 return fold_builtin_strncpy (fndecl, arg0, arg1, arg2, NULL_TREE);
10129
10130 case BUILT_IN_STRNCMP:
10131 return fold_builtin_strncmp (arg0, arg1, arg2);
10132
10133 case BUILT_IN_MEMCHR:
10134 return fold_builtin_memchr (arg0, arg1, arg2, type);
10135
10136 case BUILT_IN_BCMP:
10137 case BUILT_IN_MEMCMP:
10138 return fold_builtin_memcmp (arg0, arg1, arg2);;
10139
10140 case BUILT_IN_SPRINTF:
10141 return fold_builtin_sprintf (arg0, arg1, arg2, ignore);
10142
10143 case BUILT_IN_STRCPY_CHK:
10144 case BUILT_IN_STPCPY_CHK:
10145 return fold_builtin_stxcpy_chk (fndecl, arg0, arg1, arg2, NULL_TREE,
10146 ignore, fcode);
10147
10148 case BUILT_IN_STRCAT_CHK:
10149 return fold_builtin_strcat_chk (fndecl, arg0, arg1, arg2);
10150
10151 case BUILT_IN_PRINTF_CHK:
10152 case BUILT_IN_VPRINTF_CHK:
10153 if (!validate_arg (arg0, INTEGER_TYPE)
10154 || TREE_SIDE_EFFECTS (arg0))
10155 return NULL_TREE;
10156 else
10157 return fold_builtin_printf (fndecl, arg1, arg2, ignore, fcode);
10158 break;
10159
10160 case BUILT_IN_FPRINTF:
10161 case BUILT_IN_FPRINTF_UNLOCKED:
10162 case BUILT_IN_VFPRINTF:
10163 return fold_builtin_fprintf (fndecl, arg0, arg1, arg2, ignore, fcode);
10164
10165 case BUILT_IN_FPRINTF_CHK:
10166 case BUILT_IN_VFPRINTF_CHK:
10167 if (!validate_arg (arg1, INTEGER_TYPE)
10168 || TREE_SIDE_EFFECTS (arg1))
10169 return NULL_TREE;
10170 else
10171 return fold_builtin_fprintf (fndecl, arg0, arg2, NULL_TREE,
10172 ignore, fcode);
10173
10174 default:
10175 break;
10176 }
10177 return NULL_TREE;
10178 }
10179
/* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
   ARG2, and ARG3.  IGNORE is true if the result of the function call is
   ignored.  This function returns NULL_TREE if no simplification was
   possible.  */

static tree
fold_builtin_4 (tree fndecl, tree arg0, tree arg1, tree arg2, tree arg3,
		bool ignore)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      return fold_builtin_memory_chk (fndecl, arg0, arg1, arg2, arg3,
				      NULL_TREE, ignore,
				      DECL_FUNCTION_CODE (fndecl));

    case BUILT_IN_STRNCPY_CHK:
      return fold_builtin_strncpy_chk (arg0, arg1, arg2, arg3, NULL_TREE);

    case BUILT_IN_STRNCAT_CHK:
      return fold_builtin_strncat_chk (fndecl, arg0, arg1, arg2, arg3);

    case BUILT_IN_FPRINTF_CHK:
    case BUILT_IN_VFPRINTF_CHK:
      /* ARG0 is the stream, ARG1 the object-size checking flag; the
	 flag must be a side-effect-free integer before we can drop it
	 and fold as a plain fprintf with ARG2 as the format string.  */
      if (!validate_arg (arg1, INTEGER_TYPE)
	  || TREE_SIDE_EFFECTS (arg1))
	return NULL_TREE;
      else
	return fold_builtin_fprintf (fndecl, arg0, arg2, arg3,
				     ignore, fcode);
      break;

    default:
      break;
    }
  return NULL_TREE;
}
10222
/* Fold a call to built-in function FNDECL.  ARGS is an array of NARGS
   arguments, where NARGS <= 4.  IGNORE is true if the result of the
   function call is ignored.  This function returns NULL_TREE if no
   simplification was possible.  Note that this only folds builtins with
   fixed argument patterns.  Foldings that do varargs-to-varargs
   transformations, or that match calls with more than 4 arguments,
   need to be handled with fold_builtin_varargs instead.  */

#define MAX_ARGS_TO_FOLD_BUILTIN 4

static tree
fold_builtin_n (tree fndecl, tree *args, int nargs, bool ignore)
{
  tree ret = NULL_TREE;
  /* Dispatch on argument count to the fixed-arity folders; counts
     above MAX_ARGS_TO_FOLD_BUILTIN fall through and yield NULL_TREE.  */
  switch (nargs)
    {
    case 0:
      ret = fold_builtin_0 (fndecl, ignore);
      break;
    case 1:
      ret = fold_builtin_1 (fndecl, args[0], ignore);
      break;
    case 2:
      ret = fold_builtin_2 (fndecl, args[0], args[1], ignore);
      break;
    case 3:
      ret = fold_builtin_3 (fndecl, args[0], args[1], args[2], ignore);
      break;
    case 4:
      ret = fold_builtin_4 (fndecl, args[0], args[1], args[2], args[3],
 			    ignore);
      break;
    default:
      break;
    }
  if (ret)
    {
      /* Wrap the folded result in a no-op conversion with
	 TREE_NO_WARNING set, so removing the call node does not later
	 trigger spurious "statement without effect"-style warnings
	 (see fold_call_expr's header comment).  */
      ret = build1 (NOP_EXPR, GENERIC_TREE_TYPE (ret), ret);
      TREE_NO_WARNING (ret) = 1;
      return ret;
    }
  return NULL_TREE;
}
10266
10267 /* Builtins with folding operations that operate on "..." arguments
10268 need special handling; we need to store the arguments in a convenient
10269 data structure before attempting any folding. Fortunately there are
10270 only a few builtins that fall into this category. FNDECL is the
10271 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
10272 result of the function call is ignored. */
10273
10274 static tree
10275 fold_builtin_varargs (tree fndecl, tree exp, bool ignore ATTRIBUTE_UNUSED)
10276 {
10277 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10278 tree ret = NULL_TREE;
10279
10280 switch (fcode)
10281 {
10282 case BUILT_IN_SPRINTF_CHK:
10283 case BUILT_IN_VSPRINTF_CHK:
10284 ret = fold_builtin_sprintf_chk (exp, fcode);
10285 break;
10286
10287 case BUILT_IN_SNPRINTF_CHK:
10288 case BUILT_IN_VSNPRINTF_CHK:
10289 ret = fold_builtin_snprintf_chk (exp, NULL_TREE, fcode);
10290
10291 default:
10292 break;
10293 }
10294 if (ret)
10295 {
10296 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10297 TREE_NO_WARNING (ret) = 1;
10298 return ret;
10299 }
10300 return NULL_TREE;
10301 }
10302
/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  EXP is the
   CALL_EXPR to fold; IGNORE is true if its value is unused.  Returns
   the folded replacement tree, or NULL_TREE if no folding applied.  */

tree
fold_call_expr (tree exp, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = get_callee_fndecl (exp);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl))
    {
      /* Machine-dependent builtins are delegated to the target hook.
	 FIXME: Don't use a list in this interface.  */
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	return targetm.fold_builtin (fndecl, CALL_EXPR_ARGS (exp), ignore);
      else
	{
	  /* Try the fixed-arity folders first (cheap, no CALL_EXPR
	     consing), then fall back to the varargs folders.  */
	  int nargs = call_expr_nargs (exp);
	  if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
	    {
	      tree *args = CALL_EXPR_ARGP (exp);
	      ret = fold_builtin_n (fndecl, args, nargs, ignore);
	    }
	  if (!ret)
	    ret = fold_builtin_varargs (fndecl, exp, ignore);
	  if (ret)
	    {
	      /* Propagate location information from original call to
		 expansion of builtin.  Otherwise things like
		 maybe_emit_chk_warning, that operate on the expansion
		 of a builtin, will use the wrong location information.  */
	      if (CAN_HAVE_LOCATION_P (exp) && EXPR_HAS_LOCATION (exp))
		{
		  /* The folders wrap results in a NOP_EXPR; attach the
		     location to the real expression underneath.  */
		  tree realret = ret;
		  if (TREE_CODE (ret) == NOP_EXPR)
		    realret = TREE_OPERAND (ret, 0);
		  if (CAN_HAVE_LOCATION_P (realret)
		      && !EXPR_HAS_LOCATION (realret))
		    SET_EXPR_LOCATION (realret, EXPR_LOCATION (exp));
		}
	      return ret;
	    }
	}
    }
  return NULL_TREE;
}
10350
/* Conveniently construct a function call expression.  FNDECL names the
   function to be called and ARGLIST is a TREE_LIST of arguments.  The
   list is flattened into an array so the call can be built (and
   possibly folded) via fold_builtin_call_array.  */

tree
build_function_call_expr (tree fndecl, tree arglist)
{
  tree fntype = TREE_TYPE (fndecl);
  /* Build the function address operand of the CALL_EXPR.  */
  tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
  int n = list_length (arglist);
  tree *argarray = (tree *) alloca (n * sizeof (tree));
  int i;

  for (i = 0; i < n; i++, arglist = TREE_CHAIN (arglist))
    argarray[i] = TREE_VALUE (arglist);
  return fold_builtin_call_array (TREE_TYPE (fntype), fn, n, argarray);
}
10367
10368 /* Conveniently construct a function call expression. FNDECL names the
10369 function to be called, N is the number of arguments, and the "..."
10370 parameters are the argument expressions. */
10371
10372 tree
10373 build_call_expr (tree fndecl, int n, ...)
10374 {
10375 va_list ap;
10376 tree fntype = TREE_TYPE (fndecl);
10377 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10378 tree *argarray = (tree *) alloca (n * sizeof (tree));
10379 int i;
10380
10381 va_start (ap, n);
10382 for (i = 0; i < n; i++)
10383 argarray[i] = va_arg (ap, tree);
10384 va_end (ap);
10385 return fold_builtin_call_array (TREE_TYPE (fntype), fn, n, argarray);
10386 }
10387
/* Construct a CALL_EXPR with type TYPE with FN as the function expression.
   N arguments are passed in the array ARGARRAY.  If FN is a builtin,
   try to fold the call first and return the folded tree; otherwise
   (or if no folding applies) return a fresh CALL_EXPR.  */

tree
fold_builtin_call_array (tree type,
			 tree fn,
			 int n,
			 tree *argarray)
{
  tree ret = NULL_TREE;
  int i;
  tree exp;

  if (TREE_CODE (fn) == ADDR_EXPR)
    {
      tree fndecl = TREE_OPERAND (fn, 0);
      if (TREE_CODE (fndecl) == FUNCTION_DECL
	  && DECL_BUILT_IN (fndecl))
	{
	  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	    {
	      /* The target fold hook still takes a TREE_LIST; rebuild
		 one (in order) from the array.  */
	      tree arglist = NULL_TREE;
	      for (i = n - 1; i >= 0; i--)
		arglist = tree_cons (NULL_TREE, argarray[i], arglist);
	      ret = targetm.fold_builtin (fndecl, arglist, false);
	      if (ret)
		return ret;
	    }
	  else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
	    {
	      /* First try the transformations that don't require consing up
		 an exp.  */
	      ret = fold_builtin_n (fndecl, argarray, n, false);
	      if (ret)
		return ret;
	    }

	  /* If we got this far, we need to build an exp.  */
	  exp = build_call_array (type, fn, n, argarray);
	  ret = fold_builtin_varargs (fndecl, exp, false);
	  return ret ? ret : exp;
	}
    }

  /* Not a recognized builtin: just build the plain call.  */
  return build_call_array (type, fn, n, argarray);
}
10434
/* Construct a new CALL_EXPR using the tail of the argument list of EXP
   along with N new arguments specified as the "..." parameters.  SKIP
   is the number of arguments in EXP to be omitted.  This function is used
   to do varargs-to-varargs transformations.  */

static tree
rewrite_call_expr (tree exp, int skip, tree fndecl, int n, ...)
{
  int oldnargs = call_expr_nargs (exp);
  int nargs = oldnargs - skip + n;
  tree fntype = TREE_TYPE (fndecl);
  tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
  tree *buffer;

  if (n > 0)
    {
      int i, j;
      va_list ap;

      /* Copy the N new arguments followed by EXP's remaining ones
	 into a fresh stack buffer.  */
      buffer = alloca (nargs * sizeof (tree));
      va_start (ap, n);
      for (i = 0; i < n; i++)
	buffer[i] = va_arg (ap, tree);
      va_end (ap);
      for (j = skip; j < oldnargs; j++, i++)
	buffer[i] = CALL_EXPR_ARG (exp, j);
    }
  else
    /* No new arguments: point directly into EXP's argument vector
       (no copy is made, so BUFFER aliases EXP here).  */
    buffer = CALL_EXPR_ARGP (exp) + skip;

  return fold (build_call_array (TREE_TYPE (exp), fn, nargs, buffer));
}
10467
10468 /* Validate a single argument ARG against a tree code CODE representing
10469 a type. */
10470
10471 static bool
10472 validate_arg (tree arg, enum tree_code code)
10473 {
10474 if (!arg)
10475 return false;
10476 else if (code == POINTER_TYPE)
10477 return POINTER_TYPE_P (TREE_TYPE (arg));
10478 return code == TREE_CODE (TREE_TYPE (arg));
10479 }
10480
/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipses, otherwise the last specifier must be a
   VOID_TYPE.  Returns true if the argument list matches.  */

bool
validate_arglist (tree callexpr, ...)
{
  enum tree_code code;
  bool res = 0;
  va_list ap;
  call_expr_arg_iterator iter;
  tree arg;

  va_start (ap, callexpr);
  init_call_expr_arg_iterator (callexpr, &iter);

  /* Walk the specifier list and the call's arguments in lockstep.  */
  do
    {
      code = va_arg (ap, enum tree_code);
      switch (code)
	{
	case 0:
	  /* This signifies an ellipses, any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink, if no arguments remain, return
	     true, otherwise return false.  */
	  res = !more_call_expr_args_p (&iter);
	  goto end;
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = next_call_expr_arg (&iter);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }
  while (1);

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  return res;
}
10531
/* Default target-specific builtin expander that does nothing.  Used as
   the fallback for targetm.expand_builtin; returning NULL_RTX tells
   the caller that no target expansion was produced.  */

rtx
default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
			rtx target ATTRIBUTE_UNUSED,
			rtx subtarget ATTRIBUTE_UNUSED,
			enum machine_mode mode ATTRIBUTE_UNUSED,
			int ignore ATTRIBUTE_UNUSED)
{
  return NULL_RTX;
}
10543
10544 /* Returns true is EXP represents data that would potentially reside
10545 in a readonly section. */
10546
10547 static bool
10548 readonly_data_expr (tree exp)
10549 {
10550 STRIP_NOPS (exp);
10551
10552 if (TREE_CODE (exp) != ADDR_EXPR)
10553 return false;
10554
10555 exp = get_base_address (TREE_OPERAND (exp, 0));
10556 if (!exp)
10557 return false;
10558
10559 /* Make sure we call decl_readonly_section only for trees it
10560 can handle (since it returns true for everything it doesn't
10561 understand). */
10562 if (TREE_CODE (exp) == STRING_CST
10563 || TREE_CODE (exp) == CONSTRUCTOR
10564 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
10565 return decl_readonly_section (exp, 0);
10566 else
10567 return false;
10568 }
10569
/* Simplify a call to the strstr builtin.  S1 and S2 are the arguments
   to the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strstr (tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      tree fn;
      const char *p1, *p2;

      /* All simplifications require the needle to be a constant string.  */
      p2 = c_getstr (s2);
      if (p2 == NULL)
	return NULL_TREE;

      p1 = c_getstr (s1);
      if (p1 != NULL)
	{
	  /* Both strings constant: compute the result at compile time.  */
	  const char *r = strstr (p1, p2);
	  tree tem;

	  if (r == NULL)
	    return build_int_cst (TREE_TYPE (s1), 0);

	  /* Return an offset into the constant string argument.  */
	  tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
			     s1, size_int (r - p1));
	  return fold_convert (type, tem);
	}

      /* The argument is const char *, and the result is char *, so we need
	 a type conversion here to avoid a warning.  */
      if (p2[0] == '\0')
	return fold_convert (type, s1);

      /* Only a single-character needle can be turned into strchr.  */
      if (p2[1] != '\0')
	return NULL_TREE;

      fn = implicit_built_in_decls[BUILT_IN_STRCHR];
      if (!fn)
	return NULL_TREE;

      /* New argument list transforming strstr(s1, s2) to
	 strchr(s1, s2[0]).  */
      return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
    }
}
10635
10636 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
10637 the call, and TYPE is its return type.
10638
10639 Return NULL_TREE if no simplification was possible, otherwise return the
10640 simplified form of the call as a tree.
10641
10642 The simplified form may be a constant or other expression which
10643 computes the same value, but in a more efficient manner (including
10644 calls to other builtin functions).
10645
10646 The call may contain arguments which need to be evaluated, but
10647 which are not useful to determine the result of the call. In
10648 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10649 COMPOUND_EXPR will be an argument which must be evaluated.
10650 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10651 COMPOUND_EXPR in the chain will contain the tree for the simplified
10652 form of the builtin function call. */
10653
10654 static tree
10655 fold_builtin_strchr (tree s1, tree s2, tree type)
10656 {
10657 if (!validate_arg (s1, POINTER_TYPE)
10658 || !validate_arg (s2, INTEGER_TYPE))
10659 return NULL_TREE;
10660 else
10661 {
10662 const char *p1;
10663
10664 if (TREE_CODE (s2) != INTEGER_CST)
10665 return NULL_TREE;
10666
10667 p1 = c_getstr (s1);
10668 if (p1 != NULL)
10669 {
10670 char c;
10671 const char *r;
10672 tree tem;
10673
10674 if (target_char_cast (s2, &c))
10675 return NULL_TREE;
10676
10677 r = strchr (p1, c);
10678
10679 if (r == NULL)
10680 return build_int_cst (TREE_TYPE (s1), 0);
10681
10682 /* Return an offset into the constant string argument. */
10683 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
10684 s1, size_int (r - p1));
10685 return fold_convert (type, tem);
10686 }
10687 return NULL_TREE;
10688 }
10689 }
10690
/* Simplify a call to the strrchr builtin.  S1 and S2 are the arguments to
   the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strrchr (tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      tree fn;
      const char *p1;

      /* The character must be a compile-time constant.  */
      if (TREE_CODE (s2) != INTEGER_CST)
	return NULL_TREE;

      p1 = c_getstr (s1);
      if (p1 != NULL)
	{
	  /* Constant string: compute the result at compile time.  */
	  char c;
	  const char *r;
	  tree tem;

	  if (target_char_cast (s2, &c))
	    return NULL_TREE;

	  r = strrchr (p1, c);

	  if (r == NULL)
	    return build_int_cst (TREE_TYPE (s1), 0);

	  /* Return an offset into the constant string argument.  */
	  tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
			     s1, size_int (r - p1));
	  return fold_convert (type, tem);
	}

      /* Non-constant string: only the '\0' search can be simplified,
	 since the first and last occurrence of the terminator coincide.  */
      if (! integer_zerop (s2))
	return NULL_TREE;

      fn = implicit_built_in_decls[BUILT_IN_STRCHR];
      if (!fn)
	return NULL_TREE;

      /* Transform strrchr(s1, '\0') to strchr(s1, '\0').  */
      return build_call_expr (fn, 2, s1, s2);
    }
}
10755
/* Simplify a call to the strpbrk builtin.  S1 and S2 are the arguments
   to the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strpbrk (tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      tree fn;
      const char *p1, *p2;

      /* The accept set must be a constant string for any simplification.  */
      p2 = c_getstr (s2);
      if (p2 == NULL)
	return NULL_TREE;

      p1 = c_getstr (s1);
      if (p1 != NULL)
	{
	  /* Both strings constant: compute the result at compile time.  */
	  const char *r = strpbrk (p1, p2);
	  tree tem;

	  if (r == NULL)
	    return build_int_cst (TREE_TYPE (s1), 0);

	  /* Return an offset into the constant string argument.  */
	  tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
			     s1, size_int (r - p1));
	  return fold_convert (type, tem);
	}

      if (p2[0] == '\0')
	/* strpbrk(x, "") == NULL.
	   Evaluate and ignore s1 in case it had side-effects.  */
	return omit_one_operand (TREE_TYPE (s1), integer_zero_node, s1);

      if (p2[1] != '\0')
	return NULL_TREE;  /* Really call strpbrk.  */

      fn = implicit_built_in_decls[BUILT_IN_STRCHR];
      if (!fn)
	return NULL_TREE;

      /* New argument list transforming strpbrk(s1, s2) to
	 strchr(s1, s2[0]).  */
      return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
    }
}
10821
10822 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
10823 to the call.
10824
10825 Return NULL_TREE if no simplification was possible, otherwise return the
10826 simplified form of the call as a tree.
10827
10828 The simplified form may be a constant or other expression which
10829 computes the same value, but in a more efficient manner (including
10830 calls to other builtin functions).
10831
10832 The call may contain arguments which need to be evaluated, but
10833 which are not useful to determine the result of the call. In
10834 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10835 COMPOUND_EXPR will be an argument which must be evaluated.
10836 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10837 COMPOUND_EXPR in the chain will contain the tree for the simplified
10838 form of the builtin function call. */
10839
10840 static tree
10841 fold_builtin_strcat (tree dst, tree src)
10842 {
10843 if (!validate_arg (dst, POINTER_TYPE)
10844 || !validate_arg (src, POINTER_TYPE))
10845 return NULL_TREE;
10846 else
10847 {
10848 const char *p = c_getstr (src);
10849
10850 /* If the string length is zero, return the dst parameter. */
10851 if (p && *p == '\0')
10852 return dst;
10853
10854 return NULL_TREE;
10855 }
10856 }
10857
/* Simplify a call to the strncat builtin.  DST, SRC, and LEN are the
   arguments to the call.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strncat (tree dst, tree src, tree len)
{
  if (!validate_arg (dst, POINTER_TYPE)
      || !validate_arg (src, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      const char *p = c_getstr (src);

      /* If the requested length is zero, or the src parameter string
	 length is zero, return the dst parameter.  */
      if (integer_zerop (len) || (p && *p == '\0'))
	return omit_two_operands (TREE_TYPE (dst), dst, src, len);

      /* If the requested len is greater than or equal to the string
	 length, call strcat.  */
      if (TREE_CODE (len) == INTEGER_CST && p
	  && compare_tree_int (len, strlen (p)) >= 0)
	{
	  tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];

	  /* If the replacement _DECL isn't initialized, don't do the
	     transformation.  */
	  if (!fn)
	    return NULL_TREE;

	  return build_call_expr (fn, 2, dst, src);
	}
      return NULL_TREE;
    }
}
10909
/* Simplify a call to the strspn builtin.  S1 and S2 are the arguments
   to the call.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strspn (tree s1, tree s2)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);

      /* If both arguments are constants, evaluate at compile-time.  */
      if (p1 && p2)
	{
	  const size_t r = strspn (p1, p2);
	  return size_int (r);
	}

      /* If either argument is "", return NULL_TREE.  */
      if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
	/* Evaluate and ignore both arguments in case either one has
	   side-effects.  */
	return omit_two_operands (integer_type_node, integer_zero_node,
				  s1, s2);
      return NULL_TREE;
    }
}
10954
10955 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
10956 to the call.
10957
10958 Return NULL_TREE if no simplification was possible, otherwise return the
10959 simplified form of the call as a tree.
10960
10961 The simplified form may be a constant or other expression which
10962 computes the same value, but in a more efficient manner (including
10963 calls to other builtin functions).
10964
10965 The call may contain arguments which need to be evaluated, but
10966 which are not useful to determine the result of the call. In
10967 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10968 COMPOUND_EXPR will be an argument which must be evaluated.
10969 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10970 COMPOUND_EXPR in the chain will contain the tree for the simplified
10971 form of the builtin function call. */
10972
10973 static tree
10974 fold_builtin_strcspn (tree s1, tree s2)
10975 {
10976 if (!validate_arg (s1, POINTER_TYPE)
10977 || !validate_arg (s2, POINTER_TYPE))
10978 return NULL_TREE;
10979 else
10980 {
10981 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
10982
10983 /* If both arguments are constants, evaluate at compile-time. */
10984 if (p1 && p2)
10985 {
10986 const size_t r = strcspn (p1, p2);
10987 return size_int (r);
10988 }
10989
10990 /* If the first argument is "", return NULL_TREE. */
10991 if (p1 && *p1 == '\0')
10992 {
10993 /* Evaluate and ignore argument s2 in case it has
10994 side-effects. */
10995 return omit_one_operand (integer_type_node,
10996 integer_zero_node, s2);
10997 }
10998
10999 /* If the second argument is "", return __builtin_strlen(s1). */
11000 if (p2 && *p2 == '\0')
11001 {
11002 tree fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11003
11004 /* If the replacement _DECL isn't initialized, don't do the
11005 transformation. */
11006 if (!fn)
11007 return NULL_TREE;
11008
11009 return build_call_expr (fn, 1, s1);
11010 }
11011 return NULL_TREE;
11012 }
11013 }
11014
11015 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11016 to the call. IGNORE is true if the value returned
11017 by the builtin will be ignored. UNLOCKED is true is true if this
11018 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
11019 the known length of the string. Return NULL_TREE if no simplification
11020 was possible. */
11021
11022 tree
11023 fold_builtin_fputs (tree arg0, tree arg1, bool ignore, bool unlocked, tree len)
11024 {
11025 /* If we're using an unlocked function, assume the other unlocked
11026 functions exist explicitly. */
11027 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
11028 : implicit_built_in_decls[BUILT_IN_FPUTC];
11029 tree const fn_fwrite = unlocked ? built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
11030 : implicit_built_in_decls[BUILT_IN_FWRITE];
11031
11032 /* If the return value is used, don't do the transformation. */
11033 if (!ignore)
11034 return NULL_TREE;
11035
11036 /* Verify the arguments in the original call. */
11037 if (!validate_arg (arg0, POINTER_TYPE)
11038 || !validate_arg (arg1, POINTER_TYPE))
11039 return NULL_TREE;
11040
11041 if (! len)
11042 len = c_strlen (arg0, 0);
11043
11044 /* Get the length of the string passed to fputs. If the length
11045 can't be determined, punt. */
11046 if (!len
11047 || TREE_CODE (len) != INTEGER_CST)
11048 return NULL_TREE;
11049
11050 switch (compare_tree_int (len, 1))
11051 {
11052 case -1: /* length is 0, delete the call entirely . */
11053 return omit_one_operand (integer_type_node, integer_zero_node, arg1);;
11054
11055 case 0: /* length is 1, call fputc. */
11056 {
11057 const char *p = c_getstr (arg0);
11058
11059 if (p != NULL)
11060 {
11061 if (fn_fputc)
11062 return build_call_expr (fn_fputc, 2,
11063 build_int_cst (NULL_TREE, p[0]), arg1);
11064 else
11065 return NULL_TREE;
11066 }
11067 }
11068 /* FALLTHROUGH */
11069 case 1: /* length is greater than 1, call fwrite. */
11070 {
11071 /* If optimizing for size keep fputs. */
11072 if (optimize_size)
11073 return NULL_TREE;
11074 /* New argument list transforming fputs(string, stream) to
11075 fwrite(string, 1, len, stream). */
11076 if (fn_fwrite)
11077 return build_call_expr (fn_fwrite, 4, arg0, size_one_node, len, arg1);
11078 else
11079 return NULL_TREE;
11080 }
11081 default:
11082 gcc_unreachable ();
11083 }
11084 return NULL_TREE;
11085 }
11086
11087 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
11088 produced. False otherwise. This is done so that we don't output the error
11089 or warning twice or three times. */
11090 bool
11091 fold_builtin_next_arg (tree exp, bool va_start_p)
11092 {
11093 tree fntype = TREE_TYPE (current_function_decl);
11094 int nargs = call_expr_nargs (exp);
11095 tree arg;
11096
11097 if (TYPE_ARG_TYPES (fntype) == 0
11098 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
11099 == void_type_node))
11100 {
11101 error ("%<va_start%> used in function with fixed args");
11102 return true;
11103 }
11104
11105 if (va_start_p)
11106 {
11107 if (va_start_p && (nargs != 2))
11108 {
11109 error ("wrong number of arguments to function %<va_start%>");
11110 return true;
11111 }
11112 arg = CALL_EXPR_ARG (exp, 1);
11113 }
11114 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11115 when we checked the arguments and if needed issued a warning. */
11116 else
11117 {
11118 if (nargs == 0)
11119 {
11120 /* Evidently an out of date version of <stdarg.h>; can't validate
11121 va_start's second argument, but can still work as intended. */
11122 warning (0, "%<__builtin_next_arg%> called without an argument");
11123 return true;
11124 }
11125 else if (nargs > 1)
11126 {
11127 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11128 return true;
11129 }
11130 arg = CALL_EXPR_ARG (exp, 0);
11131 }
11132
11133 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11134 or __builtin_next_arg (0) the first time we see it, after checking
11135 the arguments and if needed issuing a warning. */
11136 if (!integer_zerop (arg))
11137 {
11138 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11139
11140 /* Strip off all nops for the sake of the comparison. This
11141 is not quite the same as STRIP_NOPS. It does more.
11142 We must also strip off INDIRECT_EXPR for C++ reference
11143 parameters. */
11144 while (TREE_CODE (arg) == NOP_EXPR
11145 || TREE_CODE (arg) == CONVERT_EXPR
11146 || TREE_CODE (arg) == NON_LVALUE_EXPR
11147 || TREE_CODE (arg) == INDIRECT_REF)
11148 arg = TREE_OPERAND (arg, 0);
11149 if (arg != last_parm)
11150 {
11151 /* FIXME: Sometimes with the tree optimizers we can get the
11152 not the last argument even though the user used the last
11153 argument. We just warn and set the arg to be the last
11154 argument so that we will get wrong-code because of
11155 it. */
11156 warning (0, "second parameter of %<va_start%> not last named argument");
11157 }
11158 /* We want to verify the second parameter just once before the tree
11159 optimizers are run and then avoid keeping it in the tree,
11160 as otherwise we could warn even for correct code like:
11161 void foo (int i, ...)
11162 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11163 if (va_start_p)
11164 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11165 else
11166 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11167 }
11168 return false;
11169 }
11170
11171
11172 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
11173 ORIG may be null if this is a 2-argument call. We don't attempt to
11174 simplify calls with more than 3 arguments.
11175
11176 Return NULL_TREE if no simplification was possible, otherwise return the
11177 simplified form of the call as a tree. If IGNORED is true, it means that
11178 the caller does not use the returned value of the function. */
11179
11180 static tree
11181 fold_builtin_sprintf (tree dest, tree fmt, tree orig, int ignored)
11182 {
11183 tree call, retval;
11184 const char *fmt_str = NULL;
11185
11186 /* Verify the required arguments in the original call. We deal with two
11187 types of sprintf() calls: 'sprintf (str, fmt)' and
11188 'sprintf (dest, "%s", orig)'. */
11189 if (!validate_arg (dest, POINTER_TYPE)
11190 || !validate_arg (fmt, POINTER_TYPE))
11191 return NULL_TREE;
11192 if (orig && !validate_arg (orig, POINTER_TYPE))
11193 return NULL_TREE;
11194
11195 /* Check whether the format is a literal string constant. */
11196 fmt_str = c_getstr (fmt);
11197 if (fmt_str == NULL)
11198 return NULL_TREE;
11199
11200 call = NULL_TREE;
11201 retval = NULL_TREE;
11202
11203 if (!init_target_chars ())
11204 return NULL_TREE;
11205
11206 /* If the format doesn't contain % args or %%, use strcpy. */
11207 if (strchr (fmt_str, target_percent) == NULL)
11208 {
11209 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11210
11211 if (!fn)
11212 return NULL_TREE;
11213
11214 /* Don't optimize sprintf (buf, "abc", ptr++). */
11215 if (orig)
11216 return NULL_TREE;
11217
11218 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
11219 'format' is known to contain no % formats. */
11220 call = build_call_expr (fn, 2, dest, fmt);
11221 if (!ignored)
11222 retval = build_int_cst (NULL_TREE, strlen (fmt_str));
11223 }
11224
11225 /* If the format is "%s", use strcpy if the result isn't used. */
11226 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
11227 {
11228 tree fn;
11229 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11230
11231 if (!fn)
11232 return NULL_TREE;
11233
11234 /* Don't crash on sprintf (str1, "%s"). */
11235 if (!orig)
11236 return NULL_TREE;
11237
11238 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
11239 if (!ignored)
11240 {
11241 retval = c_strlen (orig, 1);
11242 if (!retval || TREE_CODE (retval) != INTEGER_CST)
11243 return NULL_TREE;
11244 }
11245 call = build_call_expr (fn, 2, dest, orig);
11246 }
11247
11248 if (call && retval)
11249 {
11250 retval = fold_convert
11251 (TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
11252 retval);
11253 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
11254 }
11255 else
11256 return call;
11257 }
11258
/* Expand a call EXP to __builtin_object_size.  This is the fallback
   path at RTL expansion time: the object-size pass has already folded
   all calls it could, so here we emit the documented "unknown" value:
   (size_t) -1 for types 0 and 1, (size_t) 0 for types 2 and 3.  */

rtx
expand_builtin_object_size (tree exp)
{
  tree ost;
  int object_size_type;
  tree fndecl = get_callee_fndecl (exp);
  location_t locus = EXPR_LOCATION (exp);

  /* The call must be __builtin_object_size (ptr, type); diagnose and
     trap on malformed calls.  */
  if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    {
      error ("%Hfirst argument of %D must be a pointer, second integer constant",
	     &locus, fndecl);
      expand_builtin_trap ();
      return const0_rtx;
    }

  ost = CALL_EXPR_ARG (exp, 1);
  STRIP_NOPS (ost);

  /* The second argument must be an integer constant in [0, 3].  */
  if (TREE_CODE (ost) != INTEGER_CST
      || tree_int_cst_sgn (ost) < 0
      || compare_tree_int (ost, 3) > 0)
    {
      error ("%Hlast argument of %D is not integer constant between 0 and 3",
	     &locus, fndecl);
      expand_builtin_trap ();
      return const0_rtx;
    }

  object_size_type = tree_low_cst (ost, 0);

  return object_size_type < 2 ? constm1_rtx : const0_rtx;
}
11294
/* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   FCODE is the BUILT_IN_* to use.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
			   enum built_in_function fcode)
{
  tree dest, src, len, size;

  /* All four builtins take (dest, src-or-int, len, size); memset's
     second argument is the fill value rather than a pointer.  */
  if (!validate_arglist (exp,
			 POINTER_TYPE,
			 fcode == BUILT_IN_MEMSET_CHK
			 ? INTEGER_TYPE : POINTER_TYPE,
			 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dest = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);
  len = CALL_EXPR_ARG (exp, 2);
  size = CALL_EXPR_ARG (exp, 3);

  /* Without a constant object size there is nothing to check against.  */
  if (! host_integerp (size, 1))
    return NULL_RTX;

  if (host_integerp (len, 1) || integer_all_onesp (size))
    {
      tree fn;

      /* A constant LEN larger than the object is a guaranteed overflow:
	 warn and punt so the checking library call is emitted and can
	 fail at run time.  */
      if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
	{
	  location_t locus = EXPR_LOCATION (exp);
	  warning (0, "%Hcall to %D will always overflow destination buffer",
		   &locus, get_callee_fndecl (exp));
	  return NULL_RTX;
	}

      fn = NULL_TREE;
      /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
	 mem{cpy,pcpy,move,set} is available.  */
      switch (fcode)
	{
	case BUILT_IN_MEMCPY_CHK:
	  fn = built_in_decls[BUILT_IN_MEMCPY];
	  break;
	case BUILT_IN_MEMPCPY_CHK:
	  fn = built_in_decls[BUILT_IN_MEMPCPY];
	  break;
	case BUILT_IN_MEMMOVE_CHK:
	  fn = built_in_decls[BUILT_IN_MEMMOVE];
	  break;
	case BUILT_IN_MEMSET_CHK:
	  fn = built_in_decls[BUILT_IN_MEMSET];
	  break;
	default:
	  break;
	}

      if (! fn)
	return NULL_RTX;

      /* The check is statically satisfied (or impossible): expand as
	 the unchecked variant, preserving tail-call status.  */
      fn = build_call_expr (fn, 3, dest, src, len);
      if (TREE_CODE (fn) == CALL_EXPR)
	CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_expr (fn, target, mode, EXPAND_NORMAL);
    }
  else if (fcode == BUILT_IN_MEMSET_CHK)
    return NULL_RTX;
  else
    {
      unsigned int dest_align
	= get_pointer_alignment (dest, BIGGEST_ALIGNMENT);

      /* If DEST is not a pointer type, call the normal function.  */
      if (dest_align == 0)
	return NULL_RTX;

      /* If SRC and DEST are the same (and not volatile), do nothing.  */
      if (operand_equal_p (src, dest, 0))
	{
	  tree expr;

	  if (fcode != BUILT_IN_MEMPCPY_CHK)
	    {
	      /* Evaluate and ignore LEN in case it has side-effects.  */
	      expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
	      return expand_expr (dest, target, mode, EXPAND_NORMAL);
	    }

	  /* __mempcpy_chk returns DEST + LEN.  */
	  expr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
	  return expand_expr (expr, target, mode, EXPAND_NORMAL);
	}

      /* __memmove_chk special case.  */
      if (fcode == BUILT_IN_MEMMOVE_CHK)
	{
	  unsigned int src_align
	    = get_pointer_alignment (src, BIGGEST_ALIGNMENT);

	  if (src_align == 0)
	    return NULL_RTX;

	  /* If src is categorized for a readonly section we can use
	     normal __memcpy_chk.  */
	  if (readonly_data_expr (src))
	    {
	      tree fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
	      if (!fn)
		return NULL_RTX;
	      fn = build_call_expr (fn, 4, dest, src, len, size);
	      if (TREE_CODE (fn) == CALL_EXPR)
		CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
	      return expand_expr (fn, target, mode, EXPAND_NORMAL);
	    }
	}
      return NULL_RTX;
    }
}
11415
/* Emit warning if a buffer overflow is detected at compile time.
   EXP is the call to one of the checked string builtins; FCODE
   identifies which one, and determines where the length-like and
   object-size arguments sit in the argument list.  */

static void
maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
{
  /* Nonzero when LEN below is the source string whose length must be
     computed with c_strlen, rather than an explicit count.  */
  int is_strlen = 0;
  tree len, size;
  location_t locus;

  switch (fcode)
    {
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
    /* For __strcat_chk the warning will be emitted only if overflowing
       by at least strlen (dest) + 1 bytes.  */
    case BUILT_IN_STRCAT_CHK:
      len = CALL_EXPR_ARG (exp, 1);
      size = CALL_EXPR_ARG (exp, 2);
      is_strlen = 1;
      break;
    case BUILT_IN_STRNCAT_CHK:
    case BUILT_IN_STRNCPY_CHK:
      len = CALL_EXPR_ARG (exp, 2);
      size = CALL_EXPR_ARG (exp, 3);
      break;
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      len = CALL_EXPR_ARG (exp, 1);
      size = CALL_EXPR_ARG (exp, 3);
      break;
    default:
      gcc_unreachable ();
    }

  if (!len || !size)
    return;

  /* SIZE of (size_t) -1 marks an unknown object size: nothing to
     warn about.  */
  if (! host_integerp (size, 1) || integer_all_onesp (size))
    return;

  if (is_strlen)
    {
      /* LEN is the source string; warn only when its constant length
	 is known and is not smaller than SIZE.  */
      len = c_strlen (len, 1);
      if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
	return;
    }
  else if (fcode == BUILT_IN_STRNCAT_CHK)
    {
      tree src = CALL_EXPR_ARG (exp, 1);
      if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
	return;
      src = c_strlen (src, 1);
      if (! src || ! host_integerp (src, 1))
	{
	  /* Source length unknown: the overflow is only possible, not
	     certain, so emit the weaker "might overflow" warning.  */
	  locus = EXPR_LOCATION (exp);
	  warning (0, "%Hcall to %D might overflow destination buffer",
		   &locus, get_callee_fndecl (exp));
	  return;
	}
      else if (tree_int_cst_lt (src, size))
	return;
    }
  else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
    return;

  locus = EXPR_LOCATION (exp);
  warning (0, "%Hcall to %D will always overflow destination buffer",
	   &locus, get_callee_fndecl (exp));
}
11485
/* Emit warning if a buffer overflow is detected at compile time
   in __sprintf_chk/__vsprintf_chk calls.  The warning fires only when
   the output length is known exactly: either the format contains no
   '%' at all, or it is exactly "%s" with a string-literal argument.  */

static void
maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
{
  tree dest, size, len, fmt, flag;
  const char *fmt_str;
  int nargs = call_expr_nargs (exp);

  /* Verify the required arguments in the original call.  */

  if (nargs < 4)
    return;
  dest = CALL_EXPR_ARG (exp, 0);
  flag = CALL_EXPR_ARG (exp, 1);
  size = CALL_EXPR_ARG (exp, 2);
  fmt = CALL_EXPR_ARG (exp, 3);

  /* SIZE of (size_t) -1 marks an unknown object size: nothing to
     warn about.  */
  if (! host_integerp (size, 1) || integer_all_onesp (size))
    return;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return;

  if (!init_target_chars ())
    return;

  /* If the format doesn't contain % args or %%, we know its size.  */
  if (strchr (fmt_str, target_percent) == 0)
    len = build_int_cstu (size_type_node, strlen (fmt_str));
  /* If the format is "%s" and first ... argument is a string literal,
     we know it too.  */
  else if (fcode == BUILT_IN_SPRINTF_CHK
	   && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree arg;

      if (nargs < 5)
	return;
      arg = CALL_EXPR_ARG (exp, 4);
      if (! POINTER_TYPE_P (TREE_TYPE (arg)))
	return;

      len = c_strlen (arg, 1);
      if (!len || ! host_integerp (len, 1))
	return;
    }
  else
    return;

  /* Warn when LEN output characters plus the terminating NUL cannot
     fit in SIZE bytes.  */
  if (! tree_int_cst_lt (len, size))
    {
      location_t locus = EXPR_LOCATION (exp);
      warning (0, "%Hcall to %D will always overflow destination buffer",
	       &locus, get_callee_fndecl (exp));
    }
}
11546
/* Fold a call to __builtin_object_size with arguments PTR and OST,
   if possible.  Returns the computed size as a size_t constant, or
   NULL_TREE when the size is not (yet) known.  */

tree
fold_builtin_object_size (tree ptr, tree ost)
{
  tree ret = NULL_TREE;
  int object_size_type;

  if (!validate_arg (ptr, POINTER_TYPE)
      || !validate_arg (ost, INTEGER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (ost);

  /* The second argument must be an integer constant in [0, 3].  */
  if (TREE_CODE (ost) != INTEGER_CST
      || tree_int_cst_sgn (ost) < 0
      || compare_tree_int (ost, 3) > 0)
    return NULL_TREE;

  object_size_type = tree_low_cst (ost, 0);

  /* __builtin_object_size doesn't evaluate side-effects in its arguments;
     if there are any side-effects, it returns (size_t) -1 for types 0 and 1
     and (size_t) 0 for types 2 and 3.  */
  if (TREE_SIDE_EFFECTS (ptr))
    return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);

  if (TREE_CODE (ptr) == ADDR_EXPR)
    ret = build_int_cstu (size_type_node,
			  compute_builtin_object_size (ptr, object_size_type));

  else if (TREE_CODE (ptr) == SSA_NAME)
    {
      unsigned HOST_WIDE_INT bytes;

      /* If object size is not known yet, delay folding until
	 later.  Maybe subsequent passes will help determining
	 it.  */
      bytes = compute_builtin_object_size (ptr, object_size_type);
      if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2
					     ? -1 : 0))
	ret = build_int_cstu (size_type_node, bytes);
    }

  if (ret)
    {
      /* Give up if the computed value does not fit in the result
	 type (fit_double_type returns nonzero on overflow).  */
      unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (ret);
      HOST_WIDE_INT high = TREE_INT_CST_HIGH (ret);
      if (fit_double_type (low, high, &low, &high, TREE_TYPE (ret)))
	ret = NULL_TREE;
    }

  return ret;
}
11602
/* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   DEST, SRC, LEN, and SIZE are the arguments to the call.
   IGNORE is true, if return value can be ignored.  FCODE is the BUILT_IN_*
   code of the builtin.  If MAXLEN is not NULL, it is maximum length
   passed as third argument.  */

tree
fold_builtin_memory_chk (tree fndecl,
			 tree dest, tree src, tree len, tree size,
			 tree maxlen, bool ignore,
			 enum built_in_function fcode)
{
  tree fn;

  /* __memset_chk takes an integer fill value where the others take a
     source pointer.  */
  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (src,
			(fcode == BUILT_IN_MEMSET_CHK
			 ? INTEGER_TYPE : POINTER_TYPE))
      || !validate_arg (len, INTEGER_TYPE)
      || !validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;

  /* If SRC and DEST are the same (and not volatile), return DEST
     (resp. DEST+LEN for __mempcpy_chk).  */
  if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
    {
      if (fcode != BUILT_IN_MEMPCPY_CHK)
	return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
      else
	{
	  tree temp = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
	  return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), temp);
	}
    }

  /* Without a constant object size we cannot prove the copy is safe.  */
  if (! host_integerp (size, 1))
    return NULL_TREE;

  /* SIZE of (size_t) -1 means checking is impossible; otherwise prove
     that LEN (or its bound MAXLEN) does not exceed SIZE before dropping
     the check.  */
  if (! integer_all_onesp (size))
    {
      if (! host_integerp (len, 1))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
	    {
	      if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
		{
		  /* (void) __mempcpy_chk () can be optimized into
		     (void) __memcpy_chk ().  */
		  fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
		  if (!fn)
		    return NULL_TREE;

		  return build_call_expr (fn, 4, dest, src, len, size);
		}
	      return NULL_TREE;
	    }
	}
      else
	maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
	return NULL_TREE;
    }

  fn = NULL_TREE;
  /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
     mem{cpy,pcpy,move,set} is available.  */
  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
      fn = built_in_decls[BUILT_IN_MEMCPY];
      break;
    case BUILT_IN_MEMPCPY_CHK:
      fn = built_in_decls[BUILT_IN_MEMPCPY];
      break;
    case BUILT_IN_MEMMOVE_CHK:
      fn = built_in_decls[BUILT_IN_MEMMOVE];
      break;
    case BUILT_IN_MEMSET_CHK:
      fn = built_in_decls[BUILT_IN_MEMSET];
      break;
    default:
      break;
    }

  if (!fn)
    return NULL_TREE;

  return build_call_expr (fn, 3, dest, src, len);
}
11696
/* Fold a call to the __st[rp]cpy_chk builtin.
   DEST, SRC, and SIZE are the arguments to the call.
   IGNORE is true if return value can be ignored.  FCODE is the BUILT_IN_*
   code of the builtin.  If MAXLEN is not NULL, it is maximum length of
   strings passed as second argument.  */

tree
fold_builtin_stxcpy_chk (tree fndecl, tree dest, tree src, tree size,
			 tree maxlen, bool ignore,
			 enum built_in_function fcode)
{
  tree len, fn;

  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (src, POINTER_TYPE)
      || !validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
    return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);

  /* Without a constant object size we cannot prove the copy is safe.  */
  if (! host_integerp (size, 1))
    return NULL_TREE;

  /* SIZE of (size_t) -1 means checking is impossible; otherwise prove
     that the source length stays below SIZE before dropping the
     check.  */
  if (! integer_all_onesp (size))
    {
      len = c_strlen (src, 1);
      if (! len || ! host_integerp (len, 1))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
	    {
	      if (fcode == BUILT_IN_STPCPY_CHK)
		{
		  if (! ignore)
		    return NULL_TREE;

		  /* If return value of __stpcpy_chk is ignored,
		     optimize into __strcpy_chk.  */
		  fn = built_in_decls[BUILT_IN_STRCPY_CHK];
		  if (!fn)
		    return NULL_TREE;

		  return build_call_expr (fn, 3, dest, src, size);
		}

	      if (! len || TREE_SIDE_EFFECTS (len))
		return NULL_TREE;

	      /* If c_strlen returned something, but not a constant,
		 transform __strcpy_chk into __memcpy_chk.  */
	      fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
	      if (!fn)
		return NULL_TREE;

	      /* Copy LEN + 1 bytes to include the terminating NUL.  */
	      len = size_binop (PLUS_EXPR, len, ssize_int (1));
	      return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
				   build_call_expr (fn, 4,
						    dest, src, len, size));
	    }
	}
      else
	maxlen = len;

      if (! tree_int_cst_lt (maxlen, size))
	return NULL_TREE;
    }

  /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available.  */
  fn = built_in_decls[fcode == BUILT_IN_STPCPY_CHK
		      ? BUILT_IN_STPCPY : BUILT_IN_STRCPY];
  if (!fn)
    return NULL_TREE;

  return build_call_expr (fn, 2, dest, src);
}
11776
11777 /* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
11778 are the arguments to the call. If MAXLEN is not NULL, it is maximum
11779 length passed as third argument. */
11780
11781 tree
11782 fold_builtin_strncpy_chk (tree dest, tree src, tree len, tree size,
11783 tree maxlen)
11784 {
11785 tree fn;
11786
11787 if (!validate_arg (dest, POINTER_TYPE)
11788 || !validate_arg (src, POINTER_TYPE)
11789 || !validate_arg (len, INTEGER_TYPE)
11790 || !validate_arg (size, INTEGER_TYPE))
11791 return NULL_TREE;
11792
11793 if (! host_integerp (size, 1))
11794 return NULL_TREE;
11795
11796 if (! integer_all_onesp (size))
11797 {
11798 if (! host_integerp (len, 1))
11799 {
11800 /* If LEN is not constant, try MAXLEN too.
11801 For MAXLEN only allow optimizing into non-_ocs function
11802 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
11803 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
11804 return NULL_TREE;
11805 }
11806 else
11807 maxlen = len;
11808
11809 if (tree_int_cst_lt (size, maxlen))
11810 return NULL_TREE;
11811 }
11812
11813 /* If __builtin_strncpy_chk is used, assume strncpy is available. */
11814 fn = built_in_decls[BUILT_IN_STRNCPY];
11815 if (!fn)
11816 return NULL_TREE;
11817
11818 return build_call_expr (fn, 3, dest, src, len);
11819 }
11820
11821 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
11822 are the arguments to the call. */
11823
11824 static tree
11825 fold_builtin_strcat_chk (tree fndecl, tree dest, tree src, tree size)
11826 {
11827 tree fn;
11828 const char *p;
11829
11830 if (!validate_arg (dest, POINTER_TYPE)
11831 || !validate_arg (src, POINTER_TYPE)
11832 || !validate_arg (size, INTEGER_TYPE))
11833 return NULL_TREE;
11834
11835 p = c_getstr (src);
11836 /* If the SRC parameter is "", return DEST. */
11837 if (p && *p == '\0')
11838 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
11839
11840 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
11841 return NULL_TREE;
11842
11843 /* If __builtin_strcat_chk is used, assume strcat is available. */
11844 fn = built_in_decls[BUILT_IN_STRCAT];
11845 if (!fn)
11846 return NULL_TREE;
11847
11848 return build_call_expr (fn, 2, dest, src);
11849 }
11850
11851 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
11852 LEN, and SIZE. */
11853
11854 static tree
11855 fold_builtin_strncat_chk (tree fndecl,
11856 tree dest, tree src, tree len, tree size)
11857 {
11858 tree fn;
11859 const char *p;
11860
11861 if (!validate_arg (dest, POINTER_TYPE)
11862 || !validate_arg (src, POINTER_TYPE)
11863 || !validate_arg (size, INTEGER_TYPE)
11864 || !validate_arg (size, INTEGER_TYPE))
11865 return NULL_TREE;
11866
11867 p = c_getstr (src);
11868 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
11869 if (p && *p == '\0')
11870 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
11871 else if (integer_zerop (len))
11872 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
11873
11874 if (! host_integerp (size, 1))
11875 return NULL_TREE;
11876
11877 if (! integer_all_onesp (size))
11878 {
11879 tree src_len = c_strlen (src, 1);
11880 if (src_len
11881 && host_integerp (src_len, 1)
11882 && host_integerp (len, 1)
11883 && ! tree_int_cst_lt (len, src_len))
11884 {
11885 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
11886 fn = built_in_decls[BUILT_IN_STRCAT_CHK];
11887 if (!fn)
11888 return NULL_TREE;
11889
11890 return build_call_expr (fn, 3, dest, src, size);
11891 }
11892 return NULL_TREE;
11893 }
11894
11895 /* If __builtin_strncat_chk is used, assume strncat is available. */
11896 fn = built_in_decls[BUILT_IN_STRNCAT];
11897 if (!fn)
11898 return NULL_TREE;
11899
11900 return build_call_expr (fn, 3, dest, src, len);
11901 }
11902
/* Fold a call EXP to __{,v}sprintf_chk.  Return NULL_TREE if
   a normal call should be emitted rather than expanding the function
   inline.  FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK.
   The checked call has the shape (dest, flag, size, fmt, ...); when the
   output length is provably smaller than SIZE (or SIZE is (size_t)-1,
   i.e. unknown/unlimited), the call is rewritten to plain {,v}sprintf.  */

static tree
fold_builtin_sprintf_chk (tree exp, enum built_in_function fcode)
{
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;
  int nargs = call_expr_nargs (exp);

  /* Verify the required arguments in the original call.  */
  if (nargs < 4)
    return NULL_TREE;
  dest = CALL_EXPR_ARG (exp, 0);
  if (!validate_arg (dest, POINTER_TYPE))
    return NULL_TREE;
  flag = CALL_EXPR_ARG (exp, 1);
  if (!validate_arg (flag, INTEGER_TYPE))
    return NULL_TREE;
  size = CALL_EXPR_ARG (exp, 2);
  if (!validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;
  fmt = CALL_EXPR_ARG (exp, 3);
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  /* SIZE must be a compile-time constant so it can be compared against
     the computed output length below.  */
  if (! host_integerp (size, 1))
    return NULL_TREE;

  len = NULL_TREE;

  if (!init_target_chars ())
    return NULL_TREE;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str != NULL)
    {
      /* If the format doesn't contain % args or %%, we know the size.  */
      if (strchr (fmt_str, target_percent) == 0)
	{
	  /* For sprintf_chk any extra arguments would be unused only if
	     the format has no conversions; vsprintf_chk always qualifies.  */
	  if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
	    len = build_int_cstu (size_type_node, strlen (fmt_str));
	}
      /* If the format is "%s" and first ... argument is a string literal,
	 we know the size too.  */
      else if (fcode == BUILT_IN_SPRINTF_CHK
	       && strcmp (fmt_str, target_percent_s) == 0)
	{
	  tree arg;

	  if (nargs == 5)
	    {
	      arg = CALL_EXPR_ARG (exp, 4);
	      if (validate_arg (arg, POINTER_TYPE))
		{
		  len = c_strlen (arg, 1);
		  if (! len || ! host_integerp (len, 1))
		    len = NULL_TREE;
		}
	    }
	}
    }

  /* A SIZE of all-ones means "object size unknown": no length proof is
     needed.  Otherwise require a known LEN strictly below SIZE.  */
  if (! integer_all_onesp (size))
    {
      if (! len || ! tree_int_cst_lt (len, size))
	return NULL_TREE;
    }

  /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      if (fmt_str == NULL)
	return NULL_TREE;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return NULL_TREE;
    }

  /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available.  */
  fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
		      ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
  if (!fn)
    return NULL_TREE;

  /* Drop the FLAG and SIZE arguments: keep DEST and FMT, pass the
     remaining varargs through unchanged.  */
  return rewrite_call_expr (exp, 4, fn, 2, dest, fmt);
}
11993
/* Fold a call EXP to {,v}snprintf.  Return NULL_TREE if
   a normal call should be emitted rather than expanding the function
   inline.  FCODE is either BUILT_IN_SNPRINTF_CHK or
   BUILT_IN_VSNPRINTF_CHK.  If MAXLEN is not NULL, it is maximum length
   passed as second argument.  The checked call has the shape
   (dest, len, flag, size, fmt, ...).  */

tree
fold_builtin_snprintf_chk (tree exp, tree maxlen,
			   enum built_in_function fcode)
{
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;

  /* Verify the required arguments in the original call.  */
  if (call_expr_nargs (exp) < 5)
    return NULL_TREE;
  dest = CALL_EXPR_ARG (exp, 0);
  if (!validate_arg (dest, POINTER_TYPE))
    return NULL_TREE;
  len = CALL_EXPR_ARG (exp, 1);
  if (!validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;
  flag = CALL_EXPR_ARG (exp, 2);
  if (!validate_arg (flag, INTEGER_TYPE))
    return NULL_TREE;
  size = CALL_EXPR_ARG (exp, 3);
  if (!validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;
  fmt = CALL_EXPR_ARG (exp, 4);
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  /* SIZE must be a compile-time constant to compare against LEN.  */
  if (! host_integerp (size, 1))
    return NULL_TREE;

  /* A SIZE of all-ones means "object size unknown"; otherwise prove
     SIZE >= LEN so the non-checking variant cannot overflow DEST.  */
  if (! integer_all_onesp (size))
    {
      if (! host_integerp (len, 1))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
	    return NULL_TREE;
	}
      else
	maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
	return NULL_TREE;
    }

  if (!init_target_chars ())
    return NULL_TREE;

  /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      fmt_str = c_getstr (fmt);
      if (fmt_str == NULL)
	return NULL_TREE;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return NULL_TREE;
    }

  /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
     available.  */
  fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
		      ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
  if (!fn)
    return NULL_TREE;

  /* Drop the FLAG and SIZE arguments: keep DEST, LEN and FMT, pass the
     remaining varargs through unchanged.  */
  return rewrite_call_expr (exp, 5, fn, 3, dest, len, fmt);
}
12070
/* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
   FMT and ARG are the arguments to the call; we don't fold cases with
   more than 2 arguments, and ARG may be null if this is a 1-argument case.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.  FCODE is the BUILT_IN_*
   code of the function to be simplified.  The transformations performed
   are printf("") -> 0, printf("c") -> putchar('c'), and
   printf("string\n") / printf("%s\n", str) -> puts(...).  */

static tree
fold_builtin_printf (tree fndecl, tree fmt, tree arg, bool ignore,
		     enum built_in_function fcode)
{
  tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation: putchar and
     puts do not return the character count printf would.  */
  if (! ignore)
    return NULL_TREE;

  /* Verify the required arguments in the original call.  */
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return NULL_TREE;

  if (fcode == BUILT_IN_PRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_putchar = built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED];
      fn_puts = built_in_decls[BUILT_IN_PUTS_UNLOCKED];
    }
  else
    {
      fn_putchar = implicit_built_in_decls[BUILT_IN_PUTCHAR];
      fn_puts = implicit_built_in_decls[BUILT_IN_PUTS];
    }

  if (!init_target_chars ())
    return NULL_TREE;

  if (strcmp (fmt_str, target_percent_s) == 0
      || strchr (fmt_str, target_percent) == NULL)
    {
      const char *str;

      if (strcmp (fmt_str, target_percent_s) == 0)
	{
	  /* "%s": the single argument must itself be a string literal.
	     The va_list variants cannot inspect their argument.  */
	  if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
	    return NULL_TREE;

	  if (!arg || !validate_arg (arg, POINTER_TYPE))
	    return NULL_TREE;

	  str = c_getstr (arg);
	  if (str == NULL)
	    return NULL_TREE;
	}
      else
	{
	  /* The format specifier doesn't contain any '%' characters.  */
	  if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
	      && arg)
	    return NULL_TREE;
	  str = fmt_str;
	}

      /* If the string was "", printf does nothing.  */
      if (str[0] == '\0')
	return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);

      /* If the string has length of 1, call putchar.  */
      if (str[1] == '\0')
	{
	  /* Given printf("c"), (where c is any one character,)
	     convert "c"[0] to an int and pass that to the replacement
	     function.  */
	  newarg = build_int_cst (NULL_TREE, str[0]);
	  if (fn_putchar)
	    call = build_call_expr (fn_putchar, 1, newarg);
	}
      else
	{
	  /* If the string was "string\n", call puts("string").  */
	  size_t len = strlen (str);
	  if ((unsigned char)str[len - 1] == target_newline)
	    {
	      /* Create a NUL-terminated string that's one char shorter
		 than the original, stripping off the trailing '\n'.
		 puts appends the newline itself.  */
	      char *newstr = alloca (len);
	      memcpy (newstr, str, len - 1);
	      newstr[len - 1] = 0;

	      newarg = build_string_literal (len, newstr);
	      if (fn_puts)
		call = build_call_expr (fn_puts, 1, newarg);
	    }
	  else
	    /* We'd like to arrange to call fputs(string,stdout) here,
	       but we need stdout and don't have a way to get it yet.  */
	    return NULL_TREE;
	}
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
    return NULL_TREE;

  /* If the format specifier was "%s\n", call __builtin_puts(arg).  */
  else if (strcmp (fmt_str, target_percent_s_newline) == 0)
    {
      if (!arg || !validate_arg (arg, POINTER_TYPE))
	return NULL_TREE;
      if (fn_puts)
	call = build_call_expr (fn_puts, 1, arg);
    }

  /* If the format specifier was "%c", call __builtin_putchar(arg).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg || !validate_arg (arg, INTEGER_TYPE))
	return NULL_TREE;
      if (fn_putchar)
	call = build_call_expr (fn_putchar, 1, arg);
    }

  if (!call)
    return NULL_TREE;

  return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), call);
}
12205
/* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
   FP, FMT, and ARG are the arguments to the call.  We don't fold calls with
   more than 3 arguments, and ARG may be null in the 2-argument case.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.  FCODE is the BUILT_IN_*
   code of the function to be simplified.  The transformations performed
   are fprintf(fp,"") -> 0, fprintf(fp,"string") -> fputs("string",fp),
   fprintf(fp,"%s",s) -> fputs(s,fp) and fprintf(fp,"%c",c) -> fputc(c,fp).  */

static tree
fold_builtin_fprintf (tree fndecl, tree fp, tree fmt, tree arg, bool ignore,
		      enum built_in_function fcode)
{
  tree fn_fputc, fn_fputs, call = NULL_TREE;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation: fputs and
     fputc do not return the character count fprintf would.  */
  if (! ignore)
    return NULL_TREE;

  /* Verify the required arguments in the original call.  */
  if (!validate_arg (fp, POINTER_TYPE))
    return NULL_TREE;
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return NULL_TREE;

  if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_fputc = built_in_decls[BUILT_IN_FPUTC_UNLOCKED];
      fn_fputs = built_in_decls[BUILT_IN_FPUTS_UNLOCKED];
    }
  else
    {
      fn_fputc = implicit_built_in_decls[BUILT_IN_FPUTC];
      fn_fputs = implicit_built_in_decls[BUILT_IN_FPUTS];
    }

  if (!init_target_chars ())
    return NULL_TREE;

  /* If the format doesn't contain % args or %%, use fputs.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
	  && arg)
	return NULL_TREE;

      /* If the format specifier was "", fprintf does nothing.  */
      if (fmt_str[0] == '\0')
	{
	  /* If FP has side-effects, just wait until gimplification is
	     done.  */
	  if (TREE_SIDE_EFFECTS (fp))
	    return NULL_TREE;

	  return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
	}

      /* When "string" doesn't contain %, replace all cases of
	 fprintf (fp, string) with fputs (string, fp).  The fputs
	 builtin will take care of special cases like length == 1.  */
      if (fn_fputs)
	call = build_call_expr (fn_fputs, 2, fmt, fp);
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
    return NULL_TREE;

  /* If the format specifier was "%s", call __builtin_fputs (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_s) == 0)
    {
      if (!arg || !validate_arg (arg, POINTER_TYPE))
	return NULL_TREE;
      if (fn_fputs)
	call = build_call_expr (fn_fputs, 2, arg, fp);
    }

  /* If the format specifier was "%c", call __builtin_fputc (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg || !validate_arg (arg, INTEGER_TYPE))
	return NULL_TREE;
      if (fn_fputc)
	call = build_call_expr (fn_fputc, 2, arg, fp);
    }

  if (!call)
    return NULL_TREE;
  return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), call);
}
12303
12304 /* Initialize format string characters in the target charset. */
12305
12306 static bool
12307 init_target_chars (void)
12308 {
12309 static bool init;
12310 if (!init)
12311 {
12312 target_newline = lang_hooks.to_target_charset ('\n');
12313 target_percent = lang_hooks.to_target_charset ('%');
12314 target_c = lang_hooks.to_target_charset ('c');
12315 target_s = lang_hooks.to_target_charset ('s');
12316 if (target_newline == 0 || target_percent == 0 || target_c == 0
12317 || target_s == 0)
12318 return false;
12319
12320 target_percent_c[0] = target_percent;
12321 target_percent_c[1] = target_c;
12322 target_percent_c[2] = '\0';
12323
12324 target_percent_s[0] = target_percent;
12325 target_percent_s[1] = target_s;
12326 target_percent_s[2] = '\0';
12327
12328 target_percent_s_newline[0] = target_percent;
12329 target_percent_s_newline[1] = target_s;
12330 target_percent_s_newline[2] = target_newline;
12331 target_percent_s_newline[3] = '\0';
12332
12333 init = true;
12334 }
12335 return true;
12336 }
12337
/* Helper function for do_mpfr_arg*().  Ensure M is a normal number
   and no overflow/underflow occurred.  INEXACT is true if M was not
   exactly calculated.  TYPE is the tree type for the result.  This
   function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail,
   otherwise a REAL_CST of TYPE holding M's value.  */

static tree
do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
      && (!flag_rounding_math || !inexact))
    {
      REAL_VALUE_TYPE rr;

      real_from_mpfr (&rr, m, type, GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
	 check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
	 but the mpfr_t is not, then we underflowed in the
	 conversion.  */
      if (real_isfinite (&rr)
	  && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
	{
	  REAL_VALUE_TYPE rmode;

	  real_convert (&rmode, TYPE_MODE (type), &rr);
	  /* Proceed iff the specified mode can hold the value.  */
	  if (real_identical (&rmode, &rr))
	    return build_real (type, rmode);
	}
    }
  return NULL_TREE;
}
12374
12375 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
12376 FUNC on it and return the resulting value as a tree with type TYPE.
12377 If MIN and/or MAX are not NULL, then the supplied ARG must be
12378 within those bounds. If INCLUSIVE is true, then MIN/MAX are
12379 acceptable values, otherwise they are not. The mpfr precision is
12380 set to the precision of TYPE. We assume that function FUNC returns
12381 zero if the result could be calculated exactly within the requested
12382 precision. */
12383
12384 static tree
12385 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
12386 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
12387 bool inclusive)
12388 {
12389 tree result = NULL_TREE;
12390
12391 STRIP_NOPS (arg);
12392
12393 /* To proceed, MPFR must exactly represent the target floating point
12394 format, which only happens when the target base equals two. */
12395 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12396 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
12397 {
12398 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12399
12400 if (real_isfinite (ra)
12401 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
12402 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
12403 {
12404 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12405 int inexact;
12406 mpfr_t m;
12407
12408 mpfr_init2 (m, prec);
12409 mpfr_from_real (m, ra, GMP_RNDN);
12410 mpfr_clear_flags ();
12411 inexact = func (m, m, GMP_RNDN);
12412 result = do_mpfr_ckconv (m, type, inexact);
12413 mpfr_clear (m);
12414 }
12415 }
12416
12417 return result;
12418 }
12419
/* If arguments ARG1 and ARG2 are REAL_CSTs, call the two-argument mpfr
   function FUNC on them and return the resulting value as a tree with
   type TYPE.  The mpfr precision is set to the precision of TYPE.  We
   assume that function FUNC returns zero if the result could be
   calculated exactly within the requested precision.  Returns
   NULL_TREE when folding is not possible.  */

static tree
do_mpfr_arg2 (tree arg1, tree arg2, tree type,
	      int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
    {
      const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
      const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);

      if (real_isfinite (ra1) && real_isfinite (ra2))
	{
	  const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
	  int inexact;
	  mpfr_t m1, m2;

	  mpfr_inits2 (prec, m1, m2, NULL);
	  mpfr_from_real (m1, ra1, GMP_RNDN);
	  mpfr_from_real (m2, ra2, GMP_RNDN);
	  /* Flags must be cleared right before FUNC so do_mpfr_ckconv
	     sees only flags raised by this computation.  */
	  mpfr_clear_flags ();
	  inexact = func (m1, m1, m2, GMP_RNDN);
	  result = do_mpfr_ckconv (m1, type, inexact);
	  mpfr_clears (m1, m2, NULL);
	}
    }

  return result;
}
12462
12463 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
12464 FUNC on it and return the resulting value as a tree with type TYPE.
12465 The mpfr precision is set to the precision of TYPE. We assume that
12466 function FUNC returns zero if the result could be calculated
12467 exactly within the requested precision. */
12468
12469 static tree
12470 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
12471 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12472 {
12473 tree result = NULL_TREE;
12474
12475 STRIP_NOPS (arg1);
12476 STRIP_NOPS (arg2);
12477 STRIP_NOPS (arg3);
12478
12479 /* To proceed, MPFR must exactly represent the target floating point
12480 format, which only happens when the target base equals two. */
12481 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12482 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12483 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
12484 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
12485 {
12486 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12487 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12488 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
12489
12490 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
12491 {
12492 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12493 int inexact;
12494 mpfr_t m1, m2, m3;
12495
12496 mpfr_inits2 (prec, m1, m2, m3, NULL);
12497 mpfr_from_real (m1, ra1, GMP_RNDN);
12498 mpfr_from_real (m2, ra2, GMP_RNDN);
12499 mpfr_from_real (m3, ra3, GMP_RNDN);
12500 mpfr_clear_flags ();
12501 inexact = func (m1, m1, m2, m3, GMP_RNDN);
12502 result = do_mpfr_ckconv (m1, type, inexact);
12503 mpfr_clears (m1, m2, m3, NULL);
12504 }
12505 }
12506
12507 return result;
12508 }
12509
/* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
   the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
   If ARG_SINP and ARG_COSP are NULL then the result is returned
   as a complex value (real part = cos, imaginary part = sin, i.e.
   cexpi semantics).
   The type is taken from the type of ARG and is used for setting the
   precision of the calculation and results.  Returns NULL_TREE when
   folding is not possible.  */

static tree
do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
{
  tree const type = TREE_TYPE (arg);
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST
      && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);

      if (real_isfinite (ra))
	{
	  const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
	  tree result_s, result_c;
	  int inexact;
	  mpfr_t m, ms, mc;

	  mpfr_inits2 (prec, m, ms, mc, NULL);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  mpfr_clear_flags ();
	  /* mpfr_sin_cos computes both results in one pass; INEXACT is
	     nonzero if either one was rounded.  */
	  inexact = mpfr_sin_cos (ms, mc, m, GMP_RNDN);
	  result_s = do_mpfr_ckconv (ms, type, inexact);
	  result_c = do_mpfr_ckconv (mc, type, inexact);
	  mpfr_clears (m, ms, mc, NULL);
	  if (result_s && result_c)
	    {
	      /* If we are to return in a complex value do so.  */
	      if (!arg_sinp && !arg_cosp)
		return build_complex (build_complex_type (type),
				      result_c, result_s);

	      /* Dereference the sin/cos pointer arguments.  */
	      arg_sinp = build_fold_indirect_ref (arg_sinp);
	      arg_cosp = build_fold_indirect_ref (arg_cosp);
	      /* Proceed iff valid pointer types were passed in.  */
	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
		  && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
		{
		  /* Set the values.  */
		  result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
					  result_s);
		  TREE_SIDE_EFFECTS (result_s) = 1;
		  result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
					  result_c);
		  TREE_SIDE_EFFECTS (result_c) = 1;
		  /* Combine the assignments into a compound expr.  */
		  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						    result_s, result_c));
		}
	    }
	}
    }
  return result;
}
12577
12578 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
/* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
   two-argument mpfr order N Bessel function FUNC on them and return
   the resulting value as a tree with type TYPE.  If MIN is not NULL,
   ARG2 must be within that bound (inclusively iff INCLUSIVE).  The
   mpfr precision is set to the precision of TYPE.  We assume that
   function FUNC returns zero if the result could be calculated
   exactly within the requested precision.  Returns NULL_TREE when
   folding is not possible.  */
static tree
do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
		  int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
		  const REAL_VALUE_TYPE *min, bool inclusive)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && host_integerp (arg1, 0)
      && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
    {
      const HOST_WIDE_INT n = tree_low_cst (arg1, 0);
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);

      /* FUNC takes the order as a host long; proceed only if N survives
	 the narrowing from HOST_WIDE_INT.  */
      if (n == (long)n
	  && real_isfinite (ra)
	  && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
	{
	  const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
	  int inexact;
	  mpfr_t m;

	  mpfr_init2 (m, prec);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  mpfr_clear_flags ();
	  inexact = func (m, n, m, GMP_RNDN);
	  result = do_mpfr_ckconv (m, type, inexact);
	  mpfr_clear (m);
	}
    }

  return result;
}
12623
/* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
   the pointer *(ARG_QUO) and return the result.  The type is taken
   from the type of ARG0 and is used for setting the precision of the
   calculation and results.  The returned tree is a compound expression
   assigning the quotient through ARG_QUO and yielding the remainder.
   Returns NULL_TREE when folding is not possible.  */

static tree
do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
{
  tree const type = TREE_TYPE (arg0);
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
    {
      const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
      const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);

      if (real_isfinite (ra0) && real_isfinite (ra1))
	{
	  const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
	  tree result_rem;
	  long integer_quo;
	  mpfr_t m0, m1;

	  mpfr_inits2 (prec, m0, m1, NULL);
	  mpfr_from_real (m0, ra0, GMP_RNDN);
	  mpfr_from_real (m1, ra1, GMP_RNDN);
	  mpfr_clear_flags ();
	  mpfr_remquo (m0, &integer_quo, m0, m1, GMP_RNDN);
	  /* Remquo is independent of the rounding mode, so pass
	     inexact=0 to do_mpfr_ckconv().  */
	  result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
	  mpfr_clears (m0, m1, NULL);
	  if (result_rem)
	    {
	      /* MPFR calculates quo in the host's long so it may
		 return more bits in quo than the target int can hold
		 if sizeof(host long) > sizeof(target int).  This can
		 happen even for native compilers in LP64 mode.  In
		 these cases, modulo the quo value with the largest
		 number that the target int can hold while leaving one
		 bit for the sign.  */
	      if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
		integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));

	      /* Dereference the quo pointer argument.  */
	      arg_quo = build_fold_indirect_ref (arg_quo);
	      /* Proceed iff a valid pointer type was passed in.  */
	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
		{
		  /* Set the value.  */
		  tree result_quo = fold_build2 (MODIFY_EXPR,
						 TREE_TYPE (arg_quo), arg_quo,
						 build_int_cst (NULL, integer_quo));
		  TREE_SIDE_EFFECTS (result_quo) = 1;
		  /* Combine the quo assignment with the rem.  */
		  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						    result_quo, result_rem));
		}
	    }
	}
    }
  return result;
}
12694
/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
   resulting value as a tree with type TYPE.  The mpfr precision is
   set to the precision of TYPE.  We assume that this mpfr function
   returns zero if the result could be calculated exactly within the
   requested precision.  In addition, the integer pointer represented
   by ARG_SG will be dereferenced and set to the appropriate signgam
   (-1,1) value.  Returns NULL_TREE when folding is not possible.  */

static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
	 negative integer (lgamma poles).  */
      if (real_isfinite (ra)
	  && ra->cl != rvc_zero
	  && !(real_isneg(ra) && real_isinteger(ra, TYPE_MODE (type))))
	{
	  const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
	  int inexact, sg;
	  mpfr_t m;
	  tree result_lg;

	  mpfr_init2 (m, prec);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  mpfr_clear_flags ();
	  inexact = mpfr_lgamma (m, &sg, m, GMP_RNDN);
	  result_lg = do_mpfr_ckconv (m, type, inexact);
	  mpfr_clear (m);
	  if (result_lg)
	    {
	      tree result_sg;

	      /* Dereference the arg_sg pointer argument.  */
	      arg_sg = build_fold_indirect_ref (arg_sg);
	      /* Assign the signgam value into *arg_sg.  */
	      result_sg = fold_build2 (MODIFY_EXPR,
				       TREE_TYPE (arg_sg), arg_sg,
				       build_int_cst (NULL, sg));
	      TREE_SIDE_EFFECTS (result_sg) = 1;
	      /* Combine the signgam assignment with the lgamma result.  */
	      result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						result_sg, result_lg));
	    }
	}
    }

  return result;
}
12757 #endif