intrinsic.h (gfc_check_selected_real_kind, [...]): Update prototypes.
[gcc.git] / gcc / builtins.c
1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
4 Free Software Foundation, Inc.
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "rtl.h"
28 #include "tree.h"
29 #include "realmpfr.h"
30 #include "gimple.h"
31 #include "flags.h"
32 #include "regs.h"
33 #include "hard-reg-set.h"
34 #include "except.h"
35 #include "function.h"
36 #include "insn-config.h"
37 #include "expr.h"
38 #include "optabs.h"
39 #include "libfuncs.h"
40 #include "recog.h"
41 #include "output.h"
42 #include "typeclass.h"
43 #include "toplev.h"
44 #include "predict.h"
45 #include "tm_p.h"
46 #include "target.h"
47 #include "langhooks.h"
48 #include "basic-block.h"
49 #include "tree-mudflap.h"
50 #include "tree-flow.h"
51 #include "value-prof.h"
52 #include "diagnostic-core.h"
53
54 #ifndef SLOW_UNALIGNED_ACCESS
55 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
56 #endif
57
58 #ifndef PAD_VARARGS_DOWN
59 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
60 #endif
61 static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));
62
63 /* Define the names of the builtin function types and codes. */
64 const char *const built_in_class_names[4]
65 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
66
67 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
68 const char * built_in_names[(int) END_BUILTINS] =
69 {
70 #include "builtins.def"
71 };
72 #undef DEF_BUILTIN
73
74 /* Setup an array of _DECL trees, make sure each element is
75 initialized to NULL_TREE. */
76 tree built_in_decls[(int) END_BUILTINS];
77 /* Declarations used when constructing the builtin implicitly in the compiler.
78 It may be NULL_TREE when this is invalid (for instance runtime is not
79 required to implement the function call in all cases). */
80 tree implicit_built_in_decls[(int) END_BUILTINS];
81
82 static const char *c_getstr (tree);
83 static rtx c_readstr (const char *, enum machine_mode);
84 static int target_char_cast (tree, char *);
85 static rtx get_memory_rtx (tree, tree);
86 static int apply_args_size (void);
87 static int apply_result_size (void);
88 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
89 static rtx result_vector (int, rtx);
90 #endif
91 static void expand_builtin_update_setjmp_buf (rtx);
92 static void expand_builtin_prefetch (tree);
93 static rtx expand_builtin_apply_args (void);
94 static rtx expand_builtin_apply_args_1 (void);
95 static rtx expand_builtin_apply (rtx, rtx, rtx);
96 static void expand_builtin_return (rtx);
97 static enum type_class type_to_class (tree);
98 static rtx expand_builtin_classify_type (tree);
99 static void expand_errno_check (tree, rtx);
100 static rtx expand_builtin_mathfn (tree, rtx, rtx);
101 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
102 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
103 static rtx expand_builtin_interclass_mathfn (tree, rtx, rtx);
104 static rtx expand_builtin_sincos (tree);
105 static rtx expand_builtin_cexpi (tree, rtx, rtx);
106 static rtx expand_builtin_int_roundingfn (tree, rtx);
107 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
108 static rtx expand_builtin_args_info (tree);
109 static rtx expand_builtin_next_arg (void);
110 static rtx expand_builtin_va_start (tree);
111 static rtx expand_builtin_va_end (tree);
112 static rtx expand_builtin_va_copy (tree);
113 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
114 static rtx expand_builtin_strcmp (tree, rtx);
115 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
116 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
117 static rtx expand_builtin_memcpy (tree, rtx);
118 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
119 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
120 enum machine_mode, int);
121 static rtx expand_builtin_strcpy (tree, rtx);
122 static rtx expand_builtin_strcpy_args (tree, tree, rtx);
123 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
124 static rtx expand_builtin_strncpy (tree, rtx);
125 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
126 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
127 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
128 static rtx expand_builtin_bzero (tree);
129 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
130 static rtx expand_builtin_alloca (tree, rtx);
131 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
132 static rtx expand_builtin_frame_address (tree, tree);
133 static tree stabilize_va_list_loc (location_t, tree, int);
134 static rtx expand_builtin_expect (tree, rtx);
135 static tree fold_builtin_constant_p (tree);
136 static tree fold_builtin_expect (location_t, tree, tree);
137 static tree fold_builtin_classify_type (tree);
138 static tree fold_builtin_strlen (location_t, tree, tree);
139 static tree fold_builtin_inf (location_t, tree, int);
140 static tree fold_builtin_nan (tree, tree, int);
141 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
142 static bool validate_arg (const_tree, enum tree_code code);
143 static bool integer_valued_real_p (tree);
144 static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
145 static bool readonly_data_expr (tree);
146 static rtx expand_builtin_fabs (tree, rtx, rtx);
147 static rtx expand_builtin_signbit (tree, rtx);
148 static tree fold_builtin_sqrt (location_t, tree, tree);
149 static tree fold_builtin_cbrt (location_t, tree, tree);
150 static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
151 static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
152 static tree fold_builtin_cos (location_t, tree, tree, tree);
153 static tree fold_builtin_cosh (location_t, tree, tree, tree);
154 static tree fold_builtin_tan (tree, tree);
155 static tree fold_builtin_trunc (location_t, tree, tree);
156 static tree fold_builtin_floor (location_t, tree, tree);
157 static tree fold_builtin_ceil (location_t, tree, tree);
158 static tree fold_builtin_round (location_t, tree, tree);
159 static tree fold_builtin_int_roundingfn (location_t, tree, tree);
160 static tree fold_builtin_bitop (tree, tree);
161 static tree fold_builtin_memory_op (location_t, tree, tree, tree, tree, bool, int);
162 static tree fold_builtin_strchr (location_t, tree, tree, tree);
163 static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
164 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
165 static tree fold_builtin_strcmp (location_t, tree, tree);
166 static tree fold_builtin_strncmp (location_t, tree, tree, tree);
167 static tree fold_builtin_signbit (location_t, tree, tree);
168 static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
169 static tree fold_builtin_isascii (location_t, tree);
170 static tree fold_builtin_toascii (location_t, tree);
171 static tree fold_builtin_isdigit (location_t, tree);
172 static tree fold_builtin_fabs (location_t, tree, tree);
173 static tree fold_builtin_abs (location_t, tree, tree);
174 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
175 enum tree_code);
176 static tree fold_builtin_n (location_t, tree, tree *, int, bool);
177 static tree fold_builtin_0 (location_t, tree, bool);
178 static tree fold_builtin_1 (location_t, tree, tree, bool);
179 static tree fold_builtin_2 (location_t, tree, tree, tree, bool);
180 static tree fold_builtin_3 (location_t, tree, tree, tree, tree, bool);
181 static tree fold_builtin_4 (location_t, tree, tree, tree, tree, tree, bool);
182 static tree fold_builtin_varargs (location_t, tree, tree, bool);
183
184 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
185 static tree fold_builtin_strstr (location_t, tree, tree, tree);
186 static tree fold_builtin_strrchr (location_t, tree, tree, tree);
187 static tree fold_builtin_strcat (location_t, tree, tree);
188 static tree fold_builtin_strncat (location_t, tree, tree, tree);
189 static tree fold_builtin_strspn (location_t, tree, tree);
190 static tree fold_builtin_strcspn (location_t, tree, tree);
191 static tree fold_builtin_sprintf (location_t, tree, tree, tree, int);
192
193 static rtx expand_builtin_object_size (tree);
194 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
195 enum built_in_function);
196 static void maybe_emit_chk_warning (tree, enum built_in_function);
197 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
198 static void maybe_emit_free_warning (tree);
199 static tree fold_builtin_object_size (tree, tree);
200 static tree fold_builtin_strcat_chk (location_t, tree, tree, tree, tree);
201 static tree fold_builtin_strncat_chk (location_t, tree, tree, tree, tree, tree);
202 static tree fold_builtin_sprintf_chk (location_t, tree, enum built_in_function);
203 static tree fold_builtin_printf (location_t, tree, tree, tree, bool, enum built_in_function);
204 static tree fold_builtin_fprintf (location_t, tree, tree, tree, tree, bool,
205 enum built_in_function);
206 static bool init_target_chars (void);
207
208 static unsigned HOST_WIDE_INT target_newline;
209 static unsigned HOST_WIDE_INT target_percent;
210 static unsigned HOST_WIDE_INT target_c;
211 static unsigned HOST_WIDE_INT target_s;
212 static char target_percent_c[3];
213 static char target_percent_s[3];
214 static char target_percent_s_newline[4];
215 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
216 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
217 static tree do_mpfr_arg2 (tree, tree, tree,
218 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
219 static tree do_mpfr_arg3 (tree, tree, tree, tree,
220 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
221 static tree do_mpfr_sincos (tree, tree, tree);
222 static tree do_mpfr_bessel_n (tree, tree, tree,
223 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
224 const REAL_VALUE_TYPE *, bool);
225 static tree do_mpfr_remquo (tree, tree, tree);
226 static tree do_mpfr_lgamma_r (tree, tree, tree);
227
/* Return true if NAME carries one of the reserved built-in prefixes,
   "__builtin_" or "__sync_".  */

bool
is_builtin_name (const char *name)
{
  static const char builtin_prefix[] = "__builtin_";
  static const char sync_prefix[] = "__sync_";

  if (strncmp (name, builtin_prefix, sizeof builtin_prefix - 1) == 0)
    return true;
  return strncmp (name, sync_prefix, sizeof sync_prefix - 1) == 0;
}
239
240
241 /* Return true if DECL is a function symbol representing a built-in. */
242
243 bool
244 is_builtin_fn (tree decl)
245 {
246 return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
247 }
248
249
250 /* Return true if NODE should be considered for inline expansion regardless
251 of the optimization level. This means whenever a function is invoked with
252 its "internal" name, which normally contains the prefix "__builtin". */
253
254 static bool
255 called_as_built_in (tree node)
256 {
257 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
258 we want the name used to call the function, not the name it
259 will have. */
260 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
261 return is_builtin_name (name);
262 }
263
/* Return the alignment in bits of EXP, an object.
   Don't return more than MAX_ALIGN no matter what, ALIGN is the initial
   guessed alignment e.g. from type alignment.  */

int
get_object_alignment (tree exp, unsigned int align, unsigned int max_align)
{
  unsigned int inner;

  /* INNER tracks the alignment implied by the access path (constant bit
     position and variable offsets); start from the most optimistic value
     and only ever clamp it downward.  */
  inner = max_align;
  if (handled_component_p (exp))
    {
      HOST_WIDE_INT bitsize, bitpos;
      tree offset;
      enum machine_mode mode;
      int unsignedp, volatilep;

      exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
				 &mode, &unsignedp, &volatilep, true);
      /* A nonzero constant bit position guarantees alignment only up to
	 its lowest set bit (BITPOS & -BITPOS isolates that bit).  */
      if (bitpos)
	inner = MIN (inner, (unsigned) (bitpos & -bitpos));
      /* Walk a chain of PLUS_EXPR offset terms, clamping INNER by what
	 each term can guarantee.  */
      while (offset)
	{
	  tree next_offset;

	  if (TREE_CODE (offset) == PLUS_EXPR)
	    {
	      next_offset = TREE_OPERAND (offset, 0);
	      offset = TREE_OPERAND (offset, 1);
	    }
	  else
	    next_offset = NULL;
	  if (host_integerp (offset, 1))
	    {
	      /* Any overflow in calculating offset_bits won't change
		 the alignment.  */
	      unsigned offset_bits
		= ((unsigned) tree_low_cst (offset, 1) * BITS_PER_UNIT);

	      if (offset_bits)
		inner = MIN (inner, (offset_bits & -offset_bits));
	    }
	  else if (TREE_CODE (offset) == MULT_EXPR
		   && host_integerp (TREE_OPERAND (offset, 1), 1))
	    {
	      /* Any overflow in calculating offset_factor won't change
		 the alignment.  */
	      unsigned offset_factor
		= ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
		   * BITS_PER_UNIT);

	      if (offset_factor)
		inner = MIN (inner, (offset_factor & -offset_factor));
	    }
	  else
	    {
	      /* A variable offset we know nothing about: only byte
		 alignment can be assumed.  */
	      inner = MIN (inner, BITS_PER_UNIT);
	      break;
	    }
	  offset = next_offset;
	}
    }
  /* For a CONST_DECL, look at the underlying constant.  */
  if (TREE_CODE (exp) == CONST_DECL)
    exp = DECL_INITIAL (exp);
  if (DECL_P (exp)
      && TREE_CODE (exp) != LABEL_DECL)
    align = MIN (inner, DECL_ALIGN (exp));
#ifdef CONSTANT_ALIGNMENT
  else if (CONSTANT_CLASS_P (exp))
    align = MIN (inner, (unsigned)CONSTANT_ALIGNMENT (exp, align));
#endif
  else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR
	   || TREE_CODE (exp) == INDIRECT_REF)
    align = MIN (TYPE_ALIGN (TREE_TYPE (exp)), inner);
  else
    align = MIN (align, inner);
  return MIN (align, max_align);
}
342
343 /* Returns true iff we can trust that alignment information has been
344 calculated properly. */
345
346 bool
347 can_trust_pointer_alignment (void)
348 {
349 /* We rely on TER to compute accurate alignment information. */
350 return (optimize && flag_tree_ter);
351 }
352
/* Return the alignment in bits of EXP, a pointer valued expression.
   But don't return more than MAX_ALIGN no matter what.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

int
get_pointer_alignment (tree exp, unsigned int max_align)
{
  unsigned int align, inner;

  if (!can_trust_pointer_alignment ())
    return 0;

  if (!POINTER_TYPE_P (TREE_TYPE (exp)))
    return 0;

  /* Start from the declared alignment of the pointed-to type.  */
  align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
  align = MIN (align, max_align);

  /* Peel conversions and pointer arithmetic, refining the estimate as
     we learn more about what EXP really points at.  Each case either
     descends into an operand or returns the best answer so far.  */
  while (1)
    {
      switch (TREE_CODE (exp))
	{
	CASE_CONVERT:
	  exp = TREE_OPERAND (exp, 0);
	  if (! POINTER_TYPE_P (TREE_TYPE (exp)))
	    return align;

	  inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
	  align = MIN (inner, max_align);
	  break;

	case POINTER_PLUS_EXPR:
	  /* If sum of pointer + int, restrict our maximum alignment to that
	     imposed by the integer.  If not, we can't do any better than
	     ALIGN.  */
	  if (! host_integerp (TREE_OPERAND (exp, 1), 1))
	    return align;

	  /* Halve MAX_ALIGN until the constant addend is a multiple of
	     it, so the sum keeps the alignment we claim.  */
	  while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
		  & (max_align / BITS_PER_UNIT - 1))
		 != 0)
	    max_align >>= 1;

	  exp = TREE_OPERAND (exp, 0);
	  break;

	case ADDR_EXPR:
	  /* See what we are pointing at and look at its alignment.  */
	  return get_object_alignment (TREE_OPERAND (exp, 0), align, max_align);

	default:
	  return align;
	}
    }
}
412
/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  tree offset_node;
  HOST_WIDE_INT offset;
  int max;
  const char *ptr;
  location_t loc;

  STRIP_NOPS (src);
  /* For a conditional, the length is known only when both arms agree,
     and only when the condition's side effects may be ignored.  */
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  /* For (e1, e2) the value is that of e2, side effects permitting.  */
  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  if (EXPR_HAS_LOCATION (src))
    loc = EXPR_LOCATION (src);
  else
    loc = input_location;

  src = string_constant (src, &offset_node);
  if (src == 0)
    return NULL_TREE;

  /* MAX is the last valid index; PTR the constant's bytes.  */
  max = TREE_STRING_LENGTH (src) - 1;
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      int i;

      for (i = 0; i < max; i++)
	if (ptr[i] == 0)
	  return NULL_TREE;

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  This would perhaps not be valid if we were dealing
	 with named arrays in addition to literal string constants.  */

      return size_diffop_loc (loc, size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (offset_node == 0)
    offset = 0;
  else if (! host_integerp (offset_node, 0))
    offset = -1;
  else
    offset = tree_low_cst (offset_node, 0);

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (! TREE_NO_WARNING (src))
	{
	  warning_at (loc, 0, "offset outside bounds of constant string");
	  TREE_NO_WARNING (src) = 1;
	}
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return ssize_int (strlen (ptr + offset));
}
517
518 /* Return a char pointer for a C string if it is a string constant
519 or sum of string constant and integer constant. */
520
521 static const char *
522 c_getstr (tree src)
523 {
524 tree offset_node;
525
526 src = string_constant (src, &offset_node);
527 if (src == 0)
528 return 0;
529
530 if (offset_node == 0)
531 return TREE_STRING_POINTER (src);
532 else if (!host_integerp (offset_node, 1)
533 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
534 return 0;
535
536 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
537 }
538
/* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, enum machine_mode mode)
{
  /* C holds up to two host words of the target constant (low, high).  */
  HOST_WIDE_INT c[2];
  HOST_WIDE_INT ch;
  unsigned int i, j;

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);

  c[0] = 0;
  c[1] = 0;
  /* CH doubles as a "have not yet hit the NUL terminator" flag; once a
     zero byte is read, every subsequent target byte stays zero.  */
  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      /* Compute the target bit offset J for source byte I, honouring
	 byte order within words and word order within the constant.  */
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;
      gcc_assert (j < 2 * HOST_BITS_PER_WIDE_INT);

      if (ch)
	ch = (unsigned char) str[i];
      c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }
  return immed_double_const (c[0], c[1], mode);
}
571
572 /* Cast a target constant CST to target CHAR and if that value fits into
573 host char type, return zero and put that value into variable pointed to by
574 P. */
575
576 static int
577 target_char_cast (tree cst, char *p)
578 {
579 unsigned HOST_WIDE_INT val, hostval;
580
581 if (!host_integerp (cst, 1)
582 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
583 return 1;
584
585 val = tree_low_cst (cst, 1);
586 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
587 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
588
589 hostval = val;
590 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
591 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
592
593 if (val != hostval)
594 return 1;
595
596 *p = hostval;
597 return 0;
598 }
599
600 /* Similar to save_expr, but assumes that arbitrary code is not executed
601 in between the multiple evaluations. In particular, we assume that a
602 non-addressable local variable will not be modified. */
603
604 static tree
605 builtin_save_expr (tree exp)
606 {
607 if (TREE_ADDRESSABLE (exp) == 0
608 && (TREE_CODE (exp) == PARM_DECL
609 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp))))
610 return exp;
611
612 return save_expr (exp);
613 }
614
/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;

#ifdef INITIAL_FRAME_ADDRESS_RTX
  /* The target supplies the starting frame address directly.  */
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
#else
  rtx tem;

  /* For a zero count with __builtin_return_address, we don't care what
     frame address we return, because target-specific definitions will
     override us.  Therefore frame pointer elimination is OK, and using
     the soft frame pointer is OK.

     For a nonzero count, or a zero count with __builtin_frame_address,
     we require a stable offset from the current frame pointer to the
     previous one, so we must use the hard frame pointer, and
     we must disable frame pointer elimination.  */
  if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    tem = frame_pointer_rtx;
  else
    {
      tem = hard_frame_pointer_rtx;

      /* Tell reload not to eliminate the frame pointer.  */
      crtl->accesses_prior_frames = 1;
    }
#endif

  /* Some machines need special handling before we can access
     arbitrary frames.  For example, on the SPARC, we must first flush
     all register windows to the stack.  */
#ifdef SETUP_FRAME_ADDRESSES
  if (count > 0)
    SETUP_FRAME_ADDRESSES ();
#endif

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
  if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;
#endif

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
#ifdef DYNAMIC_CHAIN_ADDRESS
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
#ifdef FRAME_ADDR_RTX
    return FRAME_ADDR_RTX (tem);
#else
    return tem;
#endif

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  /* By default the return address is stored one pointer past the frame
     address.  */
  tem = memory_address (Pmode,
			plus_constant (tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
698
699 /* Alias set used for setjmp buffer. */
700 static alias_set_type setjmp_alias_set = -1;
701
702 /* Construct the leading half of a __builtin_setjmp call. Control will
703 return to RECEIVER_LABEL. This is also called directly by the SJLJ
704 exception handling code. */
705
706 void
707 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
708 {
709 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
710 rtx stack_save;
711 rtx mem;
712
713 if (setjmp_alias_set == -1)
714 setjmp_alias_set = new_alias_set ();
715
716 buf_addr = convert_memory_address (Pmode, buf_addr);
717
718 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
719
720 /* We store the frame pointer and the address of receiver_label in
721 the buffer and use the rest of it for the stack save area, which
722 is machine-dependent. */
723
724 mem = gen_rtx_MEM (Pmode, buf_addr);
725 set_mem_alias_set (mem, setjmp_alias_set);
726 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
727
728 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
729 set_mem_alias_set (mem, setjmp_alias_set);
730
731 emit_move_insn (validize_mem (mem),
732 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
733
734 stack_save = gen_rtx_MEM (sa_mode,
735 plus_constant (buf_addr,
736 2 * GET_MODE_SIZE (Pmode)));
737 set_mem_alias_set (stack_save, setjmp_alias_set);
738 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
739
740 /* If there is further processing to do, do it. */
741 #ifdef HAVE_builtin_setjmp_setup
742 if (HAVE_builtin_setjmp_setup)
743 emit_insn (gen_builtin_setjmp_setup (buf_addr));
744 #endif
745
746 /* Tell optimize_save_area_alloca that extra work is going to
747 need to go on during alloca. */
748 cfun->calls_setjmp = 1;
749
750 /* We have a nonlocal label. */
751 cfun->has_nonlocal_label = 1;
752 }
753
/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
{
  rtx chain;

  /* Clobber the FP when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = targetm.calls.static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
#ifdef HAVE_nonlocal_goto
  if (! HAVE_nonlocal_goto)
#endif
    {
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
      /* This might change the hard frame pointer in ways that aren't
	 apparent to early optimization passes, so force a clobber.  */
      emit_clobber (hard_frame_pointer_rtx);
    }

#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      /* If the argument pointer is eliminable in favor of the hard
	 frame pointer, no restore is needed; scan the target's
	 elimination table for that pair.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }
#endif

  /* Give the target a chance to emit receiver-specific code; fall back
     to the nonlocal-goto receiver pattern, then to nothing.  */
#ifdef HAVE_builtin_setjmp_receiver
  if (HAVE_builtin_setjmp_receiver)
    emit_insn (gen_builtin_setjmp_receiver (receiver_label));
  else
#endif
#ifdef HAVE_nonlocal_goto_receiver
    if (HAVE_nonlocal_goto_receiver)
      emit_insn (gen_nonlocal_goto_receiver ());
    else
#endif
      { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}
824
/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack, insn, last;
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to current
     function  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
#ifdef HAVE_builtin_longjmp
  if (HAVE_builtin_longjmp)
    emit_insn (gen_builtin_longjmp (buf_addr));
  else
#endif
    {
      /* The buffer layout mirrors expand_builtin_setjmp_setup: frame
	 pointer, receiver label, then the stack save area.  */
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
#ifdef HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
      else
#endif
	{
	  lab = copy_to_reg (lab);

	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}
912
/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp, insn;

  /* If the arguments don't match (label pointer, save-area pointer),
     return NULL_RTX so the caller falls back to a normal call.  */
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was based
     on the frame pointer.   */
  r_save_area = copy_to_reg (r_save_area);
  /* The save area holds the frame pointer in its first word and the
     stack save data starting at the second word.  */
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

#ifdef HAVE_nonlocal_goto
  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (HAVE_nonlocal_goto)
    emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
#endif
    {
      r_label = copy_to_reg (r_label);

      /* Tell the optimizers that all memory and the frame pointer are
	 clobbered across the jump.  */
      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.
	 This sets the actual hard register used for the frame pointer
	 to the location of the function's incoming static chain info.
	 The non-local goto handler will then adjust it to contain the
	 proper value and reload the argument pointer, if needed.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp, NULL_RTX);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  Stop at a CALL_INSN, since the jump may
     have been emitted as part of a call sequence.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}
997
/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to correspond to the current
   stack pointer.  */

static void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  enum machine_mode sa_mode = Pmode;
  rtx stack_save;


#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    sa_mode = insn_data[(int) CODE_FOR_save_stack_nonlocal].operand[0].mode;
#endif
#ifdef STACK_SAVEAREA_MODE
  /* When defined, STACK_SAVEAREA_MODE takes precedence over the mode
     derived from the save_stack_nonlocal insn pattern above.  */
  sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
#endif

  /* The stack save slot is the third word of the setjmp buffer, after
     the frame pointer and the target label.  */
  stack_save
    = gen_rtx_MEM (sa_mode,
		   memory_address
		   (sa_mode,
		    plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));

#ifdef HAVE_setjmp
  if (HAVE_setjmp)
    emit_insn (gen_setjmp ());
#endif

  emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
}
1031
/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = build_int_cst (NULL_TREE, 3);

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.
     On error, diagnose and fall back to the default value so that
     expansion can continue.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

#ifdef HAVE_prefetch
  if (HAVE_prefetch)
    {
      /* Legitimize the address operand if it does not already satisfy
	 the prefetch pattern's predicate and mode.  */
      if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
	     (op0,
	      insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
	  || (GET_MODE (op0) != Pmode))
	{
	  op0 = convert_memory_address (Pmode, op0);
	  op0 = force_reg (Pmode, op0);
	}
      emit_insn (gen_prefetch (op0, op1, op2));
    }
#endif

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
1113
/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;
  HOST_WIDE_INT off;

  /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
     from its expression, for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     If it is an ADDR_EXPR, use the operand.  Otherwise, dereference it if
     we can.  First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* &obj + CST: remember the constant offset so the attributes can be
     adjusted after they are derived from OBJ itself.  */
  off = 0;
  if (TREE_CODE (exp) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
      && host_integerp (TREE_OPERAND (exp, 1), 0)
      && (off = tree_low_cst (TREE_OPERAND (exp, 1), 0)) > 0)
    exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  else if (TREE_CODE (exp) == ADDR_EXPR)
    exp = TREE_OPERAND (exp, 0);
  else if (POINTER_TYPE_P (TREE_TYPE (exp)))
    exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
  else
    exp = NULL;

  /* Honor attributes derived from exp, except for the alias set
     (as builtin stringops may alias with anything) and the size
     (as stringops may access multiple array elements).  */
  if (exp)
    {
      set_mem_attributes (mem, exp, 0);

      if (off)
	mem = adjust_automodify_address_nv (mem, BLKmode, NULL, off);

      /* Allow the string and memory builtins to overflow from one
	 field into another, see http://gcc.gnu.org/PR23561.
	 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
	 memory accessed by the string or memory builtin will fit
	 within the field.  */
      if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
	{
	  tree mem_expr = MEM_EXPR (mem);
	  HOST_WIDE_INT offset = -1, length = -1;
	  tree inner = exp;

	  /* Strip array indexing and conversions to reach the
	     outermost COMPONENT_REF.  */
	  while (TREE_CODE (inner) == ARRAY_REF
		 || CONVERT_EXPR_P (inner)
		 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
		 || TREE_CODE (inner) == SAVE_EXPR)
	    inner = TREE_OPERAND (inner, 0);

	  gcc_assert (TREE_CODE (inner) == COMPONENT_REF);

	  if (MEM_OFFSET (mem)
	      && CONST_INT_P (MEM_OFFSET (mem)))
	    offset = INTVAL (MEM_OFFSET (mem));

	  if (offset >= 0 && len && host_integerp (len, 0))
	    length = tree_low_cst (len, 0);

	  /* Walk outward through the COMPONENT_REF chain, stopping at
	     the first field proven to contain the whole access.  A
	     negative OFFSET/LENGTH means "unknown" from here on.  */
	  while (TREE_CODE (inner) == COMPONENT_REF)
	    {
	      tree field = TREE_OPERAND (inner, 1);
	      gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
	      gcc_assert (field == TREE_OPERAND (mem_expr, 1));

	      /* Bitfields are generally not byte-addressable.  */
	      gcc_assert (!DECL_BIT_FIELD (field)
			  || ((tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
			       % BITS_PER_UNIT) == 0
			      && host_integerp (DECL_SIZE (field), 0)
			      && (TREE_INT_CST_LOW (DECL_SIZE (field))
				  % BITS_PER_UNIT) == 0));

	      /* If we can prove that the memory starting at XEXP (mem, 0) and
		 ending at XEXP (mem, 0) + LENGTH will fit into this field, we
		 can keep the COMPONENT_REF in MEM_EXPR.  But be careful with
		 fields without DECL_SIZE_UNIT like flexible array members.  */
	      if (length >= 0
		  && DECL_SIZE_UNIT (field)
		  && host_integerp (DECL_SIZE_UNIT (field), 0))
		{
		  HOST_WIDE_INT size
		    = TREE_INT_CST_LOW (DECL_SIZE_UNIT (field));
		  if (offset <= size
		      && length <= size
		      && offset + length <= size)
		    break;
		}

	      if (offset >= 0
		  && host_integerp (DECL_FIELD_OFFSET (field), 0))
		offset += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
			  + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
			    / BITS_PER_UNIT;
	      else
		{
		  offset = -1;
		  length = -1;
		}

	      mem_expr = TREE_OPERAND (mem_expr, 0);
	      inner = TREE_OPERAND (inner, 0);
	    }

	  if (mem_expr == NULL)
	    offset = -1;
	  if (mem_expr != MEM_EXPR (mem))
	    {
	      set_mem_expr (mem, mem_expr);
	      set_mem_offset (mem, offset >= 0 ? GEN_INT (offset) : NULL_RTX);
	    }
	}
      set_mem_alias_set (mem, 0);
      set_mem_size (mem, NULL_RTX);
    }

  return mem;
}
1249 \f
/* Built-in functions to perform an untyped call and return.  */

/* For each register that may be used for calling a function, this
   gives a mode used to copy the register's value.  VOIDmode indicates
   the register is not used for calling a function.  If the machine
   has register windows, this gives only the outbound registers.
   INCOMING_REGNO gives the corresponding inbound register.
   Initialized lazily by apply_args_size.  */
static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];

/* For each register that may be used for returning values, this gives
   a mode used to copy the register's value.  VOIDmode indicates the
   register is not used for returning values.  If the machine has
   register windows, this gives only the outbound registers.
   INCOMING_REGNO gives the corresponding inbound register.
   Initialized lazily by apply_result_size.  */
static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
1265
1266 /* Return the size required for the block returned by __builtin_apply_args,
1267 and initialize apply_args_mode. */
1268
1269 static int
1270 apply_args_size (void)
1271 {
1272 static int size = -1;
1273 int align;
1274 unsigned int regno;
1275 enum machine_mode mode;
1276
1277 /* The values computed by this function never change. */
1278 if (size < 0)
1279 {
1280 /* The first value is the incoming arg-pointer. */
1281 size = GET_MODE_SIZE (Pmode);
1282
1283 /* The second value is the structure value address unless this is
1284 passed as an "invisible" first argument. */
1285 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1286 size += GET_MODE_SIZE (Pmode);
1287
1288 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1289 if (FUNCTION_ARG_REGNO_P (regno))
1290 {
1291 mode = reg_raw_mode[regno];
1292
1293 gcc_assert (mode != VOIDmode);
1294
1295 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1296 if (size % align != 0)
1297 size = CEIL (size, align) * align;
1298 size += GET_MODE_SIZE (mode);
1299 apply_args_mode[regno] = mode;
1300 }
1301 else
1302 {
1303 apply_args_mode[regno] = VOIDmode;
1304 }
1305 }
1306 return size;
1307 }
1308
1309 /* Return the size required for the block returned by __builtin_apply,
1310 and initialize apply_result_mode. */
1311
1312 static int
1313 apply_result_size (void)
1314 {
1315 static int size = -1;
1316 int align, regno;
1317 enum machine_mode mode;
1318
1319 /* The values computed by this function never change. */
1320 if (size < 0)
1321 {
1322 size = 0;
1323
1324 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1325 if (targetm.calls.function_value_regno_p (regno))
1326 {
1327 mode = reg_raw_mode[regno];
1328
1329 gcc_assert (mode != VOIDmode);
1330
1331 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1332 if (size % align != 0)
1333 size = CEIL (size, align) * align;
1334 size += GET_MODE_SIZE (mode);
1335 apply_result_mode[regno] = mode;
1336 }
1337 else
1338 apply_result_mode[regno] = VOIDmode;
1339
1340 /* Allow targets that use untyped_call and untyped_return to override
1341 the size so that machine-specific information can be stored here. */
1342 #ifdef APPLY_RESULT_SIZE
1343 size = APPLY_RESULT_SIZE;
1344 #endif
1345 }
1346 return size;
1347 }
1348
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  enum machine_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = 0;
  nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	/* Keep each slot aligned for its mode, mirroring the layout
	   computed by apply_result_size.  */
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	if (savep)
	  savevec[nelts++] = gen_rtx_SET (VOIDmode, mem, reg);
	else
	  savevec[nelts++] = gen_rtx_SET (VOIDmode, reg, mem);
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
#endif /* HAVE_untyped_call or HAVE_untyped_return */
1379
/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  Returns a pseudo
   holding the address of the saved register block.  The layout here
   must match the one computed by apply_args_size.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  enum machine_mode mode;
  rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
#ifdef STACK_GROWS_DOWNWARD
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  tem
    = force_operand (plus_constant (tem, crtl->args.pretend_args_size),
		     NULL_RTX);
#endif
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
		      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}
1440
/* __builtin_apply_args returns block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;
    rtx seq;

    /* Expand into a temporary sequence so the save code can be moved
       as a whole.  */
    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
	&& REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}
1485
/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.
   FUNCTION is the address of the function to call, ARGUMENTS is the
   address of the block built by __builtin_apply_args, and ARGSIZE is
   the size in bytes of stack argument data to copy.  Returns the
   address (in ptr_mode) of a block holding the callee's return
   registers.  */

static rtx
expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
{
  int size, align, regno;
  enum machine_mode mode;
  rtx incoming_args, result, reg, dest, src, call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;
  rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);

  arguments = convert_memory_address (Pmode, arguments);

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
#ifndef STACK_GROWS_DOWNWARD
  incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
				       incoming_args, 0, OPTAB_LIB_WIDEN);
#endif

  /* Push a new argument block and copy the arguments.  Do not allow
     the (potential) memcpy call below to interfere with our stack
     manipulations.  */
  do_pending_stack_adjust ();
  NO_DEFER_POP;

  /* Save the stack with nonlocal if available.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
  else
#endif
    emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);

  /* Allocate a block of memory onto the stack and copy the memory
     arguments to the outgoing arguments address.  */
  allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);

  /* Set DRAP flag to true, even though allocate_dynamic_stack_space
     may have already set current_function_calls_alloca to true.
     current_function_calls_alloca won't be set if argsize is zero,
     so we have to guarantee need_drap is true here.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  dest = virtual_outgoing_args_rtx;
#ifndef STACK_GROWS_DOWNWARD
  if (CONST_INT_P (argsize))
    dest = plus_constant (dest, -INTVAL (argsize));
  else
    dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
#endif
  dest = gen_rtx_MEM (BLKmode, dest);
  set_mem_align (dest, PARM_BOUNDARY);
  src = gen_rtx_MEM (BLKmode, incoming_args);
  set_mem_align (src, PARM_BOUNDARY);
  emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);

  /* Refer to the argument block.  apply_args_size is called for its
     side effect of initializing apply_args_mode.  */
  apply_args_size ();
  arguments = gen_rtx_MEM (BLKmode, arguments);
  set_mem_align (arguments, PARM_BOUNDARY);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, regno);
	emit_move_insn (reg, adjust_address (arguments, mode, size));
	use_reg (&call_fusage, reg);
	size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value, adjust_address (arguments, Pmode, size));
      emit_move_insn (struct_value, value);
      if (REG_P (struct_value))
	use_reg (&call_fusage, struct_value);
      size += GET_MODE_SIZE (Pmode);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
#ifdef HAVE_untyped_call
  if (HAVE_untyped_call)
    emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
				      result, result_vector (1, result)));
  else
#endif
#ifdef HAVE_call_value
  if (HAVE_call_value)
    {
      rtx valreg = 0;

      /* Locate the unique return register.  It is not possible to
	 express a call that sets more than one return register using
	 call_value; use untyped_call for that.  In fact, untyped_call
	 only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if ((mode = apply_result_mode[regno]) != VOIDmode)
	  {
	    gcc_assert (!valreg); /* HAVE_untyped_call required.  */

	    valreg = gen_rtx_REG (mode, regno);
	  }

      emit_call_insn (GEN_CALL_VALUE (valreg,
				      gen_rtx_MEM (FUNCTION_MODE, function),
				      const0_rtx, NULL_RTX, const0_rtx));

      emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
    }
  else
#endif
    gcc_unreachable ();

  /* Find the CALL insn we just emitted, and attach the register usage
     information.  */
  call_insn = last_call_insn ();
  add_function_usage_to (call_insn, call_fusage);

  /* Restore the stack.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
  else
#endif
    emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);

  OK_DEFER_POP;

  /* Return the address of the result block.  */
  result = copy_addr_to_reg (XEXP (result, 0));
  return convert_memory_address (ptr_mode, result);
}
1649
/* Perform an untyped return.  RESULT is the address of the block of
   saved return registers built by __builtin_apply; reload them and
   jump to the function's return point.  */

static void
expand_builtin_return (rtx result)
{
  int size, align, regno;
  enum machine_mode mode;
  rtx reg;
  rtx call_fusage = 0;

  result = convert_memory_address (Pmode, result);

  /* apply_result_size is called for its side effect of initializing
     apply_result_mode.  */
  apply_result_size ();
  result = gen_rtx_MEM (BLKmode, result);

#ifdef HAVE_untyped_return
  if (HAVE_untyped_return)
    {
      emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
      emit_barrier ();
      return;
    }
#endif

  /* Restore the return value and note that each value is used.  */
  size = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
	emit_move_insn (reg, adjust_address (result, mode, size));

	/* Collect the USEs in a separate sequence so they can all be
	   emitted just before the return jump.  */
	push_to_sequence (call_fusage);
	emit_use (reg);
	call_fusage = get_insns ();
	end_sequence ();
	size += GET_MODE_SIZE (mode);
      }

  /* Put the USE insns before the return.  */
  emit_insn (call_fusage);

  /* Return whatever values was restored by jumping directly to the end
     of the function.  */
  expand_naked_return ();
}
1699
1700 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1701
1702 static enum type_class
1703 type_to_class (tree type)
1704 {
1705 switch (TREE_CODE (type))
1706 {
1707 case VOID_TYPE: return void_type_class;
1708 case INTEGER_TYPE: return integer_type_class;
1709 case ENUMERAL_TYPE: return enumeral_type_class;
1710 case BOOLEAN_TYPE: return boolean_type_class;
1711 case POINTER_TYPE: return pointer_type_class;
1712 case REFERENCE_TYPE: return reference_type_class;
1713 case OFFSET_TYPE: return offset_type_class;
1714 case REAL_TYPE: return real_type_class;
1715 case COMPLEX_TYPE: return complex_type_class;
1716 case FUNCTION_TYPE: return function_type_class;
1717 case METHOD_TYPE: return method_type_class;
1718 case RECORD_TYPE: return record_type_class;
1719 case UNION_TYPE:
1720 case QUAL_UNION_TYPE: return union_type_class;
1721 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1722 ? string_type_class : array_type_class);
1723 case LANG_TYPE: return lang_type_class;
1724 default: return no_type_class;
1725 }
1726 }
1727
1728 /* Expand a call EXP to __builtin_classify_type. */
1729
1730 static rtx
1731 expand_builtin_classify_type (tree exp)
1732 {
1733 if (call_expr_nargs (exp))
1734 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1735 return GEN_INT (no_type_class);
1736 }
1737
/* This helper macro, meant to be used in mathfn_built_in below,
   determines which among a set of three builtin math functions is
   appropriate for a given type mode.  The `F' and `L' cases are
   automatically generated from the `double' case.  It sets the local
   variables fcode/fcodef/fcodel of the enclosing switch.  */
#define CASE_MATHFN(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
  fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
  fcodel = BUILT_IN_MATHFN##L ; break;
/* Similar to above, but appends _R after any F/L suffix, for the
   reentrant variants (e.g. lgamma_r).  */
#define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
  fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
  fcodel = BUILT_IN_MATHFN##L_R ; break;
1751
/* Return mathematic function equivalent to FN but operating directly
   on TYPE, if available.  If IMPLICIT is true find the function in
   implicit_built_in_decls[], otherwise use built_in_decls[].  If we
   can't do the conversion, return zero.  */

static tree
mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit)
{
  tree const *const fn_arr
    = implicit ? implicit_built_in_decls : built_in_decls;
  enum built_in_function fcode, fcodef, fcodel;

  /* Each CASE_MATHFN expands to the three cases for the double, float
     and long double variants and records all three codes.  */
  switch (fn)
    {
      CASE_MATHFN (BUILT_IN_ACOS)
      CASE_MATHFN (BUILT_IN_ACOSH)
      CASE_MATHFN (BUILT_IN_ASIN)
      CASE_MATHFN (BUILT_IN_ASINH)
      CASE_MATHFN (BUILT_IN_ATAN)
      CASE_MATHFN (BUILT_IN_ATAN2)
      CASE_MATHFN (BUILT_IN_ATANH)
      CASE_MATHFN (BUILT_IN_CBRT)
      CASE_MATHFN (BUILT_IN_CEIL)
      CASE_MATHFN (BUILT_IN_CEXPI)
      CASE_MATHFN (BUILT_IN_COPYSIGN)
      CASE_MATHFN (BUILT_IN_COS)
      CASE_MATHFN (BUILT_IN_COSH)
      CASE_MATHFN (BUILT_IN_DREM)
      CASE_MATHFN (BUILT_IN_ERF)
      CASE_MATHFN (BUILT_IN_ERFC)
      CASE_MATHFN (BUILT_IN_EXP)
      CASE_MATHFN (BUILT_IN_EXP10)
      CASE_MATHFN (BUILT_IN_EXP2)
      CASE_MATHFN (BUILT_IN_EXPM1)
      CASE_MATHFN (BUILT_IN_FABS)
      CASE_MATHFN (BUILT_IN_FDIM)
      CASE_MATHFN (BUILT_IN_FLOOR)
      CASE_MATHFN (BUILT_IN_FMA)
      CASE_MATHFN (BUILT_IN_FMAX)
      CASE_MATHFN (BUILT_IN_FMIN)
      CASE_MATHFN (BUILT_IN_FMOD)
      CASE_MATHFN (BUILT_IN_FREXP)
      CASE_MATHFN (BUILT_IN_GAMMA)
      CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
      CASE_MATHFN (BUILT_IN_HUGE_VAL)
      CASE_MATHFN (BUILT_IN_HYPOT)
      CASE_MATHFN (BUILT_IN_ILOGB)
      CASE_MATHFN (BUILT_IN_INF)
      CASE_MATHFN (BUILT_IN_ISINF)
      CASE_MATHFN (BUILT_IN_J0)
      CASE_MATHFN (BUILT_IN_J1)
      CASE_MATHFN (BUILT_IN_JN)
      CASE_MATHFN (BUILT_IN_LCEIL)
      CASE_MATHFN (BUILT_IN_LDEXP)
      CASE_MATHFN (BUILT_IN_LFLOOR)
      CASE_MATHFN (BUILT_IN_LGAMMA)
      CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
      CASE_MATHFN (BUILT_IN_LLCEIL)
      CASE_MATHFN (BUILT_IN_LLFLOOR)
      CASE_MATHFN (BUILT_IN_LLRINT)
      CASE_MATHFN (BUILT_IN_LLROUND)
      CASE_MATHFN (BUILT_IN_LOG)
      CASE_MATHFN (BUILT_IN_LOG10)
      CASE_MATHFN (BUILT_IN_LOG1P)
      CASE_MATHFN (BUILT_IN_LOG2)
      CASE_MATHFN (BUILT_IN_LOGB)
      CASE_MATHFN (BUILT_IN_LRINT)
      CASE_MATHFN (BUILT_IN_LROUND)
      CASE_MATHFN (BUILT_IN_MODF)
      CASE_MATHFN (BUILT_IN_NAN)
      CASE_MATHFN (BUILT_IN_NANS)
      CASE_MATHFN (BUILT_IN_NEARBYINT)
      CASE_MATHFN (BUILT_IN_NEXTAFTER)
      CASE_MATHFN (BUILT_IN_NEXTTOWARD)
      CASE_MATHFN (BUILT_IN_POW)
      CASE_MATHFN (BUILT_IN_POWI)
      CASE_MATHFN (BUILT_IN_POW10)
      CASE_MATHFN (BUILT_IN_REMAINDER)
      CASE_MATHFN (BUILT_IN_REMQUO)
      CASE_MATHFN (BUILT_IN_RINT)
      CASE_MATHFN (BUILT_IN_ROUND)
      CASE_MATHFN (BUILT_IN_SCALB)
      CASE_MATHFN (BUILT_IN_SCALBLN)
      CASE_MATHFN (BUILT_IN_SCALBN)
      CASE_MATHFN (BUILT_IN_SIGNBIT)
      CASE_MATHFN (BUILT_IN_SIGNIFICAND)
      CASE_MATHFN (BUILT_IN_SIN)
      CASE_MATHFN (BUILT_IN_SINCOS)
      CASE_MATHFN (BUILT_IN_SINH)
      CASE_MATHFN (BUILT_IN_SQRT)
      CASE_MATHFN (BUILT_IN_TAN)
      CASE_MATHFN (BUILT_IN_TANH)
      CASE_MATHFN (BUILT_IN_TGAMMA)
      CASE_MATHFN (BUILT_IN_TRUNC)
      CASE_MATHFN (BUILT_IN_Y0)
      CASE_MATHFN (BUILT_IN_Y1)
      CASE_MATHFN (BUILT_IN_YN)

    default:
      return NULL_TREE;
    }

  /* Pick the variant matching TYPE; other real types (e.g. __float128)
     have no builtin equivalent here.  */
  if (TYPE_MAIN_VARIANT (type) == double_type_node)
    return fn_arr[fcode];
  else if (TYPE_MAIN_VARIANT (type) == float_type_node)
    return fn_arr[fcodef];
  else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
    return fn_arr[fcodel];
  else
    return NULL_TREE;
}
1863
1864 /* Like mathfn_built_in_1(), but always use the implicit array. */
1865
1866 tree
1867 mathfn_built_in (tree type, enum built_in_function fn)
1868 {
1869 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1870 }
1871
/* If errno must be maintained, expand the RTL to check if the result,
   TARGET, of a built-in function call, EXP, is NaN, and if so set
   errno to EDOM.  */

static void
expand_errno_check (tree exp, rtx target)
{
  rtx lab = gen_label_rtx ();

  /* Test the result; if it is NaN, set errno=EDOM because
     the argument was not in the domain.  A value compares equal to
     itself iff it is not a NaN, so the branch to LAB is taken for
     every non-NaN result and the errno store below is skipped.  */
  do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
			   NULL_RTX, NULL_RTX, lab,
			   /* The jump is very likely.  */
			   REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));

#ifdef TARGET_EDOM
  /* If this built-in doesn't throw an exception, set errno directly.  */
  if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
    {
#ifdef GEN_ERRNO_RTX
      rtx errno_rtx = GEN_ERRNO_RTX;
#else
      /* Fall back to a plain memory reference to the "errno" symbol.  */
      rtx errno_rtx
	  = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
#endif
      emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
      emit_label (lab);
      return;
    }
#endif

  /* Make sure the library call isn't expanded as a tail call.  */
  CALL_EXPR_TAILCALL (exp) = 0;

  /* We can't set errno=EDOM directly; let the library call do it.
     Pop the arguments right away in case the call gets deleted.  */
  NO_DEFER_POP;
  expand_call (exp, target, 0);
  OK_DEFER_POP;
  emit_label (lab);
}
1914
/* Expand a call to one of the builtin math functions (sqrt, exp, or log).
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, insns;
  tree fndecl = get_callee_fndecl (exp);
  enum machine_mode mode;
  bool errno_set = false;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  /* Map the builtin to its optab and record whether the library
     function may set errno (only relevant when -fmath-errno).  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SQRT):
      /* sqrt only sets EDOM for a negative argument, so skip the
	 errno check when the argument is provably non-negative.  */
      errno_set = ! tree_expr_nonnegative_p (arg);
      builtin_optab = sqrt_optab;
      break;
    CASE_FLT_FN (BUILT_IN_EXP):
      errno_set = true; builtin_optab = exp_optab; break;
    CASE_FLT_FN (BUILT_IN_EXP10):
    CASE_FLT_FN (BUILT_IN_POW10):
      errno_set = true; builtin_optab = exp10_optab; break;
    CASE_FLT_FN (BUILT_IN_EXP2):
      errno_set = true; builtin_optab = exp2_optab; break;
    CASE_FLT_FN (BUILT_IN_EXPM1):
      errno_set = true; builtin_optab = expm1_optab; break;
    CASE_FLT_FN (BUILT_IN_LOGB):
      errno_set = true; builtin_optab = logb_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG):
      errno_set = true; builtin_optab = log_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG10):
      errno_set = true; builtin_optab = log10_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG2):
      errno_set = true; builtin_optab = log2_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG1P):
      errno_set = true; builtin_optab = log1p_optab; break;
    CASE_FLT_FN (BUILT_IN_ASIN):
      builtin_optab = asin_optab; break;
    CASE_FLT_FN (BUILT_IN_ACOS):
      builtin_optab = acos_optab; break;
    CASE_FLT_FN (BUILT_IN_TAN):
      builtin_optab = tan_optab; break;
    CASE_FLT_FN (BUILT_IN_ATAN):
      builtin_optab = atan_optab; break;
    CASE_FLT_FN (BUILT_IN_FLOOR):
      builtin_optab = floor_optab; break;
    CASE_FLT_FN (BUILT_IN_CEIL):
      builtin_optab = ceil_optab; break;
    CASE_FLT_FN (BUILT_IN_TRUNC):
      builtin_optab = btrunc_optab; break;
    CASE_FLT_FN (BUILT_IN_ROUND):
      builtin_optab = round_optab; break;
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
      builtin_optab = nearbyint_optab;
      if (flag_trapping_math)
	break;
      /* Else fallthrough and expand as rint.  */
    CASE_FLT_FN (BUILT_IN_RINT):
      builtin_optab = rint_optab; break;
    CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
      builtin_optab = significand_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* errno handling is pointless unless -fmath-errno and the mode
     actually has NaNs to detect the domain error with.  */
  if (! flag_errno_math || ! HONOR_NANS (mode))
    errno_set = false;

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
    {
      target = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      /* Compute into TARGET.
	 Set TARGET to wherever the result comes back.  */
      target = expand_unop (mode, builtin_optab, op0, target, 0);

      if (target != 0)
	{
	  if (errno_set)
	    expand_errno_check (exp, target);

	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return target;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call to the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  return expand_call (exp, target, target == const0_rtx);
}
2034
/* Expand a call to the builtin binary math functions (pow and atan2).
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, op1, insns;
  int op1_type = REAL_TYPE;
  tree fndecl = get_callee_fndecl (exp);
  tree arg0, arg1;
  enum machine_mode mode;
  bool errno_set = true;

  /* scalbn/scalbln/ldexp take an integer second argument; everything
     else handled here takes two real arguments.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
    CASE_FLT_FN (BUILT_IN_LDEXP):
      op1_type = INTEGER_TYPE;
      /* Fall through to the common break.  */
    default:
      break;
    }

  if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_POW):
      builtin_optab = pow_optab; break;
    CASE_FLT_FN (BUILT_IN_ATAN2):
      builtin_optab = atan2_optab; break;
    CASE_FLT_FN (BUILT_IN_SCALB):
      /* scalb via the optab is only valid for radix-2 formats.  */
      if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
	return 0;
      builtin_optab = scalb_optab; break;
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
      if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
	return 0;
    /* Fall through... */
    CASE_FLT_FN (BUILT_IN_LDEXP):
      builtin_optab = ldexp_optab; break;
    CASE_FLT_FN (BUILT_IN_FMOD):
      builtin_optab = fmod_optab; break;
    CASE_FLT_FN (BUILT_IN_REMAINDER):
    CASE_FLT_FN (BUILT_IN_DREM):
      builtin_optab = remainder_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
    return NULL_RTX;

  target = gen_reg_rtx (mode);

  /* An errno check is only useful with -fmath-errno and when the mode
     has NaNs for expand_errno_check to test against.  */
  if (! flag_errno_math || ! HONOR_NANS (mode))
    errno_set = false;

  /* Always stabilize the argument list.  */
  CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
  CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);

  op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
  op1 = expand_normal (arg1);

  start_sequence ();

  /* Compute into TARGET.
     Set TARGET to wherever the result comes back.  */
  target = expand_binop (mode, builtin_optab, op0, op1,
			 target, 0, OPTAB_DIRECT);

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call to the library function
     with the stabilized argument list.  */
  if (target == 0)
    {
      end_sequence ();
      return expand_call (exp, target, target == const0_rtx);
    }

  if (errno_set)
    expand_errno_check (exp, target);

  /* Output the entire sequence.  */
  insns = get_insns ();
  end_sequence ();
  emit_insn (insns);

  return target;
}
2140
/* Expand a call to the builtin sin and cos math functions.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, insns;
  tree fndecl = get_callee_fndecl (exp);
  enum machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  /* Prefer the combined sincos instruction for either function.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_COS):
      builtin_optab = sincos_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Check if sincos insn is available, otherwise fallback
     to sin or cos insn.  */
  if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_FLT_FN (BUILT_IN_SIN):
	builtin_optab = sin_optab; break;
      CASE_FLT_FN (BUILT_IN_COS):
	builtin_optab = cos_optab; break;
      default:
	gcc_unreachable ();
      }

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
    {
      target = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      /* Compute into TARGET.
	 Set TARGET to wherever the result comes back.  */
      if (builtin_optab == sincos_optab)
	{
	  int result;

	  /* Request only the value slot we need: for sin, TARGET goes
	     in the second slot; for cos, the first (matching the slot
	     ordering used by expand_builtin_sincos below).  */
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    CASE_FLT_FN (BUILT_IN_SIN):
	      result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_COS):
	      result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
	      break;
	    default:
	      gcc_unreachable ();
	    }
	  gcc_assert (result);
	}
      else
	{
	  target = expand_unop (mode, builtin_optab, op0, target, 0);
	}

      if (target != 0)
	{
	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return target;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call to the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  target = expand_call (exp, target, target == const0_rtx);

  return target;
}
2244
2245 /* Given an interclass math builtin decl FNDECL and it's argument ARG
2246 return an RTL instruction code that implements the functionality.
2247 If that isn't possible or available return CODE_FOR_nothing. */
2248
2249 static enum insn_code
2250 interclass_mathfn_icode (tree arg, tree fndecl)
2251 {
2252 bool errno_set = false;
2253 optab builtin_optab = 0;
2254 enum machine_mode mode;
2255
2256 switch (DECL_FUNCTION_CODE (fndecl))
2257 {
2258 CASE_FLT_FN (BUILT_IN_ILOGB):
2259 errno_set = true; builtin_optab = ilogb_optab; break;
2260 CASE_FLT_FN (BUILT_IN_ISINF):
2261 builtin_optab = isinf_optab; break;
2262 case BUILT_IN_ISNORMAL:
2263 case BUILT_IN_ISFINITE:
2264 CASE_FLT_FN (BUILT_IN_FINITE):
2265 case BUILT_IN_FINITED32:
2266 case BUILT_IN_FINITED64:
2267 case BUILT_IN_FINITED128:
2268 case BUILT_IN_ISINFD32:
2269 case BUILT_IN_ISINFD64:
2270 case BUILT_IN_ISINFD128:
2271 /* These builtins have no optabs (yet). */
2272 break;
2273 default:
2274 gcc_unreachable ();
2275 }
2276
2277 /* There's no easy way to detect the case we need to set EDOM. */
2278 if (flag_errno_math && errno_set)
2279 return CODE_FOR_nothing;
2280
2281 /* Optab mode depends on the mode of the input argument. */
2282 mode = TYPE_MODE (TREE_TYPE (arg));
2283
2284 if (builtin_optab)
2285 return optab_handler (builtin_optab, mode)->insn_code;
2286 return CODE_FOR_nothing;
2287 }
2288
/* Expand a call to one of the builtin math functions that operate on
   floating point argument and output an integer result (ilogb, isinf,
   isnan, etc).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_interclass_mathfn (tree exp, rtx target, rtx subtarget)
{
  enum insn_code icode = CODE_FOR_nothing;
  rtx op0;
  tree fndecl = get_callee_fndecl (exp);
  enum machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  icode = interclass_mathfn_icode (arg, fndecl);
  mode = TYPE_MODE (TREE_TYPE (arg));

  if (icode != CODE_FOR_nothing)
    {
      /* Remember the last insn so everything emitted here can be
	 deleted if the expansion attempt fails below.  */
      rtx last = get_last_insn ();
      tree orig_arg = arg;
      /* Make a suitable register to place result in.  */
      if (!target
	  || GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp))
	  || !insn_data[icode].operand[0].predicate (target, GET_MODE (target)))
	target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

      gcc_assert (insn_data[icode].operand[0].predicate
		  (target, GET_MODE (target)));

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      if (mode != GET_MODE (op0))
	op0 = convert_to_mode (mode, op0, 0);

      /* Compute into TARGET.
	 Set TARGET to wherever the result comes back.  */
      if (maybe_emit_unop_insn (icode, target, op0, UNKNOWN))
	return target;
      /* Expansion failed: roll back the insns emitted since LAST and
	 restore the un-stabilized argument before punting to a call.  */
      delete_insns_since (last);
      CALL_EXPR_ARG (exp, 0) = orig_arg;
    }

  return NULL_RTX;
}
2346
/* Expand a call to the builtin sincos math function.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function.  */

static rtx
expand_builtin_sincos (tree exp)
{
  rtx op0, op1, op2, target1, target2;
  enum machine_mode mode;
  tree arg, sinp, cosp;
  int result;
  location_t loc = EXPR_LOCATION (exp);

  /* sincos (x, double *sinp, double *cosp).  */
  if (!validate_arglist (exp, REAL_TYPE,
			 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  sinp = CALL_EXPR_ARG (exp, 1);
  cosp = CALL_EXPR_ARG (exp, 2);

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Check if sincos insn is available, otherwise emit the call.  */
  if (optab_handler (sincos_optab, mode)->insn_code == CODE_FOR_nothing)
    return NULL_RTX;

  target1 = gen_reg_rtx (mode);
  target2 = gen_reg_rtx (mode);

  op0 = expand_normal (arg);
  /* OP1/OP2 are the lvalues *SINP and *COSP to store into.  */
  op1 = expand_normal (build_fold_indirect_ref_loc (loc, sinp));
  op2 = expand_normal (build_fold_indirect_ref_loc (loc, cosp));

  /* Compute into target1 and target2.
     Set TARGET to wherever the result comes back.  TARGET2 takes the
     optab's first value slot and TARGET1 the second, matching the slot
     usage in expand_builtin_mathfn_3 above.  */
  result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
  gcc_assert (result);

  /* Move target1 and target2 to the memory locations indicated
     by op1 and op2.  */
  emit_move_insn (op1, target1);
  emit_move_insn (op2, target2);

  /* sincos returns void, so the expansion's value is a dummy.  */
  return const0_rtx;
}
2395
/* Expand a call to the internal cexpi builtin to the sincos math function.
   EXP is the expression that is a call to the builtin function; if convenient,
   the result should be placed in TARGET.  SUBTARGET may be used as the target
   for computing one of EXP's operands.  */

static rtx
expand_builtin_cexpi (tree exp, rtx target, rtx subtarget)
{
  tree fndecl = get_callee_fndecl (exp);
  tree arg, type;
  enum machine_mode mode;
  rtx op0, op1, op2;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Try expanding via a sincos optab, fall back to emitting a libcall
     to sincos or cexp.  We are sure we have sincos or cexp because cexpi
     is only generated from sincos, cexp or if we have either of them.  */
  if (optab_handler (sincos_optab, mode)->insn_code != CODE_FOR_nothing)
    {
      /* OP2 receives the optab's first value slot (cos) and OP1 the
	 second (sin); see the COMPLEX_EXPR construction at the end.  */
      op1 = gen_reg_rtx (mode);
      op2 = gen_reg_rtx (mode);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      /* Compute into op1 and op2.  */
      expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
    }
  else if (TARGET_HAS_SINCOS)
    {
      tree call, fn = NULL_TREE;
      tree top1, top2;
      rtx op1a, op2a;

      /* Pick the sincos variant matching the cexpi variant's type.  */
      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	fn = built_in_decls[BUILT_IN_SINCOSF];
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	fn = built_in_decls[BUILT_IN_SINCOS];
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	fn = built_in_decls[BUILT_IN_SINCOSL];
      else
	gcc_unreachable ();

      /* Build stack temporaries for the sin and cos results and pass
	 their addresses (as trees) to the sincos libcall.  */
      op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
      op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
      op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
      op2a = copy_to_mode_reg (Pmode, XEXP (op2, 0));
      top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
      top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);

      /* Make sure not to fold the sincos call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
				      call, 3, arg, top1, top2));
    }
  else
    {
      tree call, fn = NULL_TREE, narg;
      tree ctype = build_complex_type (type);

      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	fn = built_in_decls[BUILT_IN_CEXPF];
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	fn = built_in_decls[BUILT_IN_CEXP];
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	fn = built_in_decls[BUILT_IN_CEXPL];
      else
	gcc_unreachable ();

      /* If we don't have a decl for cexp create one.  This is the
	 friendliest fallback if the user calls __builtin_cexpi
	 without full target C99 function support.  */
      if (fn == NULL_TREE)
	{
	  tree fntype;
	  const char *name = NULL;

	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	    name = "cexpf";
	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	    name = "cexp";
	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	    name = "cexpl";

	  fntype = build_function_type_list (ctype, ctype, NULL_TREE);
	  fn = build_fn_decl (name, fntype);
	}

      /* cexpi(x) == cexp(0 + x*i), so pass the complex value 0+arg*i.  */
      narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
			      build_real (type, dconst0), arg);

      /* Make sure not to fold the cexp call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      return expand_expr (build_call_nary (ctype, call, 1, narg),
			  target, VOIDmode, EXPAND_NORMAL);
    }

  /* Now build the proper return type: cos(arg) + sin(arg)*i, i.e.
     OP2 is the real part, OP1 the imaginary part.  */
  return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
			      make_tree (TREE_TYPE (arg), op2),
			      make_tree (TREE_TYPE (arg), op1)),
		      target, VOIDmode, EXPAND_NORMAL);
}
2505
/* Conveniently construct a function call expression.  FNDECL names the
   function to be called, N is the number of arguments, and the "..."
   parameters are the argument expressions.  Unlike build_call_expr
   this doesn't fold the call, hence it will always return a CALL_EXPR.  */

static tree
build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
{
  va_list ap;
  tree fntype = TREE_TYPE (fndecl);
  tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);

  /* build_call_valist consumes the N variadic arguments from AP.  */
  va_start (ap, n);
  fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
  va_end (ap);
  SET_EXPR_LOCATION (fn, loc);
  return fn;
}
2524
/* Expand a call to one of the builtin rounding functions gcc defines
   as an extension (lfloor and lceil).  As these are gcc extensions we
   do not need to worry about setting errno to EDOM.
   If expanding via optab fails, lower expression to (int)(floor(x)).
   EXP is the expression that is a call to the builtin function;
   if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0, insns, tmp;
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fallback_fn;
  tree fallback_fndecl;
  enum machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    gcc_unreachable ();

  arg = CALL_EXPR_ARG (exp, 0);

  /* Record both the optab and the plain floating-point rounding
     builtin to lower to if the optab expansion fails.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_LCEIL):
    CASE_FLT_FN (BUILT_IN_LLCEIL):
      builtin_optab = lceil_optab;
      fallback_fn = BUILT_IN_CEIL;
      break;

    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      builtin_optab = lfloor_optab;
      fallback_fn = BUILT_IN_FLOOR;
      break;

    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  target = gen_reg_rtx (mode);

  /* Wrap the computation of the argument in a SAVE_EXPR, as we may
     need to expand the argument again.  This way, we will not perform
     side-effects more than once.  */
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

  op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

  start_sequence ();

  /* Compute into TARGET.  */
  if (expand_sfix_optab (target, op0, builtin_optab))
    {
      /* Output the entire sequence.  */
      insns = get_insns ();
      end_sequence ();
      emit_insn (insns);
      return target;
    }

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns).  */
  end_sequence ();

  /* Fall back to floating point rounding optab.  */
  fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);

  /* For non-C99 targets we may end up without a fallback fndecl here
     if the user called __builtin_lfloor directly.  In this case emit
     a call to the floor/ceil variants nevertheless.  This should result
     in the best user experience for not full C99 targets.  */
  if (fallback_fndecl == NULL_TREE)
    {
      tree fntype;
      const char *name = NULL;

      /* Map the l*/ll* variant to the matching floor/ceil libm name.  */
      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_LCEIL:
	case BUILT_IN_LLCEIL:
	  name = "ceil";
	  break;
	case BUILT_IN_LCEILF:
	case BUILT_IN_LLCEILF:
	  name = "ceilf";
	  break;
	case BUILT_IN_LCEILL:
	case BUILT_IN_LLCEILL:
	  name = "ceill";
	  break;
	case BUILT_IN_LFLOOR:
	case BUILT_IN_LLFLOOR:
	  name = "floor";
	  break;
	case BUILT_IN_LFLOORF:
	case BUILT_IN_LLFLOORF:
	  name = "floorf";
	  break;
	case BUILT_IN_LFLOORL:
	case BUILT_IN_LLFLOORL:
	  name = "floorl";
	  break;
	default:
	  gcc_unreachable ();
	}

      fntype = build_function_type_list (TREE_TYPE (arg),
					 TREE_TYPE (arg), NULL_TREE);
      fallback_fndecl = build_fn_decl (name, fntype);
    }

  exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);

  tmp = expand_normal (exp);

  /* Truncate the result of floating point optab to integer
     via expand_fix ().  */
  target = gen_reg_rtx (mode);
  expand_fix (target, tmp, 0);

  return target;
}
2652
/* Expand a call to one of the builtin math functions doing integer
   conversion (lrint).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn_2 (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0, insns;
  tree fndecl = get_callee_fndecl (exp);
  tree arg;
  enum machine_mode mode;

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math)
    return NULL_RTX;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    gcc_unreachable ();

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_LLRINT):
      builtin_optab = lrint_optab; break;
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_LLROUND):
      builtin_optab = lround_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  target = gen_reg_rtx (mode);

  /* Wrap the computation of the argument in a SAVE_EXPR, as we may
     need to expand the argument again.  This way, we will not perform
     side-effects more than once.  */
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

  op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

  start_sequence ();

  if (expand_sfix_optab (target, op0, builtin_optab))
    {
      /* Output the entire sequence.  */
      insns = get_insns ();
      end_sequence ();
      emit_insn (insns);
      return target;
    }

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call to the library function
     with the stabilized argument list.  */
  end_sequence ();

  target = expand_call (exp, target, target == const0_rtx);

  return target;
}
2721
2722 /* To evaluate powi(x,n), the floating point value x raised to the
2723 constant integer exponent n, we use a hybrid algorithm that
2724 combines the "window method" with look-up tables. For an
2725 introduction to exponentiation algorithms and "addition chains",
2726 see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
2727 "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
2728 3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
2729 Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998. */
2730
2731 /* Provide a default value for POWI_MAX_MULTS, the maximum number of
2732 multiplications to inline before calling the system library's pow
2733 function. powi(x,n) requires at worst 2*bits(n)-2 multiplications,
2734 so this default never requires calling pow, powf or powl. */
2735
2736 #ifndef POWI_MAX_MULTS
2737 #define POWI_MAX_MULTS (2*HOST_BITS_PER_WIDE_INT-2)
2738 #endif
2739
2740 /* The size of the "optimal power tree" lookup table. All
2741 exponents less than this value are simply looked up in the
2742 powi_table below. This threshold is also used to size the
2743 cache of pseudo registers that hold intermediate results. */
2744 #define POWI_TABLE_SIZE 256
2745
2746 /* The size, in bits of the window, used in the "window method"
2747 exponentiation algorithm. This is equivalent to a radix of
2748 (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method". */
2749 #define POWI_WINDOW_SIZE 3
2750
/* The following table is an efficient representation of an
   "optimal power tree".  For each value, i, the corresponding
   value, j, in the table states than an optimal evaluation
   sequence for calculating pow(x,i) can be found by evaluating
   pow(x,j)*pow(x,i-j).  An optimal power tree for the first
   100 integers is given in Knuth's "Seminumerical algorithms".
   Indexed by the exponent, 0 <= i < POWI_TABLE_SIZE.  */

static const unsigned char powi_table[POWI_TABLE_SIZE] =
  {
      0,   1,   1,   2,   2,   3,   3,   4,  /*   0 -   7 */
      4,   6,   5,   6,   6,  10,   7,   9,  /*   8 -  15 */
      8,  16,   9,  16,  10,  12,  11,  13,  /*  16 -  23 */
     12,  17,  13,  18,  14,  24,  15,  26,  /*  24 -  31 */
     16,  17,  17,  19,  18,  33,  19,  26,  /*  32 -  39 */
     20,  25,  21,  40,  22,  27,  23,  44,  /*  40 -  47 */
     24,  32,  25,  34,  26,  29,  27,  44,  /*  48 -  55 */
     28,  31,  29,  34,  30,  60,  31,  36,  /*  56 -  63 */
     32,  64,  33,  34,  34,  46,  35,  37,  /*  64 -  71 */
     36,  65,  37,  50,  38,  48,  39,  69,  /*  72 -  79 */
     40,  49,  41,  43,  42,  51,  43,  58,  /*  80 -  87 */
     44,  64,  45,  47,  46,  59,  47,  76,  /*  88 -  95 */
     48,  65,  49,  66,  50,  67,  51,  66,  /*  96 - 103 */
     52,  70,  53,  74,  54, 104,  55,  74,  /* 104 - 111 */
     56,  64,  57,  69,  58,  78,  59,  68,  /* 112 - 119 */
     60,  61,  61,  80,  62,  75,  63,  68,  /* 120 - 127 */
     64,  65,  65, 128,  66, 129,  67,  90,  /* 128 - 135 */
     68,  73,  69, 131,  70,  94,  71,  88,  /* 136 - 143 */
     72, 128,  73,  98,  74, 132,  75, 121,  /* 144 - 151 */
     76, 102,  77, 124,  78, 132,  79, 106,  /* 152 - 159 */
     80,  97,  81, 160,  82,  99,  83, 134,  /* 160 - 167 */
     84,  86,  85,  95,  86, 160,  87, 100,  /* 168 - 175 */
     88, 113,  89,  98,  90, 107,  91, 122,  /* 176 - 183 */
     92, 111,  93, 102,  94, 126,  95, 150,  /* 184 - 191 */
     96, 128,  97, 130,  98, 133,  99, 195,  /* 192 - 199 */
    100, 128, 101, 123, 102, 164, 103, 138,  /* 200 - 207 */
    104, 145, 105, 146, 106, 109, 107, 149,  /* 208 - 215 */
    108, 200, 109, 146, 110, 170, 111, 157,  /* 216 - 223 */
    112, 128, 113, 130, 114, 182, 115, 132,  /* 224 - 231 */
    116, 200, 117, 132, 118, 158, 119, 206,  /* 232 - 239 */
    120, 240, 121, 162, 122, 147, 123, 152,  /* 240 - 247 */
    124, 166, 125, 214, 126, 138, 127, 153,  /* 248 - 255 */
  };
2793
2794
2795 /* Return the number of multiplications required to calculate
2796 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2797 subroutine of powi_cost. CACHE is an array indicating
2798 which exponents have already been calculated. */
2799
2800 static int
2801 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
2802 {
2803 /* If we've already calculated this exponent, then this evaluation
2804 doesn't require any additional multiplications. */
2805 if (cache[n])
2806 return 0;
2807
2808 cache[n] = true;
2809 return powi_lookup_cost (n - powi_table[n], cache)
2810 + powi_lookup_cost (powi_table[n], cache) + 1;
2811 }
2812
2813 /* Return the number of multiplications required to calculate
2814 powi(x,n) for an arbitrary x, given the exponent N. This
2815 function needs to be kept in sync with expand_powi below. */
2816
2817 static int
2818 powi_cost (HOST_WIDE_INT n)
2819 {
2820 bool cache[POWI_TABLE_SIZE];
2821 unsigned HOST_WIDE_INT digit;
2822 unsigned HOST_WIDE_INT val;
2823 int result;
2824
2825 if (n == 0)
2826 return 0;
2827
2828 /* Ignore the reciprocal when calculating the cost. */
2829 val = (n < 0) ? -n : n;
2830
2831 /* Initialize the exponent cache. */
2832 memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
2833 cache[1] = true;
2834
2835 result = 0;
2836
2837 while (val >= POWI_TABLE_SIZE)
2838 {
2839 if (val & 1)
2840 {
2841 digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2842 result += powi_lookup_cost (digit, cache)
2843 + POWI_WINDOW_SIZE + 1;
2844 val >>= POWI_WINDOW_SIZE;
2845 }
2846 else
2847 {
2848 val >>= 1;
2849 result++;
2850 }
2851 }
2852
2853 return result + powi_lookup_cost (val, cache);
2854 }
2855
2856 /* Recursive subroutine of expand_powi. This function takes the array,
2857 CACHE, of already calculated exponents and an exponent N and returns
2858 an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE. */
2859
2860 static rtx
2861 expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
2862 {
2863 unsigned HOST_WIDE_INT digit;
2864 rtx target, result;
2865 rtx op0, op1;
2866
2867 if (n < POWI_TABLE_SIZE)
2868 {
2869 if (cache[n])
2870 return cache[n];
2871
2872 target = gen_reg_rtx (mode);
2873 cache[n] = target;
2874
2875 op0 = expand_powi_1 (mode, n - powi_table[n], cache);
2876 op1 = expand_powi_1 (mode, powi_table[n], cache);
2877 }
2878 else if (n & 1)
2879 {
2880 target = gen_reg_rtx (mode);
2881 digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
2882 op0 = expand_powi_1 (mode, n - digit, cache);
2883 op1 = expand_powi_1 (mode, digit, cache);
2884 }
2885 else
2886 {
2887 target = gen_reg_rtx (mode);
2888 op0 = expand_powi_1 (mode, n >> 1, cache);
2889 op1 = op0;
2890 }
2891
2892 result = expand_mult (mode, op0, op1, target, 0);
2893 if (result != target)
2894 emit_move_insn (target, result);
2895 return target;
2896 }
2897
2898 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
2899 floating point operand in mode MODE, and N is the exponent. This
2900 function needs to be kept in sync with powi_cost above. */
2901
2902 static rtx
2903 expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
2904 {
2905 rtx cache[POWI_TABLE_SIZE];
2906 rtx result;
2907
2908 if (n == 0)
2909 return CONST1_RTX (mode);
2910
2911 memset (cache, 0, sizeof (cache));
2912 cache[1] = x;
2913
2914 result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
2915
2916 /* If the original exponent was negative, reciprocate the result. */
2917 if (n < 0)
2918 result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2919 result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2920
2921 return result;
2922 }
2923
2924 /* Fold a builtin function call to pow, powf, or powl into a series of sqrts or
2925 cbrts. Return NULL_RTX if no simplification can be made or expand the tree
2926 if we can simplify it. */
2927 static rtx
2928 expand_builtin_pow_root (location_t loc, tree arg0, tree arg1, tree type,
2929 rtx subtarget)
2930 {
2931 if (TREE_CODE (arg1) == REAL_CST
2932 && !TREE_OVERFLOW (arg1)
2933 && flag_unsafe_math_optimizations)
2934 {
2935 enum machine_mode mode = TYPE_MODE (type);
2936 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
2937 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
2938 REAL_VALUE_TYPE c = TREE_REAL_CST (arg1);
2939 tree op = NULL_TREE;
2940
2941 if (sqrtfn)
2942 {
2943 /* Optimize pow (x, 0.5) into sqrt. */
2944 if (REAL_VALUES_EQUAL (c, dconsthalf))
2945 op = build_call_nofold_loc (loc, sqrtfn, 1, arg0);
2946
2947 else
2948 {
2949 REAL_VALUE_TYPE dconst1_4 = dconst1;
2950 REAL_VALUE_TYPE dconst3_4;
2951 SET_REAL_EXP (&dconst1_4, REAL_EXP (&dconst1_4) - 2);
2952
2953 real_from_integer (&dconst3_4, VOIDmode, 3, 0, 0);
2954 SET_REAL_EXP (&dconst3_4, REAL_EXP (&dconst3_4) - 2);
2955
2956 /* Optimize pow (x, 0.25) into sqrt (sqrt (x)). Assume on most
2957 machines that a builtin sqrt instruction is smaller than a
2958 call to pow with 0.25, so do this optimization even if
2959 -Os. */
2960 if (REAL_VALUES_EQUAL (c, dconst1_4))
2961 {
2962 op = build_call_nofold_loc (loc, sqrtfn, 1, arg0);
2963 op = build_call_nofold_loc (loc, sqrtfn, 1, op);
2964 }
2965
2966 /* Optimize pow (x, 0.75) = sqrt (x) * sqrt (sqrt (x)) unless we
2967 are optimizing for space. */
2968 else if (optimize_insn_for_speed_p ()
2969 && !TREE_SIDE_EFFECTS (arg0)
2970 && REAL_VALUES_EQUAL (c, dconst3_4))
2971 {
2972 tree sqrt1 = build_call_expr_loc (loc, sqrtfn, 1, arg0);
2973 tree sqrt2 = builtin_save_expr (sqrt1);
2974 tree sqrt3 = build_call_expr_loc (loc, sqrtfn, 1, sqrt1);
2975 op = fold_build2_loc (loc, MULT_EXPR, type, sqrt2, sqrt3);
2976 }
2977 }
2978 }
2979
2980 /* Check whether we can do cbrt insstead of pow (x, 1./3.) and
2981 cbrt/sqrts instead of pow (x, 1./6.). */
2982 if (cbrtfn && ! op
2983 && (tree_expr_nonnegative_p (arg0) || !HONOR_NANS (mode)))
2984 {
2985 /* First try 1/3. */
2986 REAL_VALUE_TYPE dconst1_3
2987 = real_value_truncate (mode, dconst_third ());
2988
2989 if (REAL_VALUES_EQUAL (c, dconst1_3))
2990 op = build_call_nofold_loc (loc, cbrtfn, 1, arg0);
2991
2992 /* Now try 1/6. */
2993 else if (optimize_insn_for_speed_p ())
2994 {
2995 REAL_VALUE_TYPE dconst1_6 = dconst1_3;
2996 SET_REAL_EXP (&dconst1_6, REAL_EXP (&dconst1_6) - 1);
2997
2998 if (REAL_VALUES_EQUAL (c, dconst1_6))
2999 {
3000 op = build_call_nofold_loc (loc, sqrtfn, 1, arg0);
3001 op = build_call_nofold_loc (loc, cbrtfn, 1, op);
3002 }
3003 }
3004 }
3005
3006 if (op)
3007 return expand_expr (op, subtarget, mode, EXPAND_NORMAL);
3008 }
3009
3010 return NULL_RTX;
3011 }
3012
/* Expand a call to the pow built-in mathematical function.  Return NULL_RTX if
   a normal call should be emitted rather than expanding the function
   in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_pow (tree exp, rtx target, rtx subtarget)
{
  tree arg0, arg1;
  tree fn, narg0;
  tree type = TREE_TYPE (exp);
  REAL_VALUE_TYPE cint, c, c2;
  HOST_WIDE_INT n;
  rtx op, op2;
  enum machine_mode mode = TYPE_MODE (type);

  if (! validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);

  /* A non-constant (or overflowed) exponent gets only the generic
     two-operand math-function expansion.  */
  if (TREE_CODE (arg1) != REAL_CST
      || TREE_OVERFLOW (arg1))
    return expand_builtin_mathfn_2 (exp, target, subtarget);

  /* Handle constant exponents.  */

  /* For integer valued exponents we can expand to an optimal multiplication
     sequence using expand_powi.  */
  c = TREE_REAL_CST (arg1);
  n = real_to_integer (&c);
  real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
  /* N is only usable if converting it back gives exactly C, i.e. the
     exponent really is a (representable) integer.  */
  if (real_identical (&c, &cint)
      && ((n >= -1 && n <= 2)
	  || (flag_unsafe_math_optimizations
	      && optimize_insn_for_speed_p ()
	      && powi_cost (n) <= POWI_MAX_MULTS)))
    {
      op = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
      if (n != 1)
	{
	  op = force_reg (mode, op);
	  op = expand_powi (op, mode, n);
	}
      return op;
    }

  /* ARG0 may be evaluated more than once below; make that safe.  */
  narg0 = builtin_save_expr (arg0);

  /* If the exponent is not integer valued, check if it is half of an integer.
     In this case we can expand to sqrt (x) * x**(n/2).  */
  fn = mathfn_built_in (type, BUILT_IN_SQRT);
  if (fn != NULL_TREE)
    {
      /* N below is 2*C, so C == N/2.  */
      real_arithmetic (&c2, MULT_EXPR, &c, &dconst2);
      n = real_to_integer (&c2);
      real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
      if (real_identical (&c2, &cint)
	  && ((flag_unsafe_math_optimizations
	       && optimize_insn_for_speed_p ()
	       && powi_cost (n/2) <= POWI_MAX_MULTS)
	      /* Even the c == 0.5 case cannot be done unconditionally
		 when we need to preserve signed zeros, as
		 pow (-0, 0.5) is +0, while sqrt(-0) is -0.  */
	      || (!HONOR_SIGNED_ZEROS (mode) && n == 1)
	      /* For c == 1.5 we can assume that x * sqrt (x) is always
		 smaller than pow (x, 1.5) if sqrt will not be expanded
		 as a call.  */
	      || (n == 3
		  && (optab_handler (sqrt_optab, mode)->insn_code
		      != CODE_FOR_nothing))))
	{
	  tree call_expr = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 1,
						  narg0);
	  /* Use expand_expr in case the newly built call expression
	     was folded to a non-call.  */
	  op = expand_expr (call_expr, subtarget, mode, EXPAND_NORMAL);
	  if (n != 1)
	    {
	      /* Multiply sqrt (x) by x**(|n|/2).  */
	      op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
	      op2 = force_reg (mode, op2);
	      op2 = expand_powi (op2, mode, abs (n / 2));
	      op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
					0, OPTAB_LIB_WIDEN);
	      /* If the original exponent was negative, reciprocate the
		 result.  */
	      if (n < 0)
		op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
				   op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
	    }
	  return op;
	}
    }

  /* Check whether we can do a series of sqrt or cbrt's instead of the pow
     call.  */
  op = expand_builtin_pow_root (EXPR_LOCATION (exp), arg0, arg1, type,
				subtarget);
  if (op)
    return op;

  /* Try if the exponent is a third of an integer.  In this case
     we can expand to x**(n/3) * cbrt(x)**(n%3).  As cbrt (x) is
     different from pow (x, 1./3.) due to rounding and behavior
     with negative x we need to constrain this transformation to
     unsafe math and positive x or finite math.  */
  fn = mathfn_built_in (type, BUILT_IN_CBRT);
  if (fn != NULL_TREE
      && flag_unsafe_math_optimizations
      && (tree_expr_nonnegative_p (arg0)
	  || !HONOR_NANS (mode)))
    {
      REAL_VALUE_TYPE dconst3;
      real_from_integer (&dconst3, VOIDmode, 3, 0, 0);
      /* N is round (3*C); verify below that N/3 converts back to
	 exactly C, i.e. the exponent really is N/3.  */
      real_arithmetic (&c2, MULT_EXPR, &c, &dconst3);
      real_round (&c2, mode, &c2);
      n = real_to_integer (&c2);
      real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
      real_arithmetic (&c2, RDIV_EXPR, &cint, &dconst3);
      real_convert (&c2, mode, &c2);
      if (real_identical (&c2, &c)
	  && ((optimize_insn_for_speed_p ()
	       && powi_cost (n/3) <= POWI_MAX_MULTS)
	      || n == 1))
	{
	  tree call_expr = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 1,
						  narg0);
	  op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
	  /* cbrt(x) squared supplies the |n| % 3 == 2 factor.  */
	  if (abs (n) % 3 == 2)
	    op = expand_simple_binop (mode, MULT, op, op, op,
				      0, OPTAB_LIB_WIDEN);
	  if (n != 1)
	    {
	      op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
	      op2 = force_reg (mode, op2);
	      op2 = expand_powi (op2, mode, abs (n / 3));
	      op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
					0, OPTAB_LIB_WIDEN);
	      /* If the original exponent was negative, reciprocate the
		 result.  */
	      if (n < 0)
		op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
				   op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
	    }
	  return op;
	}
    }

  /* Fall back to optab expansion.  */
  return expand_builtin_mathfn_2 (exp, target, subtarget);
}
3165
3166 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
3167 a normal call should be emitted rather than expanding the function
3168 in-line. EXP is the expression that is a call to the builtin
3169 function; if convenient, the result should be placed in TARGET. */
3170
3171 static rtx
3172 expand_builtin_powi (tree exp, rtx target, rtx subtarget)
3173 {
3174 tree arg0, arg1;
3175 rtx op0, op1;
3176 enum machine_mode mode;
3177 enum machine_mode mode2;
3178
3179 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
3180 return NULL_RTX;
3181
3182 arg0 = CALL_EXPR_ARG (exp, 0);
3183 arg1 = CALL_EXPR_ARG (exp, 1);
3184 mode = TYPE_MODE (TREE_TYPE (exp));
3185
3186 /* Handle constant power. */
3187
3188 if (TREE_CODE (arg1) == INTEGER_CST
3189 && !TREE_OVERFLOW (arg1))
3190 {
3191 HOST_WIDE_INT n = TREE_INT_CST_LOW (arg1);
3192
3193 /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
3194 Otherwise, check the number of multiplications required. */
3195 if ((TREE_INT_CST_HIGH (arg1) == 0
3196 || TREE_INT_CST_HIGH (arg1) == -1)
3197 && ((n >= -1 && n <= 2)
3198 || (optimize_insn_for_speed_p ()
3199 && powi_cost (n) <= POWI_MAX_MULTS)))
3200 {
3201 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
3202 op0 = force_reg (mode, op0);
3203 return expand_powi (op0, mode, n);
3204 }
3205 }
3206
3207 /* Emit a libcall to libgcc. */
3208
3209 /* Mode of the 2nd argument must match that of an int. */
3210 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
3211
3212 if (target == NULL_RTX)
3213 target = gen_reg_rtx (mode);
3214
3215 op0 = expand_expr (arg0, subtarget, mode, EXPAND_NORMAL);
3216 if (GET_MODE (op0) != mode)
3217 op0 = convert_to_mode (mode, op0, 0);
3218 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
3219 if (GET_MODE (op1) != mode2)
3220 op1 = convert_to_mode (mode2, op1, 0);
3221
3222 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
3223 target, LCT_CONST, mode, 2,
3224 op0, mode, op1, mode2);
3225
3226 return target;
3227 }
3228
/* Expand expression EXP which is a call to the strlen builtin.  Return
   NULL_RTX if we failed the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strlen (tree exp, rtx target,
		       enum machine_mode target_mode)
{
  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      rtx pat;
      tree len;
      tree src = CALL_EXPR_ARG (exp, 0);
      rtx result, src_reg, char_rtx, before_strlen;
      enum machine_mode insn_mode = target_mode, char_mode;
      enum insn_code icode = CODE_FOR_nothing;
      int align;

      /* If the length can be computed at compile-time, return it.  */
      len = c_strlen (src, 0);
      if (len)
	return expand_expr (len, target, target_mode, EXPAND_NORMAL);

      /* If the length can be computed at compile-time and is constant
	 integer, but there are side-effects in src, evaluate
	 src for side-effects, then return len.
	 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
	 can be optimized into: i++; x = 3;  */
      len = c_strlen (src, 1);
      if (len && TREE_CODE (len) == INTEGER_CST)
	{
	  expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
	  return expand_expr (len, target, target_mode, EXPAND_NORMAL);
	}

      align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;

      /* If SRC is not a pointer type, don't do this operation inline.  */
      if (align == 0)
	return NULL_RTX;

      /* Bail out if we can't compute strlen in the right mode.  Try
	 successively wider modes until a strlen pattern is found.  */
      while (insn_mode != VOIDmode)
	{
	  icode = optab_handler (strlen_optab, insn_mode)->insn_code;
	  if (icode != CODE_FOR_nothing)
	    break;

	  insn_mode = GET_MODE_WIDER_MODE (insn_mode);
	}
      if (insn_mode == VOIDmode)
	return NULL_RTX;

      /* Make a place to write the result of the instruction.  */
      result = target;
      if (! (result != 0
	     && REG_P (result)
	     && GET_MODE (result) == insn_mode
	     && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	result = gen_reg_rtx (insn_mode);

      /* Make a place to hold the source address.  We will not expand
	 the actual source until we are sure that the expansion will
	 not fail -- there are trees that cannot be expanded twice.  */
      src_reg = gen_reg_rtx (Pmode);

      /* Mark the beginning of the strlen sequence so we can emit the
	 source operand later.  */
      before_strlen = get_last_insn ();

      /* Operand 2 of the strlen pattern is the character searched for
	 (NUL); force it into whatever the pattern's predicate wants.  */
      char_rtx = const0_rtx;
      char_mode = insn_data[(int) icode].operand[2].mode;
      if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
							    char_mode))
	char_rtx = copy_to_mode_reg (char_mode, char_rtx);

      pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
			     char_rtx, GEN_INT (align));
      if (! pat)
	return NULL_RTX;
      emit_insn (pat);

      /* Now that we are assured of success, expand the source.  */
      start_sequence ();
      pat = expand_expr (src, src_reg, ptr_mode, EXPAND_NORMAL);
      if (pat != src_reg)
	emit_move_insn (src_reg, pat);
      pat = get_insns ();
      end_sequence ();

      /* Splice the source-address computation in before the strlen
	 insn emitted above.  */
      if (before_strlen)
	emit_insn_after (pat, before_strlen);
      else
	emit_insn_before (pat, get_insns ());

      /* Return the value in the proper mode for this function.  */
      if (GET_MODE (result) == target_mode)
	target = result;
      else if (target != 0)
	convert_move (target, result, 0);
      else
	target = convert_to_mode (target_mode, result, 0);

      return target;
    }
}
3337
3338 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3339 bytes from constant string DATA + OFFSET and return it as target
3340 constant. */
3341
3342 static rtx
3343 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3344 enum machine_mode mode)
3345 {
3346 const char *str = (const char *) data;
3347
3348 gcc_assert (offset >= 0
3349 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3350 <= strlen (str) + 1));
3351
3352 return c_readstr (str + offset, mode);
3353 }
3354
/* Expand a call EXP to the memcpy builtin.
   Return NULL_RTX if we failed, the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_memcpy (tree exp, rtx target)
{
  if (!validate_arglist (exp,
 			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      tree len = CALL_EXPR_ARG (exp, 2);
      const char *src_str;
      unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
      unsigned int dest_align
	= get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
      rtx dest_mem, src_mem, dest_addr, len_rtx;
      HOST_WIDE_INT expected_size = -1;
      unsigned int expected_align = 0;

      /* If DEST is not a pointer type, call the normal function.  */
      if (dest_align == 0)
	return NULL_RTX;

      /* If either SRC is not a pointer type, don't do this
	 operation in-line.  */
      if (src_align == 0)
	return NULL_RTX;

      /* Profile feedback may supply an expected alignment and block
	 size for this particular copy.  */
      if (currently_expanding_gimple_stmt)
        stringop_block_profile (currently_expanding_gimple_stmt,
				&expected_align, &expected_size);

      if (expected_align < dest_align)
	expected_align = dest_align;
      dest_mem = get_memory_rtx (dest, len);
      set_mem_align (dest_mem, dest_align);
      len_rtx = expand_normal (len);
      src_str = c_getstr (src);

      /* If SRC is a string constant and block move would be done
	 by pieces, we can avoid loading the string from memory
	 and only store the computed constants.  */
      if (src_str
	  && CONST_INT_P (len_rtx)
	  && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
	  && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
				  CONST_CAST (char *, src_str),
				  dest_align, false))
	{
	  dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
				      builtin_memcpy_read_str,
				      CONST_CAST (char *, src_str),
				      dest_align, false, 0);
	  dest_mem = force_operand (XEXP (dest_mem, 0), target);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}

      src_mem = get_memory_rtx (src, len);
      set_mem_align (src_mem, src_align);

      /* Copy word part most expediently.  */
      dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
				         CALL_EXPR_TAILCALL (exp)
					 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
					 expected_align, expected_size);

      /* memcpy returns DEST; materialize its address if the block
	 move did not already produce it.  */
      if (dest_addr == 0)
	{
	  dest_addr = force_operand (XEXP (dest_mem, 0), target);
	  dest_addr = convert_memory_address (ptr_mode, dest_addr);
	}
      return dest_addr;
    }
}
3435
3436 /* Expand a call EXP to the mempcpy builtin.
3437 Return NULL_RTX if we failed; the caller should emit a normal call,
3438 otherwise try to get the result in TARGET, if convenient (and in
3439 mode MODE if that's convenient). If ENDP is 0 return the
3440 destination pointer, if ENDP is 1 return the end pointer ala
3441 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3442 stpcpy. */
3443
3444 static rtx
3445 expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3446 {
3447 if (!validate_arglist (exp,
3448 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3449 return NULL_RTX;
3450 else
3451 {
3452 tree dest = CALL_EXPR_ARG (exp, 0);
3453 tree src = CALL_EXPR_ARG (exp, 1);
3454 tree len = CALL_EXPR_ARG (exp, 2);
3455 return expand_builtin_mempcpy_args (dest, src, len,
3456 target, mode, /*endp=*/ 1);
3457 }
3458 }
3459
/* Helper function to do the actual work for expand_builtin_mempcpy.  The
   arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_mempcpy.  */

static rtx
expand_builtin_mempcpy_args (tree dest, tree src, tree len,
			     rtx target, enum machine_mode mode, int endp)
{
  /* If return value is ignored, transform mempcpy into memcpy.  */
  if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_MEMCPY])
    {
      tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
      tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
					   dest, src, len);
      return expand_expr (result, target, mode, EXPAND_NORMAL);
    }
  else
    {
      const char *src_str;
      unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
      unsigned int dest_align
	= get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
      rtx dest_mem, src_mem, len_rtx;

      /* If either SRC or DEST is not a pointer type, don't do this
	 operation in-line.  */
      if (dest_align == 0 || src_align == 0)
	return NULL_RTX;

      /* If LEN is not constant, call the normal function.  */
      if (! host_integerp (len, 1))
	return NULL_RTX;

      len_rtx = expand_normal (len);
      src_str = c_getstr (src);

      /* If SRC is a string constant and block move would be done
	 by pieces, we can avoid loading the string from memory
	 and only store the computed constants.  */
      if (src_str
	  && CONST_INT_P (len_rtx)
	  && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
	  && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
				  CONST_CAST (char *, src_str),
				  dest_align, false))
	{
	  dest_mem = get_memory_rtx (dest, len);
	  set_mem_align (dest_mem, dest_align);
	  /* ENDP selects whether store_by_pieces hands back the start
	     pointer, the end pointer, or the end pointer minus one.  */
	  dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
				      builtin_memcpy_read_str,
				      CONST_CAST (char *, src_str),
				      dest_align, false, endp);
	  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}

      /* Otherwise a constant-length copy may still be cheap enough to
	 move by pieces.  */
      if (CONST_INT_P (len_rtx)
	  && can_move_by_pieces (INTVAL (len_rtx),
				 MIN (dest_align, src_align)))
	{
	  dest_mem = get_memory_rtx (dest, len);
	  set_mem_align (dest_mem, dest_align);
	  src_mem = get_memory_rtx (src, len);
	  set_mem_align (src_mem, src_align);
	  dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
				     MIN (dest_align, src_align), endp);
	  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}

      return NULL_RTX;
    }
}
3537
#ifndef HAVE_movstr
# define HAVE_movstr 0
# define CODE_FOR_movstr CODE_FOR_nothing
#endif

/* Expand into a movstr instruction, if one is available.  Return NULL_RTX if
   we failed, the caller should emit a normal call, otherwise try to
   get the result in TARGET, if convenient.  If ENDP is 0 return the
   destination pointer, if ENDP is 1 return the end pointer ala
   mempcpy, and if ENDP is 2 return the end pointer minus one ala
   stpcpy.  */

static rtx
expand_movstr (tree dest, tree src, rtx target, int endp)
{
  rtx end;
  rtx dest_mem;
  rtx src_mem;
  rtx insn;
  const struct insn_data_d * data;

  if (!HAVE_movstr)
    return NULL_RTX;

  dest_mem = get_memory_rtx (dest, NULL);
  src_mem = get_memory_rtx (src, NULL);
  data = insn_data + CODE_FOR_movstr;
  if (!endp)
    {
      /* strcpy-style: the caller wants DEST back, so latch its address
	 into TARGET now; END only receives the pattern's output.  */
      target = force_reg (Pmode, XEXP (dest_mem, 0));
      dest_mem = replace_equiv_address (dest_mem, target);
      end = gen_reg_rtx (Pmode);
    }
  else
    {
      /* stpcpy/mempcpy-style: the pattern's output operand is the value
	 we return, so aim it at TARGET when TARGET is usable.  */
      if (target == 0
	  || target == const0_rtx
	  || ! (*data->operand[0].predicate) (target, Pmode))
	{
	  end = gen_reg_rtx (Pmode);
	  if (target != const0_rtx)
	    target = end;
	}
      else
	end = target;
    }

  /* Some movstr patterns specify a mode for operand 0; honor it.  */
  if (data->operand[0].mode != VOIDmode)
    end = gen_lowpart (data->operand[0].mode, end);

  insn = data->genfun (end, dest_mem, src_mem);

  gcc_assert (insn);

  emit_insn (insn);

  /* movstr is supposed to set end to the address of the NUL
     terminator.  If the caller requested a mempcpy-like return value,
     adjust it.  */
  if (endp == 1 && target != const0_rtx)
    {
      rtx tem = plus_constant (gen_lowpart (GET_MODE (target), end), 1);
      emit_move_insn (target, force_operand (tem, NULL_RTX));
    }

  return target;
}
3605
3606 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3607 NULL_RTX if we failed the caller should emit a normal call, otherwise
3608 try to get the result in TARGET, if convenient (and in mode MODE if that's
3609 convenient). */
3610
3611 static rtx
3612 expand_builtin_strcpy (tree exp, rtx target)
3613 {
3614 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3615 {
3616 tree dest = CALL_EXPR_ARG (exp, 0);
3617 tree src = CALL_EXPR_ARG (exp, 1);
3618 return expand_builtin_strcpy_args (dest, src, target);
3619 }
3620 return NULL_RTX;
3621 }
3622
3623 /* Helper function to do the actual work for expand_builtin_strcpy. The
3624 arguments to the builtin_strcpy call DEST and SRC are broken out
3625 so that this can also be called without constructing an actual CALL_EXPR.
3626 The other arguments and return value are the same as for
3627 expand_builtin_strcpy. */
3628
3629 static rtx
3630 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3631 {
3632 return expand_movstr (dest, src, target, /*endp=*/0);
3633 }
3634
/* Expand a call EXP to the stpcpy builtin.
   Return NULL_RTX if we failed the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
{
  tree dst, src;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  /* If return value is ignored, transform stpcpy into strcpy.  */
  if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_STRCPY])
    {
      tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
      tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
      return expand_expr (result, target, mode, EXPAND_NORMAL);
    }
  else
    {
      tree len, lenp1;
      rtx ret;

      /* Ensure we get an actual string whose length can be evaluated at
	 compile-time, not an expression containing a string.  This is
	 because the latter will potentially produce pessimized code
	 when used to produce the return value.  */
      if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
	return expand_movstr (dst, src, target, /*endp=*/2);

      /* stpcpy copies strlen (SRC) + 1 bytes and returns DST +
	 strlen (SRC), which is what mempcpy with ENDP == 2 yields.  */
      lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
      ret = expand_builtin_mempcpy_args (dst, src, lenp1,
					 target, mode, /*endp=*/2);

      if (ret)
	return ret;

      if (TREE_CODE (len) == INTEGER_CST)
	{
	  rtx len_rtx = expand_normal (len);

	  if (CONST_INT_P (len_rtx))
	    {
	      /* Fall back to strcpy, computing the return value as
		 DST + LEN by hand.  */
	      ret = expand_builtin_strcpy_args (dst, src, target);

	      if (ret)
		{
		  if (! target)
		    {
		      if (mode != VOIDmode)
			target = gen_reg_rtx (mode);
		      else
			target = gen_reg_rtx (GET_MODE (ret));
		    }
		  if (GET_MODE (target) != GET_MODE (ret))
		    ret = gen_lowpart (GET_MODE (target), ret);

		  ret = plus_constant (ret, INTVAL (len_rtx));
		  ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
		  gcc_assert (ret);

		  return target;
		}
	    }
	}

      return expand_movstr (dst, src, target, /*endp=*/2);
    }
}
3710
3711 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3712 bytes from constant string DATA + OFFSET and return it as target
3713 constant. */
3714
3715 rtx
3716 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3717 enum machine_mode mode)
3718 {
3719 const char *str = (const char *) data;
3720
3721 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3722 return const0_rtx;
3723
3724 return c_readstr (str + offset, mode);
3725 }
3726
/* Expand expression EXP, which is a call to the strncpy builtin.  Return
   NULL_RTX if we failed the caller should emit a normal call.  */

static rtx
expand_builtin_strncpy (tree exp, rtx target)
{
  location_t loc = EXPR_LOCATION (exp);

  if (validate_arglist (exp,
 			POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      tree len = CALL_EXPR_ARG (exp, 2);
      tree slen = c_strlen (src, 1);

      /* We must be passed a constant len and src parameter.  */
      if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
	return NULL_RTX;

      /* SLEN now also counts the terminating NUL.  */
      slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

      /* We're required to pad with trailing zeros if the requested
	 len is greater than strlen(s2)+1.  In that case try to
	 use store_by_pieces, if it fails, punt.  */
      if (tree_int_cst_lt (slen, len))
	{
	  unsigned int dest_align
	    = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
	  const char *p = c_getstr (src);
	  rtx dest_mem;

	  if (!p || dest_align == 0 || !host_integerp (len, 1)
	      || !can_store_by_pieces (tree_low_cst (len, 1),
				       builtin_strncpy_read_str,
				       CONST_CAST (char *, p),
				       dest_align, false))
	    return NULL_RTX;

	  /* builtin_strncpy_read_str supplies zeros past the NUL, so
	     the required zero padding comes out of store_by_pieces.  */
	  dest_mem = get_memory_rtx (dest, len);
	  store_by_pieces (dest_mem, tree_low_cst (len, 1),
			   builtin_strncpy_read_str,
			   CONST_CAST (char *, p), dest_align, false, 0);
	  dest_mem = force_operand (XEXP (dest_mem, 0), target);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}
    }
  return NULL_RTX;
}
3777
3778 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3779 bytes from constant string DATA + OFFSET and return it as target
3780 constant. */
3781
3782 rtx
3783 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3784 enum machine_mode mode)
3785 {
3786 const char *c = (const char *) data;
3787 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3788
3789 memset (p, *c, GET_MODE_SIZE (mode));
3790
3791 return c_readstr (p, mode);
3792 }
3793
3794 /* Callback routine for store_by_pieces. Return the RTL of a register
3795 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3796 char value given in the RTL register data. For example, if mode is
3797 4 bytes wide, return the RTL for 0x01010101*data. */
3798
3799 static rtx
3800 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3801 enum machine_mode mode)
3802 {
3803 rtx target, coeff;
3804 size_t size;
3805 char *p;
3806
3807 size = GET_MODE_SIZE (mode);
3808 if (size == 1)
3809 return (rtx) data;
3810
3811 p = XALLOCAVEC (char, size);
3812 memset (p, 1, size);
3813 coeff = c_readstr (p, mode);
3814
3815 target = convert_to_mode (mode, (rtx) data, 1);
3816 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3817 return force_reg (mode, target);
3818 }
3819
3820 /* Expand expression EXP, which is a call to the memset builtin. Return
3821 NULL_RTX if we failed the caller should emit a normal call, otherwise
3822 try to get the result in TARGET, if convenient (and in mode MODE if that's
3823 convenient). */
3824
3825 static rtx
3826 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3827 {
3828 if (!validate_arglist (exp,
3829 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3830 return NULL_RTX;
3831 else
3832 {
3833 tree dest = CALL_EXPR_ARG (exp, 0);
3834 tree val = CALL_EXPR_ARG (exp, 1);
3835 tree len = CALL_EXPR_ARG (exp, 2);
3836 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3837 }
3838 }
3839
/* Helper function to do the actual work for expand_builtin_memset.  The
   arguments to the builtin_memset call DEST, VAL, and LEN are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_memset.  Three strategies are tried in turn:
   store_by_pieces, the target's setmem pattern, and (for VAL == 0)
   the generic block-clear expander; on failure we emit an explicit
   library call at the do_libcall label.  */

static rtx
expand_builtin_memset_args (tree dest, tree val, tree len,
			    rtx target, enum machine_mode mode, tree orig_exp)
{
  tree fndecl, fn;
  enum built_in_function fcode;
  char c;
  unsigned int dest_align;
  rtx dest_mem, dest_addr, len_rtx;
  HOST_WIDE_INT expected_size = -1;
  unsigned int expected_align = 0;

  dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);

  /* If DEST is not a pointer type, don't do this operation in-line.  */
  if (dest_align == 0)
    return NULL_RTX;

  /* Value-profiling may supply an expected block size and alignment
     for the statement currently being expanded.  */
  if (currently_expanding_gimple_stmt)
    stringop_block_profile (currently_expanding_gimple_stmt,
			    &expected_align, &expected_size);

  if (expected_align < dest_align)
    expected_align = dest_align;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      /* Evaluate and ignore VAL in case it has side-effects.  */
      expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
      return expand_expr (dest, target, mode, EXPAND_NORMAL);
    }

  /* Stabilize the arguments in case we fail.  */
  dest = builtin_save_expr (dest);
  val = builtin_save_expr (val);
  len = builtin_save_expr (len);

  len_rtx = expand_normal (len);
  dest_mem = get_memory_rtx (dest, len);

  /* Case 1: the fill value is not a compile-time constant.  */
  if (TREE_CODE (val) != INTEGER_CST)
    {
      rtx val_rtx;

      val_rtx = expand_normal (val);
      val_rtx = convert_to_mode (TYPE_MODE (unsigned_char_type_node),
				 val_rtx, 0);

      /* Assume that we can memset by pieces if we can store
       * the coefficients by pieces (in the required modes).
       * We can't pass builtin_memset_gen_str as that emits RTL.  */
      c = 1;
      if (host_integerp (len, 1)
	  && can_store_by_pieces (tree_low_cst (len, 1),
				  builtin_memset_read_str, &c, dest_align,
				  true))
	{
	  val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
			       val_rtx);
	  store_by_pieces (dest_mem, tree_low_cst (len, 1),
			   builtin_memset_gen_str, val_rtx, dest_align,
			   true, 0);
	}
      else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
					dest_align, expected_align,
					expected_size))
	goto do_libcall;

      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  /* Case 2: constant fill value.  Reject values that don't fit in a
     target char.  */
  if (target_char_cast (val, &c))
    goto do_libcall;

  if (c)
    {
      if (host_integerp (len, 1)
	  && can_store_by_pieces (tree_low_cst (len, 1),
				  builtin_memset_read_str, &c, dest_align,
				  true))
	store_by_pieces (dest_mem, tree_low_cst (len, 1),
			 builtin_memset_read_str, &c, dest_align, true, 0);
      else if (!set_storage_via_setmem (dest_mem, len_rtx, GEN_INT (c),
					dest_align, expected_align,
					expected_size))
	goto do_libcall;

      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  /* Case 3: fill value is constant zero; use the block-clear expander.  */
  set_mem_align (dest_mem, dest_align);
  dest_addr = clear_storage_hints (dest_mem, len_rtx,
				   CALL_EXPR_TAILCALL (orig_exp)
				   ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
				   expected_align, expected_size);

  if (dest_addr == 0)
    {
      dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_addr = convert_memory_address (ptr_mode, dest_addr);
    }

  return dest_addr;

 do_libcall:
  /* Inline expansion failed: emit an explicit call to the function we
     were originally expanding (memset or bzero).  */
  fndecl = get_callee_fndecl (orig_exp);
  fcode = DECL_FUNCTION_CODE (fndecl);
  if (fcode == BUILT_IN_MEMSET)
    fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
				dest, val, len);
  else if (fcode == BUILT_IN_BZERO)
    fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
				dest, len);
  else
    gcc_unreachable ();
  gcc_assert (TREE_CODE (fn) == CALL_EXPR);
  CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
  return expand_call (fn, target, target == const0_rtx);
}
3970
3971 /* Expand expression EXP, which is a call to the bzero builtin. Return
3972 NULL_RTX if we failed the caller should emit a normal call. */
3973
3974 static rtx
3975 expand_builtin_bzero (tree exp)
3976 {
3977 tree dest, size;
3978 location_t loc = EXPR_LOCATION (exp);
3979
3980 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3981 return NULL_RTX;
3982
3983 dest = CALL_EXPR_ARG (exp, 0);
3984 size = CALL_EXPR_ARG (exp, 1);
3985
3986 /* New argument list transforming bzero(ptr x, int y) to
3987 memset(ptr x, int 0, size_t y). This is done this way
3988 so that if it isn't expanded inline, we fallback to
3989 calling bzero instead of memset. */
3990
3991 return expand_builtin_memset_args (dest, integer_zero_node,
3992 fold_convert_loc (loc, sizetype, size),
3993 const0_rtx, VOIDmode, exp);
3994 }
3995
/* Expand expression EXP, which is a call to the memcmp built-in function.
   Return NULL_RTX if we failed and the
   caller should emit a normal call, otherwise try to get the result in
   TARGET, if convenient (and in mode MODE, if that's convenient).  */

static rtx
expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
		       ATTRIBUTE_UNUSED enum machine_mode mode)
{
  location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);

  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

/* Inline expansion is only possible when the target provides a
   cmpmem or cmpstrn instruction pattern.  */
#if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
  {
    rtx arg1_rtx, arg2_rtx, arg3_rtx;
    rtx result;
    rtx insn;
    tree arg1 = CALL_EXPR_ARG (exp, 0);
    tree arg2 = CALL_EXPR_ARG (exp, 1);
    tree len = CALL_EXPR_ARG (exp, 2);

    int arg1_align
      = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
    int arg2_align
      = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
    enum machine_mode insn_mode;

    /* Prefer cmpmem over cmpstrn if both exist; otherwise bail out.  */
#ifdef HAVE_cmpmemsi
    if (HAVE_cmpmemsi)
      insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
    else
#endif
#ifdef HAVE_cmpstrnsi
    if (HAVE_cmpstrnsi)
      insn_mode = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
    else
#endif
      return NULL_RTX;

    /* If we don't have POINTER_TYPE, call the function.  */
    if (arg1_align == 0 || arg2_align == 0)
      return NULL_RTX;

    /* Make a place to write the result of the instruction.  */
    result = target;
    if (! (result != 0
	   && REG_P (result) && GET_MODE (result) == insn_mode
	   && REGNO (result) >= FIRST_PSEUDO_REGISTER))
      result = gen_reg_rtx (insn_mode);

    arg1_rtx = get_memory_rtx (arg1, len);
    arg2_rtx = get_memory_rtx (arg2, len);
    arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));

    /* Set MEM_SIZE as appropriate.  */
    if (CONST_INT_P (arg3_rtx))
      {
	set_mem_size (arg1_rtx, arg3_rtx);
	set_mem_size (arg2_rtx, arg3_rtx);
      }

/* Emit the compare insn, passing MIN of the two alignments.  */
#ifdef HAVE_cmpmemsi
    if (HAVE_cmpmemsi)
      insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
			   GEN_INT (MIN (arg1_align, arg2_align)));
    else
#endif
#ifdef HAVE_cmpstrnsi
    if (HAVE_cmpstrnsi)
      insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
			    GEN_INT (MIN (arg1_align, arg2_align)));
    else
#endif
      gcc_unreachable ();

    /* The generator may decline at expansion time; fall back to a
       direct library call so RESULT ends up in INSN_MODE either way.  */
    if (insn)
      emit_insn (insn);
    else
      emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
			       TYPE_MODE (integer_type_node), 3,
			       XEXP (arg1_rtx, 0), Pmode,
			       XEXP (arg2_rtx, 0), Pmode,
			       convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
						TYPE_UNSIGNED (sizetype)),
			       TYPE_MODE (sizetype));

    /* Return the value in the proper mode for this function.  */
    mode = TYPE_MODE (TREE_TYPE (exp));
    if (GET_MODE (result) == mode)
      return result;
    else if (target != 0)
      {
	convert_move (target, result, 0);
	return target;
      }
    else
      return convert_to_mode (mode, result, 0);
  }
#endif

  return NULL_RTX;
}
4101
/* Expand expression EXP, which is a call to the strcmp builtin.  Return NULL_RTX
   if we failed; the caller should then emit a normal call.  Otherwise try
   to get the result in TARGET, if convenient.  Tries cmpstrsi first, then
   cmpstrnsi with a length derived from constant string lengths, and
   finally expands a stabilized library call itself.  */

static rtx
expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
{
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

#if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
  if (cmpstr_optab[SImode] != CODE_FOR_nothing
      || cmpstrn_optab[SImode] != CODE_FOR_nothing)
    {
      rtx arg1_rtx, arg2_rtx;
      rtx result, insn = NULL_RTX;
      tree fndecl, fn;
      tree arg1 = CALL_EXPR_ARG (exp, 0);
      tree arg2 = CALL_EXPR_ARG (exp, 1);

      int arg1_align
	= get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
      int arg2_align
	= get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;

      /* If we don't have POINTER_TYPE, call the function.  */
      if (arg1_align == 0 || arg2_align == 0)
	return NULL_RTX;

      /* Stabilize the arguments in case gen_cmpstr(n)si fail.  */
      arg1 = builtin_save_expr (arg1);
      arg2 = builtin_save_expr (arg2);

      arg1_rtx = get_memory_rtx (arg1, NULL);
      arg2_rtx = get_memory_rtx (arg2, NULL);

#ifdef HAVE_cmpstrsi
      /* Try to call cmpstrsi.  */
      if (HAVE_cmpstrsi)
	{
	  enum machine_mode insn_mode
	    = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;

	  /* Make a place to write the result of the instruction.  */
	  result = target;
	  if (! (result != 0
		 && REG_P (result) && GET_MODE (result) == insn_mode
		 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	    result = gen_reg_rtx (insn_mode);

	  insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
			       GEN_INT (MIN (arg1_align, arg2_align)));
	}
#endif
#ifdef HAVE_cmpstrnsi
      /* Try to determine at least one length and call cmpstrnsi.  */
      if (!insn && HAVE_cmpstrnsi)
	{
	  tree len;
	  rtx arg3_rtx;

	  enum machine_mode insn_mode
	    = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
	  /* Compile-time string lengths, if known.  */
	  tree len1 = c_strlen (arg1, 1);
	  tree len2 = c_strlen (arg2, 1);

	  /* Include the terminating NUL in each length.  */
	  if (len1)
	    len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
	  if (len2)
	    len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);

	  /* If we don't have a constant length for the first, use the length
	     of the second, if we know it.  We don't require a constant for
	     this case; some cost analysis could be done if both are available
	     but neither is constant.  For now, assume they're equally cheap,
	     unless one has side effects.  If both strings have constant lengths,
	     use the smaller.  */

	  if (!len1)
	    len = len2;
	  else if (!len2)
	    len = len1;
	  else if (TREE_SIDE_EFFECTS (len1))
	    len = len2;
	  else if (TREE_SIDE_EFFECTS (len2))
	    len = len1;
	  else if (TREE_CODE (len1) != INTEGER_CST)
	    len = len2;
	  else if (TREE_CODE (len2) != INTEGER_CST)
	    len = len1;
	  else if (tree_int_cst_lt (len1, len2))
	    len = len1;
	  else
	    len = len2;

	  /* If both arguments have side effects, we cannot optimize.  */
	  if (!len || TREE_SIDE_EFFECTS (len))
	    goto do_libcall;

	  arg3_rtx = expand_normal (len);

	  /* Make a place to write the result of the instruction.  */
	  result = target;
	  if (! (result != 0
		 && REG_P (result) && GET_MODE (result) == insn_mode
		 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	    result = gen_reg_rtx (insn_mode);

	  insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
				GEN_INT (MIN (arg1_align, arg2_align)));
	}
#endif

      if (insn)
	{
	  enum machine_mode mode;
	  emit_insn (insn);

	  /* Return the value in the proper mode for this function.  */
	  mode = TYPE_MODE (TREE_TYPE (exp));
	  if (GET_MODE (result) == mode)
	    return result;
	  if (target == 0)
	    return convert_to_mode (mode, result, 0);
	  convert_move (target, result, 0);
	  return target;
	}

      /* Expand the library call ourselves using a stabilized argument
	 list to avoid re-evaluating the function's arguments twice.  */
#ifdef HAVE_cmpstrnsi
    do_libcall:
#endif
      fndecl = get_callee_fndecl (exp);
      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_call (fn, target, target == const0_rtx);
    }
#endif
  return NULL_RTX;
}
4244
/* Expand expression EXP, which is a call to the strncmp builtin.  Return
   NULL_RTX if we failed; the caller should then emit a normal call.
   Otherwise try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
			ATTRIBUTE_UNUSED enum machine_mode mode)
{
  location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);

  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* If c_strlen can determine an expression for one of the string
     lengths, and it doesn't have side effects, then emit cmpstrnsi
     using length MIN(strlen(string)+1, arg3).  */
#ifdef HAVE_cmpstrnsi
  if (HAVE_cmpstrnsi)
    {
      tree len, len1, len2;
      rtx arg1_rtx, arg2_rtx, arg3_rtx;
      rtx result, insn;
      tree fndecl, fn;
      tree arg1 = CALL_EXPR_ARG (exp, 0);
      tree arg2 = CALL_EXPR_ARG (exp, 1);
      tree arg3 = CALL_EXPR_ARG (exp, 2);

      int arg1_align
	= get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
      int arg2_align
	= get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
      enum machine_mode insn_mode
	= insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;

      /* Compile-time string lengths, if known.  */
      len1 = c_strlen (arg1, 1);
      len2 = c_strlen (arg2, 1);

      /* Include the terminating NUL in each length.  */
      if (len1)
	len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
      if (len2)
	len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);

      /* If we don't have a constant length for the first, use the length
	 of the second, if we know it.  We don't require a constant for
	 this case; some cost analysis could be done if both are available
	 but neither is constant.  For now, assume they're equally cheap,
	 unless one has side effects.  If both strings have constant lengths,
	 use the smaller.  */

      if (!len1)
	len = len2;
      else if (!len2)
	len = len1;
      else if (TREE_SIDE_EFFECTS (len1))
	len = len2;
      else if (TREE_SIDE_EFFECTS (len2))
	len = len1;
      else if (TREE_CODE (len1) != INTEGER_CST)
	len = len2;
      else if (TREE_CODE (len2) != INTEGER_CST)
	len = len1;
      else if (tree_int_cst_lt (len1, len2))
	len = len1;
      else
	len = len2;

      /* If both arguments have side effects, we cannot optimize.  */
      if (!len || TREE_SIDE_EFFECTS (len))
	return NULL_RTX;

      /* The actual new length parameter is MIN(len,arg3).  */
      len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
			     fold_convert_loc (loc, TREE_TYPE (len), arg3));

      /* If we don't have POINTER_TYPE, call the function.  */
      if (arg1_align == 0 || arg2_align == 0)
	return NULL_RTX;

      /* Make a place to write the result of the instruction.  */
      result = target;
      if (! (result != 0
	     && REG_P (result) && GET_MODE (result) == insn_mode
	     && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	result = gen_reg_rtx (insn_mode);

      /* Stabilize the arguments in case gen_cmpstrnsi fails.  */
      arg1 = builtin_save_expr (arg1);
      arg2 = builtin_save_expr (arg2);
      len = builtin_save_expr (len);

      arg1_rtx = get_memory_rtx (arg1, len);
      arg2_rtx = get_memory_rtx (arg2, len);
      arg3_rtx = expand_normal (len);
      insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
			    GEN_INT (MIN (arg1_align, arg2_align)));
      if (insn)
	{
	  emit_insn (insn);

	  /* Return the value in the proper mode for this function.  */
	  mode = TYPE_MODE (TREE_TYPE (exp));
	  if (GET_MODE (result) == mode)
	    return result;
	  if (target == 0)
	    return convert_to_mode (mode, result, 0);
	  convert_move (target, result, 0);
	  return target;
	}

      /* Expand the library call ourselves using a stabilized argument
	 list to avoid re-evaluating the function's arguments twice.  */
      fndecl = get_callee_fndecl (exp);
      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
				  arg1, arg2, len);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_call (fn, target, target == const0_rtx);
    }
#endif
  return NULL_RTX;
}
4367
/* Expand a call to __builtin_saveregs, returning the RTX holding the
   result.  (The old mention of a TARGET parameter was stale: this
   function takes no arguments and caches its result per function.)  */

rtx
expand_builtin_saveregs (void)
{
  rtx val, seq;

  /* Don't do __builtin_saveregs more than once in a function.
     Save the result of the first call and reuse it.  */
  if (saveregs_value != 0)
    return saveregs_value;

  /* When this function is called, it means that registers must be
     saved on entry to this function.  So we migrate the call to the
     first insn of this function.  */

  start_sequence ();

  /* Do whatever the machine needs done in this case.  */
  val = targetm.calls.expand_builtin_saveregs ();

  seq = get_insns ();
  end_sequence ();

  saveregs_value = val;

  /* Put the insns after the NOTE that starts the function.  If this
     is inside a start_sequence, make the outer-level insn chain current, so
     the code is placed at the start of the function.  */
  push_topmost_sequence ();
  emit_insn_after (seq, entry_of_function ());
  pop_topmost_sequence ();

  return val;
}
4404
4405 /* __builtin_args_info (N) returns word N of the arg space info
4406 for the current function. The number and meanings of words
4407 is controlled by the definition of CUMULATIVE_ARGS. */
4408
4409 static rtx
4410 expand_builtin_args_info (tree exp)
4411 {
4412 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
4413 int *word_ptr = (int *) &crtl->args.info;
4414
4415 gcc_assert (sizeof (CUMULATIVE_ARGS) % sizeof (int) == 0);
4416
4417 if (call_expr_nargs (exp) != 0)
4418 {
4419 if (!host_integerp (CALL_EXPR_ARG (exp, 0), 0))
4420 error ("argument of %<__builtin_args_info%> must be constant");
4421 else
4422 {
4423 HOST_WIDE_INT wordnum = tree_low_cst (CALL_EXPR_ARG (exp, 0), 0);
4424
4425 if (wordnum < 0 || wordnum >= nwords)
4426 error ("argument of %<__builtin_args_info%> out of range");
4427 else
4428 return GEN_INT (word_ptr[wordnum]);
4429 }
4430 }
4431 else
4432 error ("missing argument in %<__builtin_args_info%>");
4433
4434 return const0_rtx;
4435 }
4436
/* Expand a call to __builtin_next_arg.  Computes the address just past
   the last named argument: internal_arg_pointer + arg_offset_rtx.  */

static rtx
expand_builtin_next_arg (void)
{
  /* Checking arguments is already done in fold_builtin_next_arg
     that must be called before this function.  */
  return expand_binop (ptr_mode, add_optab,
		       crtl->args.internal_arg_pointer,
		       crtl->args.arg_offset_rtx,
		       NULL_RTX, 0, OPTAB_LIB_WIDEN);
}
4449
/* Make it easier for the backends by protecting the valist argument
   from multiple evaluations.  LOC is the location to use for any trees
   built here; VALIST is the va_list expression; NEEDS_LVALUE is nonzero
   when the caller must be able to write through the result.  Returns
   the (possibly wrapped) valist tree.  */

static tree
stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
{
  tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));

  gcc_assert (vatype != NULL_TREE);

  if (TREE_CODE (vatype) == ARRAY_TYPE)
    {
      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);

      /* For this case, the backends will be expecting a pointer to
	 vatype, but it's possible we've actually been given an array
	 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
	 So fix it.  */
      if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
	{
	  tree p1 = build_pointer_type (TREE_TYPE (vatype));
	  valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
	}
    }
  else
    {
      tree pt;

      if (! needs_lvalue)
	{
	  if (! TREE_SIDE_EFFECTS (valist))
	    return valist;

	  pt = build_pointer_type (vatype);
	  valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
	  TREE_SIDE_EFFECTS (valist) = 1;
	}

      /* Take the address, stabilize it, then dereference, so the
	 valist expression itself is evaluated only once.  */
      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);
      valist = build_fold_indirect_ref_loc (loc, valist);
    }

  return valist;
}
4496
/* The "standard" definition of va_list is void*.  Default for the
   TARGET_BUILD_BUILTIN_VA_LIST hook.  */

tree
std_build_builtin_va_list (void)
{
  return ptr_type_node;
}
4504
/* The "standard" abi va_list is va_list_type_node.  FNDECL is unused
   in this default implementation of the TARGET_FN_ABI_VA_LIST hook.  */

tree
std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
{
  return va_list_type_node;
}
4512
/* The "standard" type of va_list is va_list_type_node.  Return
   va_list_type_node if TYPE matches it (after unwrapping pointer or
   array decay), or NULL_TREE if it does not.  */

tree
std_canonical_va_list_type (tree type)
{
  tree wtype, htype;

  /* Strip one level of indirection from TYPE before comparing.  */
  if (INDIRECT_REF_P (type))
    type = TREE_TYPE (type);
  else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE(type)))
    type = TREE_TYPE (type);
  wtype = va_list_type_node;
  htype = type;
  /* Treat structure va_list types.  */
  if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
    htype = TREE_TYPE (htype);
  else if (TREE_CODE (wtype) == ARRAY_TYPE)
    {
      /* If va_list is an array type, the argument may have decayed
	 to a pointer type, e.g. by being passed to another function.
	 In that case, unwrap both types so that we can compare the
	 underlying records.  */
      if (TREE_CODE (htype) == ARRAY_TYPE
	  || POINTER_TYPE_P (htype))
	{
	  wtype = TREE_TYPE (wtype);
	  htype = TREE_TYPE (htype);
	}
    }
  if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
    return va_list_type_node;

  return NULL_TREE;
}
4547
/* The "standard" implementation of va_start: just assign `nextarg' to
   the variable.  VALIST is the va_list lvalue; NEXTARG is the RTX for
   the address of the first anonymous argument.  */

void
std_expand_builtin_va_start (tree valist, rtx nextarg)
{
  /* Expand VALIST for writing, then store NEXTARG into it.  */
  rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
  convert_move (va_r, nextarg, 0);
}
4557
4558 /* Expand EXP, a call to __builtin_va_start. */
4559
4560 static rtx
4561 expand_builtin_va_start (tree exp)
4562 {
4563 rtx nextarg;
4564 tree valist;
4565 location_t loc = EXPR_LOCATION (exp);
4566
4567 if (call_expr_nargs (exp) < 2)
4568 {
4569 error_at (loc, "too few arguments to function %<va_start%>");
4570 return const0_rtx;
4571 }
4572
4573 if (fold_builtin_next_arg (exp, true))
4574 return const0_rtx;
4575
4576 nextarg = expand_builtin_next_arg ();
4577 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4578
4579 if (targetm.expand_builtin_va_start)
4580 targetm.expand_builtin_va_start (valist, nextarg);
4581 else
4582 std_expand_builtin_va_start (valist, nextarg);
4583
4584 return const0_rtx;
4585 }
4586
/* The "standard" implementation of va_arg: read the value from the
   current (padded) address and increment by the (padded) size.  VALIST
   is the va_list expression, TYPE the requested argument type; setup
   statements go to PRE_P and deferred ones to POST_P.  Returns a tree
   dereferencing the computed argument address.  */

tree
std_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
			  gimple_seq *post_p)
{
  tree addr, t, type_size, rounded_size, valist_tmp;
  unsigned HOST_WIDE_INT align, boundary;
  bool indirect;

#ifdef ARGS_GROW_DOWNWARD
  /* All of the alignment and movement below is for args-grow-up machines.
     As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
     implement their own specialized gimplify_va_arg_expr routines.  */
  gcc_unreachable ();
#endif

  /* Arguments passed by reference arrive as a pointer to TYPE.  */
  indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
  if (indirect)
    type = build_pointer_type (type);

  align = PARM_BOUNDARY / BITS_PER_UNIT;
  boundary = FUNCTION_ARG_BOUNDARY (TYPE_MODE (type), type);

  /* When we align parameter on stack for caller, if the parameter
     alignment is beyond MAX_SUPPORTED_STACK_ALIGNMENT, it will be
     aligned at MAX_SUPPORTED_STACK_ALIGNMENT.  We will match callee
     here with caller.  */
  if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
    boundary = MAX_SUPPORTED_STACK_ALIGNMENT;

  boundary /= BITS_PER_UNIT;

  /* Hoist the valist value into a temporary for the moment.  */
  valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);

  /* va_list pointer is aligned to PARM_BOUNDARY.  If argument actually
     requires greater alignment, we must perform dynamic alignment.  */
  if (boundary > align
      && !integer_zerop (TYPE_SIZE (type)))
    {
      /* valist_tmp += boundary - 1.  */
      t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
		  fold_build2 (POINTER_PLUS_EXPR,
			       TREE_TYPE (valist),
			       valist_tmp, size_int (boundary - 1)));
      gimplify_and_add (t, pre_p);

      /* valist_tmp &= -boundary (round down to the boundary).  */
      t = fold_convert (sizetype, valist_tmp);
      t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
		  fold_convert (TREE_TYPE (valist),
				fold_build2 (BIT_AND_EXPR, sizetype, t,
					     size_int (-boundary))));
      gimplify_and_add (t, pre_p);
    }
  else
    boundary = align;

  /* If the actual alignment is less than the alignment of the type,
     adjust the type accordingly so that we don't assume strict alignment
     when dereferencing the pointer.  */
  boundary *= BITS_PER_UNIT;
  if (boundary < TYPE_ALIGN (type))
    {
      type = build_variant_type_copy (type);
      TYPE_ALIGN (type) = boundary;
    }

  /* Compute the rounded size of the type.  */
  type_size = size_in_bytes (type);
  rounded_size = round_up (type_size, align);

  /* Reduce rounded_size so it's sharable with the postqueue.  */
  gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);

  /* Get AP.  */
  addr = valist_tmp;
  if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
    {
      /* Small args are padded downward.  */
      t = fold_build2_loc (input_location, GT_EXPR, sizetype,
		       rounded_size, size_int (align));
      t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
		       size_binop (MINUS_EXPR, rounded_size, type_size));
      addr = fold_build2 (POINTER_PLUS_EXPR,
			  TREE_TYPE (addr), addr, t);
    }

  /* Compute new value for AP.  */
  t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist), valist_tmp, rounded_size);
  t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
  gimplify_and_add (t, pre_p);

  addr = fold_convert (build_pointer_type (type), addr);

  /* For by-reference arguments, dereference once more to reach the
     actual value.  */
  if (indirect)
    addr = build_va_arg_indirect_ref (addr);

  return build_va_arg_indirect_ref (addr);
}
4687
4688 /* Build an indirect-ref expression over the given TREE, which represents a
4689 piece of a va_arg() expansion. */
4690 tree
4691 build_va_arg_indirect_ref (tree addr)
4692 {
4693 addr = build_fold_indirect_ref_loc (EXPR_LOCATION (addr), addr);
4694
4695 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
4696 mf_mark (addr);
4697
4698 return addr;
4699 }
4700
4701 /* Return a dummy expression of type TYPE in order to keep going after an
4702 error. */
4703
4704 static tree
4705 dummy_object (tree type)
4706 {
4707 tree t = build_int_cst (build_pointer_type (type), 0);
4708 return build1 (INDIRECT_REF, type, t);
4709 }
4710
/* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
   builtin function, but a very special sort of operator.

   EXPR_P points at the VA_ARG_EXPR; on success it is replaced by the
   gimplified access expression.  PRE_P/POST_P collect statements that
   must execute before/after the access.  Returns GS_ERROR on a bad
   va_list argument, GS_ALL_DONE when no further gimplification is
   needed, or GS_OK when the target hook produced a new expression.  */

enum gimplify_status
gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree promoted_type, have_va_type;
  tree valist = TREE_OPERAND (*expr_p, 0);
  tree type = TREE_TYPE (*expr_p);
  tree t;
  location_t loc = EXPR_LOCATION (*expr_p);

  /* Verify that valist is of the proper type.  */
  have_va_type = TREE_TYPE (valist);
  if (have_va_type == error_mark_node)
    return GS_ERROR;
  have_va_type = targetm.canonical_va_list_type (have_va_type);

  /* A NULL canonical type means the argument was not a va_list at all.  */
  if (have_va_type == NULL_TREE)
    {
      error_at (loc, "first argument to %<va_arg%> not of type %<va_list%>");
      return GS_ERROR;
    }

  /* Generate a diagnostic for requesting data of a type that cannot
     be passed through `...' due to type promotion at the call site.  */
  if ((promoted_type = lang_hooks.types.type_promotes_to (type))
      != type)
    {
      /* Static so the supplementary hint is only issued once per run.  */
      static bool gave_help;
      bool warned;

      /* Unfortunately, this is merely undefined, rather than a constraint
	 violation, so we cannot make this an error.  If this call is never
	 executed, the program is still strictly conforming.  */
      warned = warning_at (loc, 0,
			   "%qT is promoted to %qT when passed through %<...%>",
			   type, promoted_type);
      if (!gave_help && warned)
	{
	  gave_help = true;
	  inform (loc, "(so you should pass %qT not %qT to %<va_arg%>)",
		  promoted_type, type);
	}

      /* We can, however, treat "undefined" any way we please.
	 Call abort to encourage the user to fix the program.  */
      if (warned)
	inform (loc, "if this code is reached, the program will abort");
      /* Before the abort, allow the evaluation of the va_list
	 expression to exit or longjmp.  */
      gimplify_and_add (valist, pre_p);
      t = build_call_expr_loc (loc,
			       implicit_built_in_decls[BUILT_IN_TRAP], 0);
      gimplify_and_add (t, pre_p);

      /* This is dead code, but go ahead and finish so that the
	 mode of the result comes out right.  */
      *expr_p = dummy_object (type);
      return GS_ALL_DONE;
    }
  else
    {
      /* Make it easier for the backends by protecting the valist argument
	 from multiple evaluations.  */
      if (TREE_CODE (have_va_type) == ARRAY_TYPE)
	{
	  /* For this case, the backends will be expecting a pointer to
	     TREE_TYPE (abi), but it's possible we've
	     actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
	     So fix it.  */
	  if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
	    {
	      tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
	      valist = fold_convert_loc (loc, p1,
					 build_fold_addr_expr_loc (loc, valist));
	    }

	  gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
	}
      else
	gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);

      if (!targetm.gimplify_va_arg_expr)
	/* FIXME: Once most targets are converted we should merely
	   assert this is non-null.  */
	return GS_ALL_DONE;

      *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
      return GS_OK;
    }
}
4803
4804 /* Expand EXP, a call to __builtin_va_end. */
4805
4806 static rtx
4807 expand_builtin_va_end (tree exp)
4808 {
4809 tree valist = CALL_EXPR_ARG (exp, 0);
4810
4811 /* Evaluate for side effects, if needed. I hate macros that don't
4812 do that. */
4813 if (TREE_SIDE_EFFECTS (valist))
4814 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4815
4816 return const0_rtx;
4817 }
4818
/* Expand EXP, a call to __builtin_va_copy.  We do this as a
   builtin rather than just as an assignment in stdarg.h because of the
   nastiness of array-type va_list types.  Always returns const0_rtx.  */

static rtx
expand_builtin_va_copy (tree exp)
{
  tree dst, src, t;
  location_t loc = EXPR_LOCATION (exp);

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  /* Stabilize both operands; the destination (third argument 1) is
     written, the source (0) is only read.  */
  dst = stabilize_va_list_loc (loc, dst, 1);
  src = stabilize_va_list_loc (loc, src, 0);

  gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);

  /* A non-array ABI va_list can be copied with a plain assignment.  */
  if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
    {
      t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }
  else
    {
      /* Array-type va_list: copy the whole object with a block move.  */
      rtx dstb, srcb, size;

      /* Evaluate to pointers.  */
      dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
      srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
      size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
			  NULL_RTX, VOIDmode, EXPAND_NORMAL);

      dstb = convert_memory_address (Pmode, dstb);
      srcb = convert_memory_address (Pmode, srcb);

      /* "Dereference" to BLKmode memories.  */
      dstb = gen_rtx_MEM (BLKmode, dstb);
      set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
      set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
      srcb = gen_rtx_MEM (BLKmode, srcb);
      set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
      set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));

      /* Copy.  */
      emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
    }

  return const0_rtx;
}
4870
4871 /* Expand a call to one of the builtin functions __builtin_frame_address or
4872 __builtin_return_address. */
4873
4874 static rtx
4875 expand_builtin_frame_address (tree fndecl, tree exp)
4876 {
4877 /* The argument must be a nonnegative integer constant.
4878 It counts the number of frames to scan up the stack.
4879 The value is the return address saved in that frame. */
4880 if (call_expr_nargs (exp) == 0)
4881 /* Warning about missing arg was already issued. */
4882 return const0_rtx;
4883 else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
4884 {
4885 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4886 error ("invalid argument to %<__builtin_frame_address%>");
4887 else
4888 error ("invalid argument to %<__builtin_return_address%>");
4889 return const0_rtx;
4890 }
4891 else
4892 {
4893 rtx tem
4894 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
4895 tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
4896
4897 /* Some ports cannot access arbitrary stack frames. */
4898 if (tem == NULL)
4899 {
4900 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4901 warning (0, "unsupported argument to %<__builtin_frame_address%>");
4902 else
4903 warning (0, "unsupported argument to %<__builtin_return_address%>");
4904 return const0_rtx;
4905 }
4906
4907 /* For __builtin_frame_address, return what we've got. */
4908 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4909 return tem;
4910
4911 if (!REG_P (tem)
4912 && ! CONSTANT_P (tem))
4913 tem = copy_to_mode_reg (Pmode, tem);
4914 return tem;
4915 }
4916 }
4917
4918 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if
4919 we failed and the caller should emit a normal call, otherwise try to get
4920 the result in TARGET, if convenient. */
4921
4922 static rtx
4923 expand_builtin_alloca (tree exp, rtx target)
4924 {
4925 rtx op0;
4926 rtx result;
4927
4928 /* Emit normal call if marked not-inlineable. */
4929 if (CALL_CANNOT_INLINE_P (exp))
4930 return NULL_RTX;
4931
4932 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4933 return NULL_RTX;
4934
4935 /* Compute the argument. */
4936 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4937
4938 /* Allocate the desired space. */
4939 result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
4940 result = convert_memory_address (ptr_mode, result);
4941
4942 return result;
4943 }
4944
/* Expand EXP, a call to a __builtin_bswap* builtin, which reverses the
   byte order of its single integer argument.  Place the result in
   TARGET if convenient; SUBTARGET may be used as the target for
   computing the operand.  Asserts that the target supports bswap in
   the operand's mode.  */

static rtx
expand_builtin_bswap (tree exp, rtx target, rtx subtarget)
{
  enum machine_mode mode;
  tree arg;
  rtx op0;

  if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  /* Perform the swap in the argument's own mode.  */
  mode = TYPE_MODE (TREE_TYPE (arg));
  op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

  target = expand_unop (mode, bswap_optab, op0, target, 1);

  gcc_assert (target);

  return convert_to_mode (mode, target, 0);
}
4968
4969 /* Expand a call to a unary builtin in EXP.
4970 Return NULL_RTX if a normal call should be emitted rather than expanding the
4971 function in-line. If convenient, the result should be placed in TARGET.
4972 SUBTARGET may be used as the target for computing one of EXP's operands. */
4973
4974 static rtx
4975 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
4976 rtx subtarget, optab op_optab)
4977 {
4978 rtx op0;
4979
4980 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4981 return NULL_RTX;
4982
4983 /* Compute the argument. */
4984 op0 = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
4985 VOIDmode, EXPAND_NORMAL);
4986 /* Compute op, into TARGET if possible.
4987 Set TARGET to wherever the result comes back. */
4988 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4989 op_optab, op0, target, 1);
4990 gcc_assert (target);
4991
4992 return convert_to_mode (target_mode, target, 0);
4993 }
4994
4995 /* Expand a call to __builtin_expect. We just return our argument
4996 as the builtin_expect semantic should've been already executed by
4997 tree branch prediction pass. */
4998
4999 static rtx
5000 expand_builtin_expect (tree exp, rtx target)
5001 {
5002 tree arg;
5003
5004 if (call_expr_nargs (exp) < 2)
5005 return const0_rtx;
5006 arg = CALL_EXPR_ARG (exp, 0);
5007
5008 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5009 /* When guessing was done, the hints should be already stripped away. */
5010 gcc_assert (!flag_guess_branch_prob
5011 || optimize == 0 || seen_error ());
5012 return target;
5013 }
5014
/* Expand a call to __builtin_trap: emit the target's trap instruction
   if it has one, otherwise call abort via a library call, and terminate
   the insn stream with a barrier since control never continues past
   the trap.  */

void
expand_builtin_trap (void)
{
#ifdef HAVE_trap
  if (HAVE_trap)
    emit_insn (gen_trap ());
  else
#endif
    emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
  emit_barrier ();
}
5026
/* Expand a call to __builtin_unreachable.  We do nothing except emit
   a barrier saying that control flow will not pass here.

   It is the responsibility of the program being compiled to ensure
   that control flow does never reach __builtin_unreachable.  */
static void
expand_builtin_unreachable (void)
{
  emit_barrier ();
}
5037
5038 /* Expand EXP, a call to fabs, fabsf or fabsl.
5039 Return NULL_RTX if a normal call should be emitted rather than expanding
5040 the function inline. If convenient, the result should be placed
5041 in TARGET. SUBTARGET may be used as the target for computing
5042 the operand. */
5043
5044 static rtx
5045 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5046 {
5047 enum machine_mode mode;
5048 tree arg;
5049 rtx op0;
5050
5051 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5052 return NULL_RTX;
5053
5054 arg = CALL_EXPR_ARG (exp, 0);
5055 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5056 mode = TYPE_MODE (TREE_TYPE (arg));
5057 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5058 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5059 }
5060
5061 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5062 Return NULL is a normal call should be emitted rather than expanding the
5063 function inline. If convenient, the result should be placed in TARGET.
5064 SUBTARGET may be used as the target for computing the operand. */
5065
5066 static rtx
5067 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5068 {
5069 rtx op0, op1;
5070 tree arg;
5071
5072 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5073 return NULL_RTX;
5074
5075 arg = CALL_EXPR_ARG (exp, 0);
5076 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5077
5078 arg = CALL_EXPR_ARG (exp, 1);
5079 op1 = expand_normal (arg);
5080
5081 return expand_copysign (op0, op1, target);
5082 }
5083
5084 /* Create a new constant string literal and return a char* pointer to it.
5085 The STRING_CST value is the LEN characters at STR. */
5086 tree
5087 build_string_literal (int len, const char *str)
5088 {
5089 tree t, elem, index, type;
5090
5091 t = build_string (len, str);
5092 elem = build_type_variant (char_type_node, 1, 0);
5093 index = build_index_type (size_int (len - 1));
5094 type = build_array_type (elem, index);
5095 TREE_TYPE (t) = type;
5096 TREE_CONSTANT (t) = 1;
5097 TREE_READONLY (t) = 1;
5098 TREE_STATIC (t) = 1;
5099
5100 type = build_pointer_type (elem);
5101 t = build1 (ADDR_EXPR, type,
5102 build4 (ARRAY_REF, elem,
5103 t, integer_zero_node, NULL_TREE, NULL_TREE));
5104 return t;
5105 }
5106
5107 /* Expand a call to either the entry or exit function profiler. */
5108
5109 static rtx
5110 expand_builtin_profile_func (bool exitp)
5111 {
5112 rtx this_rtx, which;
5113
5114 this_rtx = DECL_RTL (current_function_decl);
5115 gcc_assert (MEM_P (this_rtx));
5116 this_rtx = XEXP (this_rtx, 0);
5117
5118 if (exitp)
5119 which = profile_function_exit_libfunc;
5120 else
5121 which = profile_function_entry_libfunc;
5122
5123 emit_library_call (which, LCT_NORMAL, VOIDmode, 2, this_rtx, Pmode,
5124 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
5125 0),
5126 Pmode);
5127
5128 return const0_rtx;
5129 }
5130
/* Expand a call to __builtin___clear_cache.  Returns NULL_RTX when the
   caller should fall back to a library call, const0_rtx otherwise.  */

static rtx
expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
{
#ifndef HAVE_clear_cache
#ifdef CLEAR_INSN_CACHE
  /* There is no "clear_cache" insn, and __clear_cache() in libgcc
     does something.  Just do the default expansion to a call to
     __clear_cache().  */
  return NULL_RTX;
#else
  /* There is no "clear_cache" insn, and __clear_cache() in libgcc
     does nothing.  There is no need to call it.  Do nothing.  */
  return const0_rtx;
#endif /* CLEAR_INSN_CACHE */
#else
  /* We have a "clear_cache" insn, and it will handle everything.  */
  tree begin, end;
  rtx begin_rtx, end_rtx;
  enum insn_code icode;

  /* We must not expand to a library call.  If we did, any
     fallback library function in libgcc that might contain a call to
     __builtin___clear_cache() would recurse infinitely.  */
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    {
      error ("both arguments to %<__builtin___clear_cache%> must be pointers");
      return const0_rtx;
    }

  if (HAVE_clear_cache)
    {
      icode = CODE_FOR_clear_cache;

      /* Expand both pointer arguments, forcing each into a form the
	 insn's operand predicates accept.  */
      begin = CALL_EXPR_ARG (exp, 0);
      begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
      begin_rtx = convert_memory_address (Pmode, begin_rtx);
      if (!insn_data[icode].operand[0].predicate (begin_rtx, Pmode))
	begin_rtx = copy_to_mode_reg (Pmode, begin_rtx);

      end = CALL_EXPR_ARG (exp, 1);
      end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
      end_rtx = convert_memory_address (Pmode, end_rtx);
      if (!insn_data[icode].operand[1].predicate (end_rtx, Pmode))
	end_rtx = copy_to_mode_reg (Pmode, end_rtx);

      emit_insn (gen_clear_cache (begin_rtx, end_rtx));
    }
  return const0_rtx;
#endif /* HAVE_clear_cache */
}
5183
5184 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5185
5186 static rtx
5187 round_trampoline_addr (rtx tramp)
5188 {
5189 rtx temp, addend, mask;
5190
5191 /* If we don't need too much alignment, we'll have been guaranteed
5192 proper alignment by get_trampoline_type. */
5193 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5194 return tramp;
5195
5196 /* Round address up to desired boundary. */
5197 temp = gen_reg_rtx (Pmode);
5198 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5199 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5200
5201 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5202 temp, 0, OPTAB_LIB_WIDEN);
5203 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5204 temp, 0, OPTAB_LIB_WIDEN);
5205
5206 return tramp;
5207 }
5208
/* Expand EXP, a call to __builtin_init_trampoline.  The three arguments
   are the trampoline memory, the nested function's address, and the
   static chain value.  Returns const0_rtx on success, NULL_RTX if the
   argument list is malformed and a normal call should be emitted.  */

static rtx
expand_builtin_init_trampoline (tree exp)
{
  tree t_tramp, t_func, t_chain;
  rtx m_tramp, r_tramp, r_chain, tmp;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
			 POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_tramp = CALL_EXPR_ARG (exp, 0);
  t_func = CALL_EXPR_ARG (exp, 1);
  t_chain = CALL_EXPR_ARG (exp, 2);

  r_tramp = expand_normal (t_tramp);
  m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
  MEM_NOTRAP_P (m_tramp) = 1;

  /* The TRAMP argument should be the address of a field within the
     local function's FRAME decl.  Let's see if we can fill in the
     MEM_ATTRs for this memory.  */
  if (TREE_CODE (t_tramp) == ADDR_EXPR)
    set_mem_attributes_minus_bitpos (m_tramp, TREE_OPERAND (t_tramp, 0),
				     true, 0);

  /* If rounding for alignment moved the address, rebuild the MEM with
     the alignment and size the backend will rely on.  */
  tmp = round_trampoline_addr (r_tramp);
  if (tmp != r_tramp)
    {
      m_tramp = change_address (m_tramp, BLKmode, tmp);
      set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
      set_mem_size (m_tramp, GEN_INT (TRAMPOLINE_SIZE));
    }

  /* The FUNC argument should be the address of the nested function.
     Extract the actual function decl to pass to the hook.  */
  gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
  t_func = TREE_OPERAND (t_func, 0);
  gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);

  r_chain = expand_normal (t_chain);

  /* Generate insns to initialize the trampoline.  */
  targetm.calls.trampoline_init (m_tramp, t_func, r_chain);

  trampolines_created = 1;
  return const0_rtx;
}
5256
5257 static rtx
5258 expand_builtin_adjust_trampoline (tree exp)
5259 {
5260 rtx tramp;
5261
5262 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5263 return NULL_RTX;
5264
5265 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5266 tramp = round_trampoline_addr (tramp);
5267 if (targetm.calls.trampoline_adjust_address)
5268 tramp = targetm.calls.trampoline_adjust_address (tramp);
5269
5270 return tramp;
5271 }
5272
/* Expand the call EXP to the built-in signbit, signbitf or signbitl
   function.  The function first checks whether the back end provides
   an insn to implement signbit for the respective mode.  If not, it
   checks whether the floating point format of the value is such that
   the sign bit can be extracted.  If that is not the case, the
   function returns NULL_RTX to indicate that a normal call should be
   emitted rather than expanding the function in-line.  EXP is the
   expression that is a call to the builtin function; if convenient,
   the result should be placed in TARGET.  */
static rtx
expand_builtin_signbit (tree exp, rtx target)
{
  const struct real_format *fmt;
  enum machine_mode fmode, imode, rmode;
  tree arg;
  int word, bitpos;
  enum insn_code icode;
  rtx temp;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  fmode = TYPE_MODE (TREE_TYPE (arg));
  rmode = TYPE_MODE (TREE_TYPE (exp));
  fmt = REAL_MODE_FORMAT (fmode);

  arg = builtin_save_expr (arg);

  /* Expand the argument yielding a RTX expression. */
  temp = expand_normal (arg);

  /* Check if the back end provides an insn that handles signbit for the
     argument's mode. */
  icode = signbit_optab->handlers [(int) fmode].insn_code;
  if (icode != CODE_FOR_nothing)
    {
      rtx last = get_last_insn ();
      target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
      if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
	return target;
      /* The insn pattern failed to match; discard any insns it emitted
	 and fall through to the manual expansion.  */
      delete_insns_since (last);
    }

  /* For floating point formats without a sign bit, implement signbit
     as "ARG < 0.0".  */
  bitpos = fmt->signbit_ro;
  if (bitpos < 0)
    {
      /* But we can't do this if the format supports signed zero.  */
      if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
	return NULL_RTX;

      arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
			     build_real (TREE_TYPE (arg), dconst0));
      return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
    }

  if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
    {
      /* The value fits in one word: view it as an integer of the same
	 size and extract the sign bit directly.  */
      imode = int_mode_for_mode (fmode);
      if (imode == BLKmode)
	return NULL_RTX;
      temp = gen_lowpart (imode, temp);
    }
  else
    {
      /* Multi-word value: work on the single word that contains the
	 sign bit.  */
      imode = word_mode;
      /* Handle targets with different FP word orders.  */
      if (FLOAT_WORDS_BIG_ENDIAN)
	word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
      else
	word = bitpos / BITS_PER_WORD;
      temp = operand_subword_force (temp, word, fmode);
      bitpos = bitpos % BITS_PER_WORD;
    }

  /* Force the intermediate word_mode (or narrower) result into a
     register.  This avoids attempting to create paradoxical SUBREGs
     of floating point modes below.  */
  temp = force_reg (imode, temp);

  /* If the bitpos is within the "result mode" lowpart, the operation
     can be implement with a single bitwise AND.  Otherwise, we need
     a right shift and an AND.  */

  if (bitpos < GET_MODE_BITSIZE (rmode))
    {
      double_int mask = double_int_setbit (double_int_zero, bitpos);

      if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
	temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp,
			   immed_double_int_const (mask, rmode),
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }
  else
    {
      /* Perform a logical right shift to place the signbit in the least
	 significant bit, then truncate the result to the desired mode
	 and mask just this bit.  */
      temp = expand_shift (RSHIFT_EXPR, imode, temp,
			   build_int_cst (NULL_TREE, bitpos), NULL_RTX, 1);
      temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp, const1_rtx,
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }

  return temp;
}
5384
5385 /* Expand fork or exec calls. TARGET is the desired target of the
5386 call. EXP is the call. FN is the
5387 identificator of the actual function. IGNORE is nonzero if the
5388 value is to be ignored. */
5389
5390 static rtx
5391 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5392 {
5393 tree id, decl;
5394 tree call;
5395
5396 /* If we are not profiling, just call the function. */
5397 if (!profile_arc_flag)
5398 return NULL_RTX;
5399
5400 /* Otherwise call the wrapper. This should be equivalent for the rest of
5401 compiler, so the code does not diverge, and the wrapper may run the
5402 code necessary for keeping the profiling sane. */
5403
5404 switch (DECL_FUNCTION_CODE (fn))
5405 {
5406 case BUILT_IN_FORK:
5407 id = get_identifier ("__gcov_fork");
5408 break;
5409
5410 case BUILT_IN_EXECL:
5411 id = get_identifier ("__gcov_execl");
5412 break;
5413
5414 case BUILT_IN_EXECV:
5415 id = get_identifier ("__gcov_execv");
5416 break;
5417
5418 case BUILT_IN_EXECLP:
5419 id = get_identifier ("__gcov_execlp");
5420 break;
5421
5422 case BUILT_IN_EXECLE:
5423 id = get_identifier ("__gcov_execle");
5424 break;
5425
5426 case BUILT_IN_EXECVP:
5427 id = get_identifier ("__gcov_execvp");
5428 break;
5429
5430 case BUILT_IN_EXECVE:
5431 id = get_identifier ("__gcov_execve");
5432 break;
5433
5434 default:
5435 gcc_unreachable ();
5436 }
5437
5438 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5439 FUNCTION_DECL, id, TREE_TYPE (fn));
5440 DECL_EXTERNAL (decl) = 1;
5441 TREE_PUBLIC (decl) = 1;
5442 DECL_ARTIFICIAL (decl) = 1;
5443 TREE_NOTHROW (decl) = 1;
5444 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5445 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5446 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5447 return expand_call (call, target, ignore);
5448 }
5449
5450
5451 \f
/* Reconstitute a mode for a __sync intrinsic operation.  Since the type of
   the pointer in these functions is void*, the tree optimizers may remove
   casts.  The mode computed in expand_builtin isn't reliable either, due
   to __sync_bool_compare_and_swap.

   FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
   group of builtins.  This gives us log2 of the mode size.  */

static inline enum machine_mode
get_builtin_sync_mode (int fcode_diff)
{
  /* The size is not negotiable, so ask not to get BLKmode in return
     if the target indicates that a smaller size would be better.  */
  return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
}
5467
5468 /* Expand the memory expression LOC and return the appropriate memory operand
5469 for the builtin_sync operations. */
5470
5471 static rtx
5472 get_builtin_sync_mem (tree loc, enum machine_mode mode)
5473 {
5474 rtx addr, mem;
5475
5476 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5477 addr = convert_memory_address (Pmode, addr);
5478
5479 /* Note that we explicitly do not want any alias information for this
5480 memory, so that we kill all other live memories. Otherwise we don't
5481 satisfy the full barrier semantics of the intrinsic. */
5482 mem = validize_mem (gen_rtx_MEM (mode, addr));
5483
5484 set_mem_align (mem, get_pointer_alignment (loc, BIGGEST_ALIGNMENT));
5485 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5486 MEM_VOLATILE_P (mem) = 1;
5487
5488 return mem;
5489 }
5490
/* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
   EXP is the CALL_EXPR.  CODE is the rtx code
   that corresponds to the arithmetic or logical operation from the name;
   an exception here is that NOT actually means NAND.  TARGET is an optional
   place for us to store the results; AFTER is true if this is the
   fetch_and_xxx form.  IGNORE is true if we don't actually care about
   the result of the operation at all.  */

static rtx
expand_builtin_sync_operation (enum machine_mode mode, tree exp,
			       enum rtx_code code, bool after,
			       rtx target, bool ignore)
{
  rtx val, mem;
  enum machine_mode old_mode;
  location_t loc = EXPR_LOCATION (exp);

  /* The NAND builtins changed meaning in GCC 4.4; warn (once per
     direction) when -Wsync-nand is in effect.  */
  if (code == NOT && warn_sync_nand)
    {
      tree fndecl = get_callee_fndecl (exp);
      enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

      /* Static so each warning is emitted at most once per run.  */
      static bool warned_f_a_n, warned_n_a_f;

      switch (fcode)
	{
	case BUILT_IN_FETCH_AND_NAND_1:
	case BUILT_IN_FETCH_AND_NAND_2:
	case BUILT_IN_FETCH_AND_NAND_4:
	case BUILT_IN_FETCH_AND_NAND_8:
	case BUILT_IN_FETCH_AND_NAND_16:

	  if (warned_f_a_n)
	    break;

	  fndecl = implicit_built_in_decls[BUILT_IN_FETCH_AND_NAND_N];
	  inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
	  warned_f_a_n = true;
	  break;

	case BUILT_IN_NAND_AND_FETCH_1:
	case BUILT_IN_NAND_AND_FETCH_2:
	case BUILT_IN_NAND_AND_FETCH_4:
	case BUILT_IN_NAND_AND_FETCH_8:
	case BUILT_IN_NAND_AND_FETCH_16:

	  if (warned_n_a_f)
	    break;

	  fndecl = implicit_built_in_decls[BUILT_IN_NAND_AND_FETCH_N];
	  inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
	  warned_n_a_f = true;
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
  /* If VAL is promoted to a wider mode, convert it back to MODE.  Take care
     of CONST_INTs, where we know the old_mode only from the call argument.  */
  old_mode = GET_MODE (val);
  if (old_mode == VOIDmode)
    old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
  val = convert_modes (mode, old_mode, val, 1);

  if (ignore)
    return expand_sync_operation (mem, val, code);
  else
    return expand_sync_fetch_operation (mem, val, code, after, target);
}
5566
5567 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5568 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5569 true if this is the boolean form. TARGET is a place for us to store the
5570 results; this is NOT optional if IS_BOOL is true. */
5571
5572 static rtx
5573 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5574 bool is_bool, rtx target)
5575 {
5576 rtx old_val, new_val, mem;
5577 enum machine_mode old_mode;
5578
5579 /* Expand the operands. */
5580 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5581
5582
5583 old_val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX,
5584 mode, EXPAND_NORMAL);
5585 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5586 of CONST_INTs, where we know the old_mode only from the call argument. */
5587 old_mode = GET_MODE (old_val);
5588 if (old_mode == VOIDmode)
5589 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5590 old_val = convert_modes (mode, old_mode, old_val, 1);
5591
5592 new_val = expand_expr (CALL_EXPR_ARG (exp, 2), NULL_RTX,
5593 mode, EXPAND_NORMAL);
5594 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5595 of CONST_INTs, where we know the old_mode only from the call argument. */
5596 old_mode = GET_MODE (new_val);
5597 if (old_mode == VOIDmode)
5598 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2)));
5599 new_val = convert_modes (mode, old_mode, new_val, 1);
5600
5601 if (is_bool)
5602 return expand_bool_compare_and_swap (mem, old_val, new_val, target);
5603 else
5604 return expand_val_compare_and_swap (mem, old_val, new_val, target);
5605 }
5606
5607 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5608 general form is actually an atomic exchange, and some targets only
5609 support a reduced form with the second argument being a constant 1.
5610 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5611 the results. */
5612
5613 static rtx
5614 expand_builtin_lock_test_and_set (enum machine_mode mode, tree exp,
5615 rtx target)
5616 {
5617 rtx val, mem;
5618 enum machine_mode old_mode;
5619
5620 /* Expand the operands. */
5621 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5622 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5623 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5624 of CONST_INTs, where we know the old_mode only from the call argument. */
5625 old_mode = GET_MODE (val);
5626 if (old_mode == VOIDmode)
5627 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5628 val = convert_modes (mode, old_mode, val, 1);
5629
5630 return expand_sync_lock_test_and_set (mem, val, target);
5631 }
5632
/* Expand the __sync_synchronize intrinsic.  Tries, in order: the
   target's memory_barrier insn, the synchronize libfunc, and finally a
   volatile empty asm with a "memory" clobber.  */

static void
expand_builtin_synchronize (void)
{
  gimple x;
  VEC (tree, gc) *v_clobbers;

#ifdef HAVE_memory_barrier
  if (HAVE_memory_barrier)
    {
      emit_insn (gen_memory_barrier ());
      return;
    }
#endif

  if (synchronize_libfunc != NULL_RTX)
    {
      emit_library_call (synchronize_libfunc, LCT_NORMAL, VOIDmode, 0);
      return;
    }

  /* If no explicit memory barrier instruction is available, create an
     empty asm stmt with a memory clobber.  */
  v_clobbers = VEC_alloc (tree, gc, 1);
  VEC_quick_push (tree, v_clobbers,
		  tree_cons (NULL, build_string (6, "memory"), NULL));
  x = gimple_build_asm_vec ("", NULL, NULL, v_clobbers, NULL);
  gimple_asm_set_volatile (x, true);
  expand_asm_stmt (x);
}
5664
5665 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5666
5667 static void
5668 expand_builtin_lock_release (enum machine_mode mode, tree exp)
5669 {
5670 enum insn_code icode;
5671 rtx mem, insn;
5672 rtx val = const0_rtx;
5673
5674 /* Expand the operands. */
5675 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5676
5677 /* If there is an explicit operation in the md file, use it. */
5678 icode = sync_lock_release[mode];
5679 if (icode != CODE_FOR_nothing)
5680 {
5681 if (!insn_data[icode].operand[1].predicate (val, mode))
5682 val = force_reg (mode, val);
5683
5684 insn = GEN_FCN (icode) (mem, val);
5685 if (insn)
5686 {
5687 emit_insn (insn);
5688 return;
5689 }
5690 }
5691
5692 /* Otherwise we can implement this operation by emitting a barrier
5693 followed by a store of zero. */
5694 expand_builtin_synchronize ();
5695 emit_move_insn (mem, val);
5696 }
5697 \f
/* Expand an expression EXP that calls a built-in function,
   with result going to TARGET if that's convenient
   (and in mode MODE if that's convenient).
   SUBTARGET may be used as the target for computing one of EXP's operands.
   IGNORE is nonzero if the value is to be ignored.

   Returns an rtx holding the result, or const0_rtx when the result is
   ignored or the builtin expands purely for side effects.  Falls back to
   emitting an ordinary library call for any builtin the switch below
   cannot (or chooses not to) open-code.  */

rtx
expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
		int ignore)
{
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));

  /* Machine-specific builtins are expanded entirely by the target hook.  */
  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return targetm.expand_builtin (exp, target, subtarget, mode, ignore);

  /* When not optimizing, generate calls to library functions for a certain
     set of builtins.  */
  if (!optimize
      && !called_as_built_in (fndecl)
      && DECL_ASSEMBLER_NAME_SET_P (fndecl)
      && fcode != BUILT_IN_ALLOCA
      && fcode != BUILT_IN_FREE)
    return expand_call (exp, target, ignore);

  /* The built-in function expanders test for target == const0_rtx
     to determine whether the function's result will be ignored.  */
  if (ignore)
    target = const0_rtx;

  /* If the result of a pure or const built-in function is ignored, and
     none of its arguments are volatile, we can avoid expanding the
     built-in call and just evaluate the arguments for side-effects.  */
  if (target == const0_rtx
      && (DECL_PURE_P (fndecl) || TREE_READONLY (fndecl)))
    {
      bool volatilep = false;
      tree arg;
      call_expr_arg_iterator iter;

      FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
	if (TREE_THIS_VOLATILE (arg))
	  {
	    volatilep = true;
	    break;
	  }

      if (! volatilep)
	{
	  FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
	    expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
	  return const0_rtx;
	}
    }

  /* Dispatch on the builtin's function code.  Each case either returns
     an expansion or breaks out to emit a normal library call.  */
  switch (fcode)
    {
    CASE_FLT_FN (BUILT_IN_FABS):
      target = expand_builtin_fabs (exp, target, subtarget);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_COPYSIGN):
      target = expand_builtin_copysign (exp, target, subtarget);
      if (target)
	return target;
      break;

      /* Just do a normal library call if we were unable to fold
	 the values.  */
    CASE_FLT_FN (BUILT_IN_CABS):
      break;

    CASE_FLT_FN (BUILT_IN_EXP):
    CASE_FLT_FN (BUILT_IN_EXP10):
    CASE_FLT_FN (BUILT_IN_POW10):
    CASE_FLT_FN (BUILT_IN_EXP2):
    CASE_FLT_FN (BUILT_IN_EXPM1):
    CASE_FLT_FN (BUILT_IN_LOGB):
    CASE_FLT_FN (BUILT_IN_LOG):
    CASE_FLT_FN (BUILT_IN_LOG10):
    CASE_FLT_FN (BUILT_IN_LOG2):
    CASE_FLT_FN (BUILT_IN_LOG1P):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ACOS):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
      /* Treat these like sqrt only if unsafe math optimizations are allowed,
	 because of possible accuracy problems.  Note the deliberate
	 fallthrough into the sqrt group below.  */
      if (! flag_unsafe_math_optimizations)
	break;
    CASE_FLT_FN (BUILT_IN_SQRT):
    CASE_FLT_FN (BUILT_IN_FLOOR):
    CASE_FLT_FN (BUILT_IN_CEIL):
    CASE_FLT_FN (BUILT_IN_TRUNC):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      target = expand_builtin_mathfn (exp, target, subtarget);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_ILOGB):
      /* Fallthrough into the interclass group only when unsafe math
	 optimizations are enabled.  */
      if (! flag_unsafe_math_optimizations)
	break;
    CASE_FLT_FN (BUILT_IN_ISINF):
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_ISFINITE:
    case BUILT_IN_ISNORMAL:
      target = expand_builtin_interclass_mathfn (exp, target, subtarget);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_LCEIL):
    CASE_FLT_FN (BUILT_IN_LLCEIL):
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      target = expand_builtin_int_roundingfn (exp, target);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_LLROUND):
      target = expand_builtin_int_roundingfn_2 (exp, target);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_POW):
      target = expand_builtin_pow (exp, target, subtarget);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_POWI):
      target = expand_builtin_powi (exp, target, subtarget);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_ATAN2):
    CASE_FLT_FN (BUILT_IN_LDEXP):
    CASE_FLT_FN (BUILT_IN_SCALB):
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
      /* These fall through into the two-argument math group only under
	 unsafe math optimizations.  */
      if (! flag_unsafe_math_optimizations)
	break;

    CASE_FLT_FN (BUILT_IN_FMOD):
    CASE_FLT_FN (BUILT_IN_REMAINDER):
    CASE_FLT_FN (BUILT_IN_DREM):
      target = expand_builtin_mathfn_2 (exp, target, subtarget);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_CEXPI):
      target = expand_builtin_cexpi (exp, target, subtarget);
      gcc_assert (target);
      return target;

    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_COS):
      if (! flag_unsafe_math_optimizations)
	break;
      target = expand_builtin_mathfn_3 (exp, target, subtarget);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_SINCOS):
      if (! flag_unsafe_math_optimizations)
	break;
      target = expand_builtin_sincos (exp);
      if (target)
	return target;
      break;

    case BUILT_IN_APPLY_ARGS:
      return expand_builtin_apply_args ();

      /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
	 FUNCTION with a copy of the parameters described by
	 ARGUMENTS, and ARGSIZE.  It returns a block of memory
	 allocated on the stack into which is stored all the registers
	 that might possibly be used for returning the result of a
	 function.  ARGUMENTS is the value returned by
	 __builtin_apply_args.  ARGSIZE is the number of bytes of
	 arguments that must be copied.  ??? How should this value be
	 computed?  We'll also need a safe worst case value for varargs
	 functions.  */
    case BUILT_IN_APPLY:
      if (!validate_arglist (exp, POINTER_TYPE,
			     POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
	  && !validate_arglist (exp, REFERENCE_TYPE,
				POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
	return const0_rtx;
      else
	{
	  rtx ops[3];

	  ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
	  ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
	  ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));

	  return expand_builtin_apply (ops[0], ops[1], ops[2]);
	}

      /* __builtin_return (RESULT) causes the function to return the
	 value described by RESULT.  RESULT is address of the block of
	 memory returned by __builtin_apply.  */
    case BUILT_IN_RETURN:
      if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
	expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
      return const0_rtx;

    case BUILT_IN_SAVEREGS:
      return expand_builtin_saveregs ();

    case BUILT_IN_ARGS_INFO:
      return expand_builtin_args_info (exp);

    case BUILT_IN_VA_ARG_PACK:
      /* All valid uses of __builtin_va_arg_pack () are removed during
	 inlining.  */
      error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
      return const0_rtx;

    case BUILT_IN_VA_ARG_PACK_LEN:
      /* All valid uses of __builtin_va_arg_pack_len () are removed during
	 inlining.  */
      error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
      return const0_rtx;

      /* Return the address of the first anonymous stack arg.  */
    case BUILT_IN_NEXT_ARG:
      if (fold_builtin_next_arg (exp, false))
	return const0_rtx;
      return expand_builtin_next_arg ();

    case BUILT_IN_CLEAR_CACHE:
      target = expand_builtin___clear_cache (exp);
      if (target)
	return target;
      break;

    case BUILT_IN_CLASSIFY_TYPE:
      return expand_builtin_classify_type (exp);

    case BUILT_IN_CONSTANT_P:
      /* By RTL expansion time nothing further can become constant, so
	 an unfolded __builtin_constant_p is 0.  */
      return const0_rtx;

    case BUILT_IN_FRAME_ADDRESS:
    case BUILT_IN_RETURN_ADDRESS:
      return expand_builtin_frame_address (fndecl, exp);

      /* Returns the address of the area where the structure is returned.
	 0 otherwise.  */
    case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      if (call_expr_nargs (exp) != 0
	  || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
	  || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
	return const0_rtx;
      else
	return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);

    case BUILT_IN_ALLOCA:
      target = expand_builtin_alloca (exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_STACK_SAVE:
      return expand_stack_save ();

    case BUILT_IN_STACK_RESTORE:
      expand_stack_restore (CALL_EXPR_ARG (exp, 0));
      return const0_rtx;

    case BUILT_IN_BSWAP32:
    case BUILT_IN_BSWAP64:
      target = expand_builtin_bswap (exp, target, subtarget);

      if (target)
	return target;
      break;

    CASE_INT_FN (BUILT_IN_FFS):
    case BUILT_IN_FFSIMAX:
      target = expand_builtin_unop (target_mode, exp, target,
				    subtarget, ffs_optab);
      if (target)
	return target;
      break;

    CASE_INT_FN (BUILT_IN_CLZ):
    case BUILT_IN_CLZIMAX:
      target = expand_builtin_unop (target_mode, exp, target,
				    subtarget, clz_optab);
      if (target)
	return target;
      break;

    CASE_INT_FN (BUILT_IN_CTZ):
    case BUILT_IN_CTZIMAX:
      target = expand_builtin_unop (target_mode, exp, target,
				    subtarget, ctz_optab);
      if (target)
	return target;
      break;

    CASE_INT_FN (BUILT_IN_POPCOUNT):
    case BUILT_IN_POPCOUNTIMAX:
      target = expand_builtin_unop (target_mode, exp, target,
				    subtarget, popcount_optab);
      if (target)
	return target;
      break;

    CASE_INT_FN (BUILT_IN_PARITY):
    case BUILT_IN_PARITYIMAX:
      target = expand_builtin_unop (target_mode, exp, target,
				    subtarget, parity_optab);
      if (target)
	return target;
      break;

    case BUILT_IN_STRLEN:
      target = expand_builtin_strlen (exp, target, target_mode);
      if (target)
	return target;
      break;

    case BUILT_IN_STRCPY:
      target = expand_builtin_strcpy (exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_STRNCPY:
      target = expand_builtin_strncpy (exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_STPCPY:
      target = expand_builtin_stpcpy (exp, target, mode);
      if (target)
	return target;
      break;

    case BUILT_IN_MEMCPY:
      target = expand_builtin_memcpy (exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_MEMPCPY:
      target = expand_builtin_mempcpy (exp, target, mode);
      if (target)
	return target;
      break;

    case BUILT_IN_MEMSET:
      target = expand_builtin_memset (exp, target, mode);
      if (target)
	return target;
      break;

    case BUILT_IN_BZERO:
      target = expand_builtin_bzero (exp);
      if (target)
	return target;
      break;

    case BUILT_IN_STRCMP:
      target = expand_builtin_strcmp (exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_STRNCMP:
      target = expand_builtin_strncmp (exp, target, mode);
      if (target)
	return target;
      break;

    case BUILT_IN_BCMP:
    case BUILT_IN_MEMCMP:
      target = expand_builtin_memcmp (exp, target, mode);
      if (target)
	return target;
      break;

    case BUILT_IN_SETJMP:
      /* This should have been lowered to the builtins below.  */
      gcc_unreachable ();

    case BUILT_IN_SETJMP_SETUP:
      /* __builtin_setjmp_setup is passed a pointer to an array of five words
	  and the receiver label.  */
      if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
	{
	  rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
				      VOIDmode, EXPAND_NORMAL);
	  tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
	  rtx label_r = label_rtx (label);

	  /* This is copied from the handling of non-local gotos.  */
	  expand_builtin_setjmp_setup (buf_addr, label_r);
	  nonlocal_goto_handler_labels
	    = gen_rtx_EXPR_LIST (VOIDmode, label_r,
				 nonlocal_goto_handler_labels);
	  /* ??? Do not let expand_label treat us as such since we would
	     not want to be both on the list of non-local labels and on
	     the list of forced labels.  */
	  FORCED_LABEL (label) = 0;
	  return const0_rtx;
	}
      break;

    case BUILT_IN_SETJMP_DISPATCHER:
       /* __builtin_setjmp_dispatcher is passed the dispatcher label.  */
      if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
	{
	  tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
	  rtx label_r = label_rtx (label);

	  /* Remove the dispatcher label from the list of non-local labels
	     since the receiver labels have been added to it above.  */
	  remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
	  return const0_rtx;
	}
      break;

    case BUILT_IN_SETJMP_RECEIVER:
       /* __builtin_setjmp_receiver is passed the receiver label.  */
      if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
	{
	  tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
	  rtx label_r = label_rtx (label);

	  expand_builtin_setjmp_receiver (label_r);
	  return const0_rtx;
	}
      break;

      /* __builtin_longjmp is passed a pointer to an array of five words.
	 It's similar to the C library longjmp function but works with
	 __builtin_setjmp above.  */
    case BUILT_IN_LONGJMP:
      if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
	{
	  rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
				      VOIDmode, EXPAND_NORMAL);
	  rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));

	  if (value != const1_rtx)
	    {
	      error ("%<__builtin_longjmp%> second argument must be 1");
	      return const0_rtx;
	    }

	  expand_builtin_longjmp (buf_addr, value);
	  return const0_rtx;
	}
      break;

    case BUILT_IN_NONLOCAL_GOTO:
      target = expand_builtin_nonlocal_goto (exp);
      if (target)
	return target;
      break;

      /* This updates the setjmp buffer that is its argument with the value
	 of the current stack pointer.  */
    case BUILT_IN_UPDATE_SETJMP_BUF:
      if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
	{
	  rtx buf_addr
	    = expand_normal (CALL_EXPR_ARG (exp, 0));

	  expand_builtin_update_setjmp_buf (buf_addr);
	  return const0_rtx;
	}
      break;

    case BUILT_IN_TRAP:
      expand_builtin_trap ();
      return const0_rtx;

    case BUILT_IN_UNREACHABLE:
      expand_builtin_unreachable ();
      return const0_rtx;

    CASE_FLT_FN (BUILT_IN_SIGNBIT):
    case BUILT_IN_SIGNBITD32:
    case BUILT_IN_SIGNBITD64:
    case BUILT_IN_SIGNBITD128:
      target = expand_builtin_signbit (exp, target);
      if (target)
	return target;
      break;

      /* Various hooks for the DWARF 2 __throw routine.  */
    case BUILT_IN_UNWIND_INIT:
      expand_builtin_unwind_init ();
      return const0_rtx;
    case BUILT_IN_DWARF_CFA:
      return virtual_cfa_rtx;
#ifdef DWARF2_UNWIND_INFO
    case BUILT_IN_DWARF_SP_COLUMN:
      return expand_builtin_dwarf_sp_column ();
    case BUILT_IN_INIT_DWARF_REG_SIZES:
      expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
      return const0_rtx;
#endif
    case BUILT_IN_FROB_RETURN_ADDR:
      return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
    case BUILT_IN_EXTRACT_RETURN_ADDR:
      return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
    case BUILT_IN_EH_RETURN:
      expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
				CALL_EXPR_ARG (exp, 1));
      return const0_rtx;
#ifdef EH_RETURN_DATA_REGNO
    case BUILT_IN_EH_RETURN_DATA_REGNO:
      return expand_builtin_eh_return_data_regno (exp);
#endif
    case BUILT_IN_EXTEND_POINTER:
      return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
    case BUILT_IN_EH_POINTER:
      return expand_builtin_eh_pointer (exp);
    case BUILT_IN_EH_FILTER:
      return expand_builtin_eh_filter (exp);
    case BUILT_IN_EH_COPY_VALUES:
      return expand_builtin_eh_copy_values (exp);

    case BUILT_IN_VA_START:
      return expand_builtin_va_start (exp);
    case BUILT_IN_VA_END:
      return expand_builtin_va_end (exp);
    case BUILT_IN_VA_COPY:
      return expand_builtin_va_copy (exp);
    case BUILT_IN_EXPECT:
      return expand_builtin_expect (exp, target);
    case BUILT_IN_PREFETCH:
      expand_builtin_prefetch (exp);
      return const0_rtx;

    case BUILT_IN_PROFILE_FUNC_ENTER:
      return expand_builtin_profile_func (false);
    case BUILT_IN_PROFILE_FUNC_EXIT:
      return expand_builtin_profile_func (true);

    case BUILT_IN_INIT_TRAMPOLINE:
      return expand_builtin_init_trampoline (exp);
    case BUILT_IN_ADJUST_TRAMPOLINE:
      return expand_builtin_adjust_trampoline (exp);

    case BUILT_IN_FORK:
    case BUILT_IN_EXECL:
    case BUILT_IN_EXECV:
    case BUILT_IN_EXECLP:
    case BUILT_IN_EXECLE:
    case BUILT_IN_EXECVP:
    case BUILT_IN_EXECVE:
      target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
      if (target)
	return target;
      break;

      /* For the __sync builtins below, the _1.._16 suffix encodes the
	 operand size in bytes; get_builtin_sync_mode recovers the
	 machine mode from the offset relative to the _1 variant.  */
    case BUILT_IN_FETCH_AND_ADD_1:
    case BUILT_IN_FETCH_AND_ADD_2:
    case BUILT_IN_FETCH_AND_ADD_4:
    case BUILT_IN_FETCH_AND_ADD_8:
    case BUILT_IN_FETCH_AND_ADD_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_ADD_1);
      target = expand_builtin_sync_operation (mode, exp, PLUS,
					      false, target, ignore);
      if (target)
	return target;
      break;

    case BUILT_IN_FETCH_AND_SUB_1:
    case BUILT_IN_FETCH_AND_SUB_2:
    case BUILT_IN_FETCH_AND_SUB_4:
    case BUILT_IN_FETCH_AND_SUB_8:
    case BUILT_IN_FETCH_AND_SUB_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_SUB_1);
      target = expand_builtin_sync_operation (mode, exp, MINUS,
					      false, target, ignore);
      if (target)
	return target;
      break;

    case BUILT_IN_FETCH_AND_OR_1:
    case BUILT_IN_FETCH_AND_OR_2:
    case BUILT_IN_FETCH_AND_OR_4:
    case BUILT_IN_FETCH_AND_OR_8:
    case BUILT_IN_FETCH_AND_OR_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_OR_1);
      target = expand_builtin_sync_operation (mode, exp, IOR,
					      false, target, ignore);
      if (target)
	return target;
      break;

    case BUILT_IN_FETCH_AND_AND_1:
    case BUILT_IN_FETCH_AND_AND_2:
    case BUILT_IN_FETCH_AND_AND_4:
    case BUILT_IN_FETCH_AND_AND_8:
    case BUILT_IN_FETCH_AND_AND_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_AND_1);
      target = expand_builtin_sync_operation (mode, exp, AND,
					      false, target, ignore);
      if (target)
	return target;
      break;

    case BUILT_IN_FETCH_AND_XOR_1:
    case BUILT_IN_FETCH_AND_XOR_2:
    case BUILT_IN_FETCH_AND_XOR_4:
    case BUILT_IN_FETCH_AND_XOR_8:
    case BUILT_IN_FETCH_AND_XOR_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_XOR_1);
      target = expand_builtin_sync_operation (mode, exp, XOR,
					      false, target, ignore);
      if (target)
	return target;
      break;

    case BUILT_IN_FETCH_AND_NAND_1:
    case BUILT_IN_FETCH_AND_NAND_2:
    case BUILT_IN_FETCH_AND_NAND_4:
    case BUILT_IN_FETCH_AND_NAND_8:
    case BUILT_IN_FETCH_AND_NAND_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_NAND_1);
      target = expand_builtin_sync_operation (mode, exp, NOT,
					      false, target, ignore);
      if (target)
	return target;
      break;

    case BUILT_IN_ADD_AND_FETCH_1:
    case BUILT_IN_ADD_AND_FETCH_2:
    case BUILT_IN_ADD_AND_FETCH_4:
    case BUILT_IN_ADD_AND_FETCH_8:
    case BUILT_IN_ADD_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ADD_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, PLUS,
					      true, target, ignore);
      if (target)
	return target;
      break;

    case BUILT_IN_SUB_AND_FETCH_1:
    case BUILT_IN_SUB_AND_FETCH_2:
    case BUILT_IN_SUB_AND_FETCH_4:
    case BUILT_IN_SUB_AND_FETCH_8:
    case BUILT_IN_SUB_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SUB_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, MINUS,
					      true, target, ignore);
      if (target)
	return target;
      break;

    case BUILT_IN_OR_AND_FETCH_1:
    case BUILT_IN_OR_AND_FETCH_2:
    case BUILT_IN_OR_AND_FETCH_4:
    case BUILT_IN_OR_AND_FETCH_8:
    case BUILT_IN_OR_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_OR_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, IOR,
					      true, target, ignore);
      if (target)
	return target;
      break;

    case BUILT_IN_AND_AND_FETCH_1:
    case BUILT_IN_AND_AND_FETCH_2:
    case BUILT_IN_AND_AND_FETCH_4:
    case BUILT_IN_AND_AND_FETCH_8:
    case BUILT_IN_AND_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_AND_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, AND,
					      true, target, ignore);
      if (target)
	return target;
      break;

    case BUILT_IN_XOR_AND_FETCH_1:
    case BUILT_IN_XOR_AND_FETCH_2:
    case BUILT_IN_XOR_AND_FETCH_4:
    case BUILT_IN_XOR_AND_FETCH_8:
    case BUILT_IN_XOR_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_XOR_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, XOR,
					      true, target, ignore);
      if (target)
	return target;
      break;

    case BUILT_IN_NAND_AND_FETCH_1:
    case BUILT_IN_NAND_AND_FETCH_2:
    case BUILT_IN_NAND_AND_FETCH_4:
    case BUILT_IN_NAND_AND_FETCH_8:
    case BUILT_IN_NAND_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_NAND_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, NOT,
					      true, target, ignore);
      if (target)
	return target;
      break;

    case BUILT_IN_BOOL_COMPARE_AND_SWAP_1:
    case BUILT_IN_BOOL_COMPARE_AND_SWAP_2:
    case BUILT_IN_BOOL_COMPARE_AND_SWAP_4:
    case BUILT_IN_BOOL_COMPARE_AND_SWAP_8:
    case BUILT_IN_BOOL_COMPARE_AND_SWAP_16:
      if (mode == VOIDmode)
	mode = TYPE_MODE (boolean_type_node);
      if (!target || !register_operand (target, mode))
	target = gen_reg_rtx (mode);

      mode = get_builtin_sync_mode (fcode - BUILT_IN_BOOL_COMPARE_AND_SWAP_1);
      target = expand_builtin_compare_and_swap (mode, exp, true, target);
      if (target)
	return target;
      break;

    case BUILT_IN_VAL_COMPARE_AND_SWAP_1:
    case BUILT_IN_VAL_COMPARE_AND_SWAP_2:
    case BUILT_IN_VAL_COMPARE_AND_SWAP_4:
    case BUILT_IN_VAL_COMPARE_AND_SWAP_8:
    case BUILT_IN_VAL_COMPARE_AND_SWAP_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_VAL_COMPARE_AND_SWAP_1);
      target = expand_builtin_compare_and_swap (mode, exp, false, target);
      if (target)
	return target;
      break;

    case BUILT_IN_LOCK_TEST_AND_SET_1:
    case BUILT_IN_LOCK_TEST_AND_SET_2:
    case BUILT_IN_LOCK_TEST_AND_SET_4:
    case BUILT_IN_LOCK_TEST_AND_SET_8:
    case BUILT_IN_LOCK_TEST_AND_SET_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_TEST_AND_SET_1);
      target = expand_builtin_lock_test_and_set (mode, exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_LOCK_RELEASE_1:
    case BUILT_IN_LOCK_RELEASE_2:
    case BUILT_IN_LOCK_RELEASE_4:
    case BUILT_IN_LOCK_RELEASE_8:
    case BUILT_IN_LOCK_RELEASE_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_RELEASE_1);
      expand_builtin_lock_release (mode, exp);
      return const0_rtx;

    case BUILT_IN_SYNCHRONIZE:
      expand_builtin_synchronize ();
      return const0_rtx;

    case BUILT_IN_OBJECT_SIZE:
      return expand_builtin_object_size (exp);

    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      target = expand_builtin_memory_chk (exp, target, mode, fcode);
      if (target)
	return target;
      break;

      /* For the remaining _chk builtins we only diagnose; the call itself
	 is expanded as a normal library call below.  */
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STRCAT_CHK:
    case BUILT_IN_STRNCAT_CHK:
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      maybe_emit_chk_warning (exp, fcode);
      break;

    case BUILT_IN_SPRINTF_CHK:
    case BUILT_IN_VSPRINTF_CHK:
      maybe_emit_sprintf_chk_warning (exp, fcode);
      break;

    case BUILT_IN_FREE:
      maybe_emit_free_warning (exp);
      break;

    default:	/* just do library call, if unknown builtin */
      break;
    }

  /* The switch statement above can drop through to cause the function
     to be called normally.  */
  return expand_call (exp, target, ignore);
}
6512
6513 /* Determine whether a tree node represents a call to a built-in
6514 function. If the tree T is a call to a built-in function with
6515 the right number of arguments of the appropriate types, return
6516 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6517 Otherwise the return value is END_BUILTINS. */
6518
6519 enum built_in_function
6520 builtin_mathfn_code (const_tree t)
6521 {
6522 const_tree fndecl, arg, parmlist;
6523 const_tree argtype, parmtype;
6524 const_call_expr_arg_iterator iter;
6525
6526 if (TREE_CODE (t) != CALL_EXPR
6527 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6528 return END_BUILTINS;
6529
6530 fndecl = get_callee_fndecl (t);
6531 if (fndecl == NULL_TREE
6532 || TREE_CODE (fndecl) != FUNCTION_DECL
6533 || ! DECL_BUILT_IN (fndecl)
6534 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6535 return END_BUILTINS;
6536
6537 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6538 init_const_call_expr_arg_iterator (t, &iter);
6539 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
6540 {
6541 /* If a function doesn't take a variable number of arguments,
6542 the last element in the list will have type `void'. */
6543 parmtype = TREE_VALUE (parmlist);
6544 if (VOID_TYPE_P (parmtype))
6545 {
6546 if (more_const_call_expr_args_p (&iter))
6547 return END_BUILTINS;
6548 return DECL_FUNCTION_CODE (fndecl);
6549 }
6550
6551 if (! more_const_call_expr_args_p (&iter))
6552 return END_BUILTINS;
6553
6554 arg = next_const_call_expr_arg (&iter);
6555 argtype = TREE_TYPE (arg);
6556
6557 if (SCALAR_FLOAT_TYPE_P (parmtype))
6558 {
6559 if (! SCALAR_FLOAT_TYPE_P (argtype))
6560 return END_BUILTINS;
6561 }
6562 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
6563 {
6564 if (! COMPLEX_FLOAT_TYPE_P (argtype))
6565 return END_BUILTINS;
6566 }
6567 else if (POINTER_TYPE_P (parmtype))
6568 {
6569 if (! POINTER_TYPE_P (argtype))
6570 return END_BUILTINS;
6571 }
6572 else if (INTEGRAL_TYPE_P (parmtype))
6573 {
6574 if (! INTEGRAL_TYPE_P (argtype))
6575 return END_BUILTINS;
6576 }
6577 else
6578 return END_BUILTINS;
6579 }
6580
6581 /* Variable-length argument list. */
6582 return DECL_FUNCTION_CODE (fndecl);
6583 }
6584
6585 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
6586 evaluate to a constant. */
6587
6588 static tree
6589 fold_builtin_constant_p (tree arg)
6590 {
6591 /* We return 1 for a numeric type that's known to be a constant
6592 value at compile-time or for an aggregate type that's a
6593 literal constant. */
6594 STRIP_NOPS (arg);
6595
6596 /* If we know this is a constant, emit the constant of one. */
6597 if (CONSTANT_CLASS_P (arg)
6598 || (TREE_CODE (arg) == CONSTRUCTOR
6599 && TREE_CONSTANT (arg)))
6600 return integer_one_node;
6601 if (TREE_CODE (arg) == ADDR_EXPR)
6602 {
6603 tree op = TREE_OPERAND (arg, 0);
6604 if (TREE_CODE (op) == STRING_CST
6605 || (TREE_CODE (op) == ARRAY_REF
6606 && integer_zerop (TREE_OPERAND (op, 1))
6607 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
6608 return integer_one_node;
6609 }
6610
6611 /* If this expression has side effects, show we don't know it to be a
6612 constant. Likewise if it's a pointer or aggregate type since in
6613 those case we only want literals, since those are only optimized
6614 when generating RTL, not later.
6615 And finally, if we are compiling an initializer, not code, we
6616 need to return a definite result now; there's not going to be any
6617 more optimization done. */
6618 if (TREE_SIDE_EFFECTS (arg)
6619 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
6620 || POINTER_TYPE_P (TREE_TYPE (arg))
6621 || cfun == 0
6622 || folding_initializer)
6623 return integer_zero_node;
6624
6625 return NULL_TREE;
6626 }
6627
6628 /* Create builtin_expect with PRED and EXPECTED as its arguments and
6629 return it as a truthvalue. */
6630
6631 static tree
6632 build_builtin_expect_predicate (location_t loc, tree pred, tree expected)
6633 {
6634 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
6635
6636 fn = built_in_decls[BUILT_IN_EXPECT];
6637 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
6638 ret_type = TREE_TYPE (TREE_TYPE (fn));
6639 pred_type = TREE_VALUE (arg_types);
6640 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
6641
6642 pred = fold_convert_loc (loc, pred_type, pred);
6643 expected = fold_convert_loc (loc, expected_type, expected);
6644 call_expr = build_call_expr_loc (loc, fn, 2, pred, expected);
6645
6646 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
6647 build_int_cst (ret_type, 0));
6648 }
6649
/* Fold a call to builtin_expect with arguments ARG0 and ARG1.  Return
   NULL_TREE if no simplification is possible.  */

static tree
fold_builtin_expect (location_t loc, tree arg0, tree arg1)
{
  tree inner, fndecl;
  enum tree_code code;

  /* If this is a builtin_expect within a builtin_expect keep the
     inner one.  See through a comparison against a constant.  It
     might have been added to create a truthvalue.  */
  inner = arg0;
  if (COMPARISON_CLASS_P (inner)
      && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
    inner = TREE_OPERAND (inner, 0);

  if (TREE_CODE (inner) == CALL_EXPR
      && (fndecl = get_callee_fndecl (inner))
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
    return arg0;

  /* Distribute the expected value over short-circuiting operators.
     See through the cast from truthvalue_type_node to long.  */
  inner = arg0;
  while (TREE_CODE (inner) == NOP_EXPR
	 && INTEGRAL_TYPE_P (TREE_TYPE (inner))
	 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner, 0))))
    inner = TREE_OPERAND (inner, 0);

  code = TREE_CODE (inner);
  if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
    {
      /* Wrap each operand of the && / || in its own __builtin_expect
	 so the expectation survives lowering of the short-circuit.  */
      tree op0 = TREE_OPERAND (inner, 0);
      tree op1 = TREE_OPERAND (inner, 1);

      op0 = build_builtin_expect_predicate (loc, op0, arg1);
      op1 = build_builtin_expect_predicate (loc, op1, arg1);
      inner = build2 (code, TREE_TYPE (inner), op0, op1);

      return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
    }

  /* If the argument isn't invariant then there's nothing else we can do.  */
  if (!TREE_CONSTANT (arg0))
    return NULL_TREE;

  /* If we expect that a comparison against the argument will fold to
     a constant return the constant.  In practice, this means a true
     constant or the address of a non-weak symbol.  */
  inner = arg0;
  STRIP_NOPS (inner);
  if (TREE_CODE (inner) == ADDR_EXPR)
    {
      /* Walk down to the underlying decl the address refers to.  */
      do
	{
	  inner = TREE_OPERAND (inner, 0);
	}
      while (TREE_CODE (inner) == COMPONENT_REF
	     || TREE_CODE (inner) == ARRAY_REF);
      /* The address of a weak symbol can compare equal to null at run
	 time, so it does not act as a compile-time constant here.  */
      if ((TREE_CODE (inner) == VAR_DECL
	   || TREE_CODE (inner) == FUNCTION_DECL)
	  && DECL_WEAK (inner))
	return NULL_TREE;
    }

  /* Otherwise, ARG0 already has the proper type for the return value.  */
  return arg0;
}
6720
6721 /* Fold a call to __builtin_classify_type with argument ARG. */
6722
6723 static tree
6724 fold_builtin_classify_type (tree arg)
6725 {
6726 if (arg == 0)
6727 return build_int_cst (NULL_TREE, no_type_class);
6728
6729 return build_int_cst (NULL_TREE, type_to_class (TREE_TYPE (arg)));
6730 }
6731
6732 /* Fold a call to __builtin_strlen with argument ARG. */
6733
6734 static tree
6735 fold_builtin_strlen (location_t loc, tree type, tree arg)
6736 {
6737 if (!validate_arg (arg, POINTER_TYPE))
6738 return NULL_TREE;
6739 else
6740 {
6741 tree len = c_strlen (arg, 0);
6742
6743 if (len)
6744 return fold_convert_loc (loc, type, len);
6745
6746 return NULL_TREE;
6747 }
6748 }
6749
6750 /* Fold a call to __builtin_inf or __builtin_huge_val. */
6751
6752 static tree
6753 fold_builtin_inf (location_t loc, tree type, int warn)
6754 {
6755 REAL_VALUE_TYPE real;
6756
6757 /* __builtin_inff is intended to be usable to define INFINITY on all
6758 targets. If an infinity is not available, INFINITY expands "to a
6759 positive constant of type float that overflows at translation
6760 time", footnote "In this case, using INFINITY will violate the
6761 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
6762 Thus we pedwarn to ensure this constraint violation is
6763 diagnosed. */
6764 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
6765 pedwarn (loc, 0, "target format does not support infinity");
6766
6767 real_inf (&real);
6768 return build_real (type, real);
6769 }
6770
6771 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
6772
6773 static tree
6774 fold_builtin_nan (tree arg, tree type, int quiet)
6775 {
6776 REAL_VALUE_TYPE real;
6777 const char *str;
6778
6779 if (!validate_arg (arg, POINTER_TYPE))
6780 return NULL_TREE;
6781 str = c_getstr (arg);
6782 if (!str)
6783 return NULL_TREE;
6784
6785 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
6786 return NULL_TREE;
6787
6788 return build_real (type, real);
6789 }
6790
/* Return true if the floating point expression T has an integer value.
   We also allow +Inf, -Inf and NaN to be considered integer values.
   This is a conservative syntactic check; returning false only means
   "not provably integer-valued".  */

static bool
integer_valued_real_p (tree t)
{
  switch (TREE_CODE (t))
    {
    /* A conversion from an integer type is integral by construction.  */
    case FLOAT_EXPR:
      return true;

    /* These pass the property through from their operand.  */
    case ABS_EXPR:
    case SAVE_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 0));

    /* The value of these expressions is their second operand.  */
    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 1));

    /* Integers are closed under these operations (min/max included).  */
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 0))
	     && integer_valued_real_p (TREE_OPERAND (t, 1));

    /* A conditional is integer-valued if both arms are.  */
    case COND_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 1))
	     && integer_valued_real_p (TREE_OPERAND (t, 2));

    /* Check literals exactly.  */
    case REAL_CST:
      return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));

    case NOP_EXPR:
      {
	/* A cast from an integer type is integral; a cast between real
	   types preserves integrality of the inner value.  */
	tree type = TREE_TYPE (TREE_OPERAND (t, 0));
	if (TREE_CODE (type) == INTEGER_TYPE)
	  return true;
	if (TREE_CODE (type) == REAL_TYPE)
	  return integer_valued_real_p (TREE_OPERAND (t, 0));
	break;
      }

    case CALL_EXPR:
      switch (builtin_mathfn_code (t))
	{
	/* Rounding builtins produce integer values by definition.  */
	CASE_FLT_FN (BUILT_IN_CEIL):
	CASE_FLT_FN (BUILT_IN_FLOOR):
	CASE_FLT_FN (BUILT_IN_NEARBYINT):
	CASE_FLT_FN (BUILT_IN_RINT):
	CASE_FLT_FN (BUILT_IN_ROUND):
	CASE_FLT_FN (BUILT_IN_TRUNC):
	  return true;

	/* fmin/fmax return one of their operands, so the result is
	   integral when both operands are.  */
	CASE_FLT_FN (BUILT_IN_FMIN):
	CASE_FLT_FN (BUILT_IN_FMAX):
	  return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
		 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));

	default:
	  break;
	}
      break;

    default:
      break;
    }
  return false;
}
6862
/* FNDECL is assumed to be a builtin where truncation can be propagated
   across (for instance floor((double)f) == (double)floorf (f).
   Do the transformation for a call with argument ARG.  */

static tree
fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Integer rounding functions are idempotent: e.g. floor (floor (x))
     is just floor (x), so the inner call can be returned directly.  */
  if (fcode == builtin_mathfn_code (arg))
    return arg;

  /* If argument is already integer valued, and we don't need to worry
     about setting errno, there's no need to perform rounding.  */
  if (! flag_errno_math && integer_valued_real_p (arg))
    return arg;

  if (optimize)
    {
      /* Narrow the call: when ARG is really a widened lower-precision
	 value and a matching lower-precision builtin exists, call that
	 and widen the result instead.  */
      tree arg0 = strip_float_extensions (arg);
      tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
      tree newtype = TREE_TYPE (arg0);
      tree decl;

      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
	  && (decl = mathfn_built_in (newtype, fcode)))
	return fold_convert_loc (loc, ftype,
				 build_call_expr_loc (loc, decl, 1,
						      fold_convert_loc (loc,
									newtype,
									arg0)));
    }
  return NULL_TREE;
}
6901
/* FNDECL is assumed to be builtin which can narrow the FP type of
   the argument, for instance lround((double)f) -> lroundf (f).
   Do the transformation for a call with argument ARG.  */

static tree
fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* If argument is already integer valued, and we don't need to worry
     about setting errno, there's no need to perform rounding; a plain
     float-to-integer conversion gives the same result.  */
  if (! flag_errno_math && integer_valued_real_p (arg))
    return fold_build1_loc (loc, FIX_TRUNC_EXPR,
			    TREE_TYPE (TREE_TYPE (fndecl)), arg);

  if (optimize)
    {
      /* Narrow the argument type when ARG is a widened value and a
	 matching lower-precision builtin exists.  */
      tree ftype = TREE_TYPE (arg);
      tree arg0 = strip_float_extensions (arg);
      tree newtype = TREE_TYPE (arg0);
      tree decl;

      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
	  && (decl = mathfn_built_in (newtype, fcode)))
	return build_call_expr_loc (loc, decl, 1,
				    fold_convert_loc (loc, newtype, arg0));
    }

  /* Canonicalize llround (x) to lround (x) on LP64 targets where
     sizeof (long long) == sizeof (long).  */
  if (TYPE_PRECISION (long_long_integer_type_node)
      == TYPE_PRECISION (long_integer_type_node))
    {
      tree newfn = NULL_TREE;
      switch (fcode)
	{
	CASE_FLT_FN (BUILT_IN_LLCEIL):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
	  break;

	CASE_FLT_FN (BUILT_IN_LLFLOOR):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
	  break;

	CASE_FLT_FN (BUILT_IN_LLROUND):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
	  break;

	CASE_FLT_FN (BUILT_IN_LLRINT):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
	  break;

	default:
	  break;
	}

      if (newfn)
	{
	  /* The long-returning variant computes the same value; convert
	     its result back to the original (long long) return type.  */
	  tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
	  return fold_convert_loc (loc,
				   TREE_TYPE (TREE_TYPE (fndecl)), newcall);
	}
    }

  return NULL_TREE;
}
6971
/* Fold call to builtin cabs, cabsf or cabsl with argument ARG.  TYPE is the
   return type.  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
{
  tree res;

  if (!validate_arg (arg, COMPLEX_TYPE)
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
    return NULL_TREE;

  /* Calculate the result when the argument is a constant:
     cabs(a+bi) == hypot(a,b).  */
  if (TREE_CODE (arg) == COMPLEX_CST
      && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
			      type, mpfr_hypot)))
    return res;

  if (TREE_CODE (arg) == COMPLEX_EXPR)
    {
      tree real = TREE_OPERAND (arg, 0);
      tree imag = TREE_OPERAND (arg, 1);

      /* If either part is zero, cabs is fabs of the other.  */
      if (real_zerop (real))
	return fold_build1_loc (loc, ABS_EXPR, type, imag);
      if (real_zerop (imag))
	return fold_build1_loc (loc, ABS_EXPR, type, real);

      /* cabs(x+xi) -> fabs(x)*sqrt(2).  */
      if (flag_unsafe_math_optimizations
	  && operand_equal_p (real, imag, OEP_PURE_SAME))
	{
	  const REAL_VALUE_TYPE sqrt2_trunc
	    = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
	  /* NOTE: STRIP_NOPS mutates REAL after the equality test so
	     the ABS_EXPR is built over the bare operand.  */
	  STRIP_NOPS (real);
	  return fold_build2_loc (loc, MULT_EXPR, type,
				  fold_build1_loc (loc, ABS_EXPR, type, real),
				  build_real (type, sqrt2_trunc));
	}
    }

  /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z).  */
  if (TREE_CODE (arg) == NEGATE_EXPR
      || TREE_CODE (arg) == CONJ_EXPR)
    return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));

  /* Expand cabs(z) to sqrt(re*re + im*im) when unsafe math is allowed.
     Don't do this when optimizing for size.  */
  if (flag_unsafe_math_optimizations
      && optimize && optimize_function_for_speed_p (cfun))
    {
      tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);

      if (sqrtfn != NULL_TREE)
	{
	  tree rpart, ipart, result;

	  /* Save ARG so extracting both parts evaluates it only once.  */
	  arg = builtin_save_expr (arg);

	  rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
	  ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);

	  rpart = builtin_save_expr (rpart);
	  ipart = builtin_save_expr (ipart);

	  result = fold_build2_loc (loc, PLUS_EXPR, type,
				    fold_build2_loc (loc, MULT_EXPR, type,
						     rpart, rpart),
				    fold_build2_loc (loc, MULT_EXPR, type,
						     ipart, ipart));

	  return build_call_expr_loc (loc, sqrtfn, 1, result);
	}
    }

  return NULL_TREE;
}
7049
7050 /* Build a complex (inf +- 0i) for the result of cproj. TYPE is the
7051 complex tree type of the result. If NEG is true, the imaginary
7052 zero is negative. */
7053
7054 static tree
7055 build_complex_cproj (tree type, bool neg)
7056 {
7057 REAL_VALUE_TYPE rinf, rzero = dconst0;
7058
7059 real_inf (&rinf);
7060 rzero.sign = neg;
7061 return build_complex (type, build_real (TREE_TYPE (type), rinf),
7062 build_real (TREE_TYPE (type), rzero));
7063 }
7064
/* Fold call to builtin cproj, cprojf or cprojl with argument ARG.  TYPE is the
   return type.  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cproj (location_t loc, tree arg, tree type)
{
  if (!validate_arg (arg, COMPLEX_TYPE)
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
    return NULL_TREE;

  /* If there are no infinities, cproj is the identity; return arg.  */
  if (! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (type))))
    return non_lvalue_loc (loc, arg);

  /* Calculate the result when the argument is a constant.  */
  if (TREE_CODE (arg) == COMPLEX_CST)
    {
      const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
      const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));

      /* Any infinity projects to (inf + copysign(0,imag)*i); finite
	 values project to themselves.  */
      if (real_isinf (real) || real_isinf (imag))
	return build_complex_cproj (type, imag->sign);
      else
	return arg;
    }
  else if (TREE_CODE (arg) == COMPLEX_EXPR)
    {
      tree real = TREE_OPERAND (arg, 0);
      tree imag = TREE_OPERAND (arg, 1);

      STRIP_NOPS (real);
      STRIP_NOPS (imag);

      /* If the real part is inf and the imag part is known to be
	 nonnegative, return (inf + 0i).  Remember side-effects are
	 possible in the imag part.  */
      if (TREE_CODE (real) == REAL_CST
	  && real_isinf (TREE_REAL_CST_PTR (real))
	  && tree_expr_nonnegative_p (imag))
	return omit_one_operand_loc (loc, type,
				     build_complex_cproj (type, false),
				     arg);

      /* If the imag part is inf, return (inf+I*copysign(0,imag)).
	 Remember side-effects are possible in the real part.  */
      if (TREE_CODE (imag) == REAL_CST
	  && real_isinf (TREE_REAL_CST_PTR (imag)))
	return
	  omit_one_operand_loc (loc, type,
				build_complex_cproj (type, TREE_REAL_CST_PTR
						     (imag)->sign), arg);
    }

  return NULL_TREE;
}
7120
/* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_sqrt (location_t loc, tree arg, tree type)
{
  enum built_in_function fcode;
  tree res;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
    return res;

  /* Optimize sqrt(expN(x)) = expN(x*0.5).  */
  fcode = builtin_mathfn_code (arg);
  if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
    {
      tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
      arg = fold_build2_loc (loc, MULT_EXPR, type,
			     CALL_EXPR_ARG (arg, 0),
			     build_real (type, dconsthalf));
      return build_call_expr_loc (loc, expfn, 1, arg);
    }

  /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)).  */
  if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
    {
      tree powfn = mathfn_built_in (type, BUILT_IN_POW);

      if (powfn)
	{
	  tree arg0 = CALL_EXPR_ARG (arg, 0);
	  tree tree_root;
	  /* The inner root was either sqrt or cbrt.  */
	  /* This was a conditional expression but it triggered a bug
	     in Sun C 5.5.  */
	  REAL_VALUE_TYPE dconstroot;
	  if (BUILTIN_SQRT_P (fcode))
	    dconstroot = dconsthalf;
	  else
	    dconstroot = dconst_third ();

	  /* Adjust for the outer root: halve the exponent by
	     decrementing the binary exponent of the constant.  */
	  SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
	  dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
	  tree_root = build_real (type, dconstroot);
	  return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
	}
    }

  /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5).  */
  if (flag_unsafe_math_optimizations
      && (fcode == BUILT_IN_POW
	  || fcode == BUILT_IN_POWF
	  || fcode == BUILT_IN_POWL))
    {
      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
      tree arg0 = CALL_EXPR_ARG (arg, 0);
      tree arg1 = CALL_EXPR_ARG (arg, 1);
      tree narg1;
      /* |x| is needed because sqrt of pow is defined for negative x
	 with even y, while pow(x, y*0.5) would not be.  */
      if (!tree_expr_nonnegative_p (arg0))
	arg0 = build1 (ABS_EXPR, type, arg0);
      narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
			       build_real (type, dconsthalf));
      return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
    }

  return NULL_TREE;
}
7194
/* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cbrt (location_t loc, tree arg, tree type)
{
  const enum built_in_function fcode = builtin_mathfn_code (arg);
  tree res;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
    return res;

  if (flag_unsafe_math_optimizations)
    {
      /* Optimize cbrt(expN(x)) -> expN(x/3).  */
      if (BUILTIN_EXPONENT_P (fcode))
	{
	  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
	  const REAL_VALUE_TYPE third_trunc =
	    real_value_truncate (TYPE_MODE (type), dconst_third ());
	  arg = fold_build2_loc (loc, MULT_EXPR, type,
				 CALL_EXPR_ARG (arg, 0),
				 build_real (type, third_trunc));
	  return build_call_expr_loc (loc, expfn, 1, arg);
	}

      /* Optimize cbrt(sqrt(x)) -> pow(x,1/6).  */
      if (BUILTIN_SQRT_P (fcode))
	{
	  tree powfn = mathfn_built_in (type, BUILT_IN_POW);

	  if (powfn)
	    {
	      tree arg0 = CALL_EXPR_ARG (arg, 0);
	      tree tree_root;
	      REAL_VALUE_TYPE dconstroot = dconst_third ();

	      /* Halve 1/3 to 1/6 by decrementing the binary exponent.  */
	      SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
	      dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
	      tree_root = build_real (type, dconstroot);
	      return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
	    }
	}

      /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative.  */
      if (BUILTIN_CBRT_P (fcode))
	{
	  tree arg0 = CALL_EXPR_ARG (arg, 0);
	  if (tree_expr_nonnegative_p (arg0))
	    {
	      tree powfn = mathfn_built_in (type, BUILT_IN_POW);

	      if (powfn)
		{
		  tree tree_root;
		  REAL_VALUE_TYPE dconstroot;

		  /* 1/9 == (1/3) * (1/3).  */
		  real_arithmetic (&dconstroot, MULT_EXPR,
				   dconst_third_ptr (), dconst_third_ptr ());
		  dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
		  tree_root = build_real (type, dconstroot);
		  return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
		}
	    }
	}

      /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative.  */
      if (fcode == BUILT_IN_POW
	  || fcode == BUILT_IN_POWF
	  || fcode == BUILT_IN_POWL)
	{
	  tree arg00 = CALL_EXPR_ARG (arg, 0);
	  tree arg01 = CALL_EXPR_ARG (arg, 1);
	  if (tree_expr_nonnegative_p (arg00))
	    {
	      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
	      const REAL_VALUE_TYPE dconstroot
		= real_value_truncate (TYPE_MODE (type), dconst_third ());
	      tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
					     build_real (type, dconstroot));
	      return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
	    }
	}
    }
  return NULL_TREE;
}
7285
7286 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7287 TYPE is the type of the return value. Return NULL_TREE if no
7288 simplification can be made. */
7289
7290 static tree
7291 fold_builtin_cos (location_t loc,
7292 tree arg, tree type, tree fndecl)
7293 {
7294 tree res, narg;
7295
7296 if (!validate_arg (arg, REAL_TYPE))
7297 return NULL_TREE;
7298
7299 /* Calculate the result when the argument is a constant. */
7300 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7301 return res;
7302
7303 /* Optimize cos(-x) into cos (x). */
7304 if ((narg = fold_strip_sign_ops (arg)))
7305 return build_call_expr_loc (loc, fndecl, 1, narg);
7306
7307 return NULL_TREE;
7308 }
7309
7310 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7311 Return NULL_TREE if no simplification can be made. */
7312
7313 static tree
7314 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
7315 {
7316 if (validate_arg (arg, REAL_TYPE))
7317 {
7318 tree res, narg;
7319
7320 /* Calculate the result when the argument is a constant. */
7321 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7322 return res;
7323
7324 /* Optimize cosh(-x) into cosh (x). */
7325 if ((narg = fold_strip_sign_ops (arg)))
7326 return build_call_expr_loc (loc, fndecl, 1, narg);
7327 }
7328
7329 return NULL_TREE;
7330 }
7331
/* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
   argument ARG.  TYPE is the type of the return value.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
		   bool hyper)
{
  if (validate_arg (arg, COMPLEX_TYPE)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
    {
      tree tmp;

      /* Calculate the result when the argument is a constant, using
	 the matching MPC function for the trig/hyperbolic variant.  */
      if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
	return tmp;

      /* Optimize fn(-x) into fn(x): both ccos and ccosh are even.  */
      if ((tmp = fold_strip_sign_ops (arg)))
	return build_call_expr_loc (loc, fndecl, 1, tmp);
    }

  return NULL_TREE;
}
7356
7357 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7358 Return NULL_TREE if no simplification can be made. */
7359
7360 static tree
7361 fold_builtin_tan (tree arg, tree type)
7362 {
7363 enum built_in_function fcode;
7364 tree res;
7365
7366 if (!validate_arg (arg, REAL_TYPE))
7367 return NULL_TREE;
7368
7369 /* Calculate the result when the argument is a constant. */
7370 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7371 return res;
7372
7373 /* Optimize tan(atan(x)) = x. */
7374 fcode = builtin_mathfn_code (arg);
7375 if (flag_unsafe_math_optimizations
7376 && (fcode == BUILT_IN_ATAN
7377 || fcode == BUILT_IN_ATANF
7378 || fcode == BUILT_IN_ATANL))
7379 return CALL_EXPR_ARG (arg, 0);
7380
7381 return NULL_TREE;
7382 }
7383
/* Fold function call to builtin sincos, sincosf, or sincosl.  ARG0 is
   the angle, ARG1 and ARG2 point to where sin and cos are stored.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_sincos (location_t loc,
		     tree arg0, tree arg1, tree arg2)
{
  tree type;
  tree res, fn, call;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE))
    return NULL_TREE;

  type = TREE_TYPE (arg0);

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
    return res;

  /* Canonicalize sincos to cexpi, whose imag part is sin and real
     part is cos.  Only possible when C99 functions are available.  */
  if (!TARGET_C99_FUNCTIONS)
    return NULL_TREE;
  fn = mathfn_built_in (type, BUILT_IN_CEXPI);
  if (!fn)
    return NULL_TREE;

  /* Save the call so the two stores evaluate it only once.  */
  call = build_call_expr_loc (loc, fn, 1, arg0);
  call = builtin_save_expr (call);

  /* Emit  *arg1 = imag (cexpi (x)); *arg2 = real (cexpi (x));  */
  return build2 (COMPOUND_EXPR, void_type_node,
		 build2 (MODIFY_EXPR, void_type_node,
			 build_fold_indirect_ref_loc (loc, arg1),
			 build1 (IMAGPART_EXPR, type, call)),
		 build2 (MODIFY_EXPR, void_type_node,
			 build_fold_indirect_ref_loc (loc, arg2),
			 build1 (REALPART_EXPR, type, call)));
}
7423
/* Fold function call to builtin cexp, cexpf, or cexpl.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cexp (location_t loc, tree arg0, tree type)
{
  tree rtype;
  tree realp, imagp, ifn;
  tree res;

  if (!validate_arg (arg0, COMPLEX_TYPE)
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
    return res;

  /* RTYPE is the real element type of the complex argument.  */
  rtype = TREE_TYPE (TREE_TYPE (arg0));

  /* In case we can figure out the real part of arg0 and it is constant zero
     fold to cexpi.  */
  if (!TARGET_C99_FUNCTIONS)
    return NULL_TREE;
  ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
  if (!ifn)
    return NULL_TREE;

  if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
      && real_zerop (realp))
    {
      tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
      return build_call_expr_loc (loc, ifn, 1, narg);
    }

  /* In case we can easily decompose real and imaginary parts split cexp
     to exp (r) * cexpi (i).  */
  if (flag_unsafe_math_optimizations
      && realp)
    {
      tree rfn, rcall, icall;

      rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
      if (!rfn)
	return NULL_TREE;

      imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
      if (!imagp)
	return NULL_TREE;

      /* Save both calls: each is referenced twice below.  */
      icall = build_call_expr_loc (loc, ifn, 1, imagp);
      icall = builtin_save_expr (icall);
      rcall = build_call_expr_loc (loc, rfn, 1, realp);
      rcall = builtin_save_expr (rcall);
      /* Build  exp(r)*real(cexpi(i)) + exp(r)*imag(cexpi(i)) * I.  */
      return fold_build2_loc (loc, COMPLEX_EXPR, type,
			      fold_build2_loc (loc, MULT_EXPR, rtype,
					       rcall,
					       fold_build1_loc (loc, REALPART_EXPR,
								rtype, icall)),
			      fold_build2_loc (loc, MULT_EXPR, rtype,
					       rcall,
					       fold_build1_loc (loc, IMAGPART_EXPR,
								rtype, icall)));
    }

  return NULL_TREE;
}
7491
7492 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7493 Return NULL_TREE if no simplification can be made. */
7494
7495 static tree
7496 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
7497 {
7498 if (!validate_arg (arg, REAL_TYPE))
7499 return NULL_TREE;
7500
7501 /* Optimize trunc of constant value. */
7502 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7503 {
7504 REAL_VALUE_TYPE r, x;
7505 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7506
7507 x = TREE_REAL_CST (arg);
7508 real_trunc (&r, TYPE_MODE (type), &x);
7509 return build_real (type, r);
7510 }
7511
7512 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7513 }
7514
7515 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7516 Return NULL_TREE if no simplification can be made. */
7517
7518 static tree
7519 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
7520 {
7521 if (!validate_arg (arg, REAL_TYPE))
7522 return NULL_TREE;
7523
7524 /* Optimize floor of constant value. */
7525 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7526 {
7527 REAL_VALUE_TYPE x;
7528
7529 x = TREE_REAL_CST (arg);
7530 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7531 {
7532 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7533 REAL_VALUE_TYPE r;
7534
7535 real_floor (&r, TYPE_MODE (type), &x);
7536 return build_real (type, r);
7537 }
7538 }
7539
7540 /* Fold floor (x) where x is nonnegative to trunc (x). */
7541 if (tree_expr_nonnegative_p (arg))
7542 {
7543 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7544 if (truncfn)
7545 return build_call_expr_loc (loc, truncfn, 1, arg);
7546 }
7547
7548 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7549 }
7550
7551 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7552 Return NULL_TREE if no simplification can be made. */
7553
7554 static tree
7555 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
7556 {
7557 if (!validate_arg (arg, REAL_TYPE))
7558 return NULL_TREE;
7559
7560 /* Optimize ceil of constant value. */
7561 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7562 {
7563 REAL_VALUE_TYPE x;
7564
7565 x = TREE_REAL_CST (arg);
7566 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7567 {
7568 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7569 REAL_VALUE_TYPE r;
7570
7571 real_ceil (&r, TYPE_MODE (type), &x);
7572 return build_real (type, r);
7573 }
7574 }
7575
7576 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7577 }
7578
7579 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7580 Return NULL_TREE if no simplification can be made. */
7581
7582 static tree
7583 fold_builtin_round (location_t loc, tree fndecl, tree arg)
7584 {
7585 if (!validate_arg (arg, REAL_TYPE))
7586 return NULL_TREE;
7587
7588 /* Optimize round of constant value. */
7589 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7590 {
7591 REAL_VALUE_TYPE x;
7592
7593 x = TREE_REAL_CST (arg);
7594 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7595 {
7596 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7597 REAL_VALUE_TYPE r;
7598
7599 real_round (&r, TYPE_MODE (type), &x);
7600 return build_real (type, r);
7601 }
7602 }
7603
7604 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7605 }
7606
/* Fold function call to builtin lround, lroundf or lroundl (or the
   corresponding long long versions) and other rounding functions.  ARG
   is the argument to the call.  Return NULL_TREE if no simplification
   can be made.  */

static tree
fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Optimize lround of constant value.  */
  if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);

      /* Inf and NaN have no meaningful integer result; leave them for
	 the runtime.  */
      if (real_isfinite (&x))
	{
	  tree itype = TREE_TYPE (TREE_TYPE (fndecl));
	  tree ftype = TREE_TYPE (arg);
	  unsigned HOST_WIDE_INT lo2;
	  HOST_WIDE_INT hi, lo;
	  REAL_VALUE_TYPE r;

	  /* Round X according to which builtin this is.  */
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    CASE_FLT_FN (BUILT_IN_LFLOOR):
	    CASE_FLT_FN (BUILT_IN_LLFLOOR):
	      real_floor (&r, TYPE_MODE (ftype), &x);
	      break;

	    CASE_FLT_FN (BUILT_IN_LCEIL):
	    CASE_FLT_FN (BUILT_IN_LLCEIL):
	      real_ceil (&r, TYPE_MODE (ftype), &x);
	      break;

	    CASE_FLT_FN (BUILT_IN_LROUND):
	    CASE_FLT_FN (BUILT_IN_LLROUND):
	      real_round (&r, TYPE_MODE (ftype), &x);
	      break;

	    default:
	      gcc_unreachable ();
	    }

	  /* Fold only if the rounded value fits the integer return
	     type without overflow.  */
	  REAL_VALUE_TO_INT (&lo, &hi, r);
	  if (!fit_double_type (lo, hi, &lo2, &hi, itype))
	    return build_int_cst_wide (itype, lo2, hi);
	}
    }

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x).  */
      if (tree_expr_nonnegative_p (arg))
	return fold_build1_loc (loc, FIX_TRUNC_EXPR,
				TREE_TYPE (TREE_TYPE (fndecl)), arg);
      break;
    default:;
    }

  return fold_fixed_mathfn (loc, fndecl, arg);
}
7672
/* Fold function call to builtin ffs, clz, ctz, popcount and parity
   and their long and long long variants (i.e. ffsl and ffsll).  ARG is
   the argument to the call.  Return NULL_TREE if no simplification can
   be made.  */

static tree
fold_builtin_bitop (tree fndecl, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Optimize for constant argument.  */
  if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
    {
      /* The constant is held as two host words: LO holds the low
	 HOST_BITS_PER_WIDE_INT bits, HI the rest.  */
      HOST_WIDE_INT hi, width, result;
      unsigned HOST_WIDE_INT lo;
      tree type;

      type = TREE_TYPE (arg);
      width = TYPE_PRECISION (type);
      lo = TREE_INT_CST_LOW (arg);

      /* Clear all the bits that are beyond the type's precision.  */
      if (width > HOST_BITS_PER_WIDE_INT)
	{
	  hi = TREE_INT_CST_HIGH (arg);
	  if (width < 2 * HOST_BITS_PER_WIDE_INT)
	    hi &= ~((HOST_WIDE_INT) (-1) >> (width - HOST_BITS_PER_WIDE_INT));
	}
      else
	{
	  hi = 0;
	  if (width < HOST_BITS_PER_WIDE_INT)
	    lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
	}

      switch (DECL_FUNCTION_CODE (fndecl))
	{
	/* ffs: 1-based index of least significant set bit, 0 if none.
	   (lo & -lo) isolates the lowest set bit.  */
	CASE_INT_FN (BUILT_IN_FFS):
	  if (lo != 0)
	    result = exact_log2 (lo & -lo) + 1;
	  else if (hi != 0)
	    result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi) + 1;
	  else
	    result = 0;
	  break;

	/* clz: leading zero count; for a zero argument use the
	   target-defined value if there is one, else the width.  */
	CASE_INT_FN (BUILT_IN_CLZ):
	  if (hi != 0)
	    result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
	  else if (lo != 0)
	    result = width - floor_log2 (lo) - 1;
	  else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
	    result = width;
	  break;

	/* ctz: trailing zero count; zero argument handled as for clz.  */
	CASE_INT_FN (BUILT_IN_CTZ):
	  if (lo != 0)
	    result = exact_log2 (lo & -lo);
	  else if (hi != 0)
	    result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi);
	  else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
	    result = width;
	  break;

	/* popcount: number of set bits; x &= x-1 clears the lowest
	   set bit each iteration.  */
	CASE_INT_FN (BUILT_IN_POPCOUNT):
	  result = 0;
	  while (lo)
	    result++, lo &= lo - 1;
	  while (hi)
	    result++, hi &= hi - 1;
	  break;

	/* parity: popcount modulo 2.  */
	CASE_INT_FN (BUILT_IN_PARITY):
	  result = 0;
	  while (lo)
	    result++, lo &= lo - 1;
	  while (hi)
	    result++, hi &= hi - 1;
	  result &= 1;
	  break;

	default:
	  gcc_unreachable ();
	}

      return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
    }

  return NULL_TREE;
}
7764
/* Fold function call to builtin_bswap and the long and long long
   variants.  Return NULL_TREE if no simplification can be made.  */
static tree
fold_builtin_bswap (tree fndecl, tree arg)
{
  if (! validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Optimize constant value.  */
  if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
    {
      /* The value and the result are each held as a (HI, LO) pair of
	 host words; LO carries the low HOST_BITS_PER_WIDE_INT bits.  */
      HOST_WIDE_INT hi, width, r_hi = 0;
      unsigned HOST_WIDE_INT lo, r_lo = 0;
      tree type;

      type = TREE_TYPE (arg);
      width = TYPE_PRECISION (type);
      lo = TREE_INT_CST_LOW (arg);
      hi = TREE_INT_CST_HIGH (arg);

      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_BSWAP32:
	case BUILT_IN_BSWAP64:
	  {
	    int s;

	    /* Move the byte at bit offset S to the mirrored offset D,
	       picking source and destination host word as needed.  */
	    for (s = 0; s < width; s += 8)
	      {
		int d = width - s - 8;
		unsigned HOST_WIDE_INT byte;

		if (s < HOST_BITS_PER_WIDE_INT)
		  byte = (lo >> s) & 0xff;
		else
		  byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;

		if (d < HOST_BITS_PER_WIDE_INT)
		  r_lo |= byte << d;
		else
		  r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
	      }
	  }

	  break;

	default:
	  gcc_unreachable ();
	}

      /* Build the result constant; a wide constant is needed only when
	 the type spans more than one host word.  */
      if (width < HOST_BITS_PER_WIDE_INT)
	return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), r_lo);
      else
	return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl)), r_lo, r_hi);
    }

  return NULL_TREE;
}
7823
/* A subroutine of fold_builtin to fold the various logarithmic
   functions.  Return NULL_TREE if no simplification can be made.
   FUNC is the corresponding MPFR logarithm function; besides doing
   the constant folding, it doubles as a tag (compared against
   mpfr_log, mpfr_log2 and mpfr_log10) identifying which logarithm
   is being folded.  */

static tree
fold_builtin_logarithm (location_t loc, tree fndecl, tree arg,
			int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
{
  if (validate_arg (arg, REAL_TYPE))
    {
      tree type = TREE_TYPE (TREE_TYPE (fndecl));
      tree res;
      const enum built_in_function fcode = builtin_mathfn_code (arg);

      /* Calculate the result when the argument is a constant.  The
	 &dconst0 lower bound restricts folding to arguments where the
	 logarithm is defined.  */
      if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
	return res;

      /* Special case, optimize logN(expN(x)) = x.  */
      if (flag_unsafe_math_optimizations
	  && ((func == mpfr_log
	       && (fcode == BUILT_IN_EXP
		   || fcode == BUILT_IN_EXPF
		   || fcode == BUILT_IN_EXPL))
	      || (func == mpfr_log2
		  && (fcode == BUILT_IN_EXP2
		      || fcode == BUILT_IN_EXP2F
		      || fcode == BUILT_IN_EXP2L))
	      || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
	return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));

      /* Optimize logN(func()) for various exponential functions.  We
	 want to determine the value "x" and the power "exponent" in
	 order to transform logN(x**exponent) into exponent*logN(x).  */
      if (flag_unsafe_math_optimizations)
	{
	  tree exponent = 0, x = 0;

	  switch (fcode)
	    {
	    CASE_FLT_FN (BUILT_IN_EXP):
	      /* Prepare to do logN(exp(exponent) -> exponent*logN(e).  */
	      x = build_real (type, real_value_truncate (TYPE_MODE (type),
							 dconst_e ()));
	      exponent = CALL_EXPR_ARG (arg, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_EXP2):
	      /* Prepare to do logN(exp2(exponent) -> exponent*logN(2).  */
	      x = build_real (type, dconst2);
	      exponent = CALL_EXPR_ARG (arg, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_EXP10):
	    CASE_FLT_FN (BUILT_IN_POW10):
	      /* Prepare to do logN(exp10(exponent) -> exponent*logN(10).  */
	      {
		REAL_VALUE_TYPE dconst10;
		real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
		x = build_real (type, dconst10);
	      }
	      exponent = CALL_EXPR_ARG (arg, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_SQRT):
	      /* Prepare to do logN(sqrt(x) -> 0.5*logN(x).  */
	      x = CALL_EXPR_ARG (arg, 0);
	      exponent = build_real (type, dconsthalf);
	      break;
	    CASE_FLT_FN (BUILT_IN_CBRT):
	      /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x).  */
	      x = CALL_EXPR_ARG (arg, 0);
	      exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
								dconst_third ()));
	      break;
	    CASE_FLT_FN (BUILT_IN_POW):
	      /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x).  */
	      x = CALL_EXPR_ARG (arg, 0);
	      exponent = CALL_EXPR_ARG (arg, 1);
	      break;
	    default:
	      break;
	    }

	  /* Now perform the optimization.  */
	  if (x && exponent)
	    {
	      tree logfn = build_call_expr_loc (loc, fndecl, 1, x);
	      return fold_build2_loc (loc, MULT_EXPR, type, exponent, logfn);
	    }
	}
    }

  return NULL_TREE;
}
7916
7917 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
7918 NULL_TREE if no simplification can be made. */
7919
7920 static tree
7921 fold_builtin_hypot (location_t loc, tree fndecl,
7922 tree arg0, tree arg1, tree type)
7923 {
7924 tree res, narg0, narg1;
7925
7926 if (!validate_arg (arg0, REAL_TYPE)
7927 || !validate_arg (arg1, REAL_TYPE))
7928 return NULL_TREE;
7929
7930 /* Calculate the result when the argument is a constant. */
7931 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
7932 return res;
7933
7934 /* If either argument to hypot has a negate or abs, strip that off.
7935 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
7936 narg0 = fold_strip_sign_ops (arg0);
7937 narg1 = fold_strip_sign_ops (arg1);
7938 if (narg0 || narg1)
7939 {
7940 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
7941 narg1 ? narg1 : arg1);
7942 }
7943
7944 /* If either argument is zero, hypot is fabs of the other. */
7945 if (real_zerop (arg0))
7946 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
7947 else if (real_zerop (arg1))
7948 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
7949
7950 /* hypot(x,x) -> fabs(x)*sqrt(2). */
7951 if (flag_unsafe_math_optimizations
7952 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
7953 {
7954 const REAL_VALUE_TYPE sqrt2_trunc
7955 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7956 return fold_build2_loc (loc, MULT_EXPR, type,
7957 fold_build1_loc (loc, ABS_EXPR, type, arg0),
7958 build_real (type, sqrt2_trunc));
7959 }
7960
7961 return NULL_TREE;
7962 }
7963
7964
/* Fold a builtin function call to pow, powf, or powl.  Return
   NULL_TREE if no simplification can be made.  */
static tree
fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
{
  tree res;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
    return res;

  /* Optimize pow(1.0,y) = 1.0.  */
  if (real_onep (arg0))
    return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);

  /* Transforms keyed on a constant exponent.  */
  if (TREE_CODE (arg1) == REAL_CST
      && !TREE_OVERFLOW (arg1))
    {
      REAL_VALUE_TYPE cint;
      REAL_VALUE_TYPE c;
      HOST_WIDE_INT n;

      c = TREE_REAL_CST (arg1);

      /* Optimize pow(x,0.0) = 1.0.  */
      if (REAL_VALUES_EQUAL (c, dconst0))
	return omit_one_operand_loc (loc, type, build_real (type, dconst1),
				     arg0);

      /* Optimize pow(x,1.0) = x.  */
      if (REAL_VALUES_EQUAL (c, dconst1))
	return arg0;

      /* Optimize pow(x,-1.0) = 1.0/x.  */
      if (REAL_VALUES_EQUAL (c, dconstm1))
	return fold_build2_loc (loc, RDIV_EXPR, type,
				build_real (type, dconst1), arg0);

      /* Optimize pow(x,0.5) = sqrt(x).  Unsafe: differs for x = -0.0
	 and x = -Inf.  */
      if (flag_unsafe_math_optimizations
	  && REAL_VALUES_EQUAL (c, dconsthalf))
	{
	  tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);

	  if (sqrtfn != NULL_TREE)
	    return build_call_expr_loc (loc, sqrtfn, 1, arg0);
	}

      /* Optimize pow(x,1.0/3.0) = cbrt(x).  The exponent is compared
	 against 1/3 truncated to the type's precision.  */
      if (flag_unsafe_math_optimizations)
	{
	  const REAL_VALUE_TYPE dconstroot
	    = real_value_truncate (TYPE_MODE (type), dconst_third ());

	  if (REAL_VALUES_EQUAL (c, dconstroot))
	    {
	      tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
	      if (cbrtfn != NULL_TREE)
		return build_call_expr_loc (loc, cbrtfn, 1, arg0);
	    }
	}

      /* Check for an integer exponent.  */
      n = real_to_integer (&c);
      real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
      if (real_identical (&c, &cint))
	{
	  /* Attempt to evaluate pow at compile-time, unless this should
	     raise an exception.  A negative power of zero divides by
	     zero, so only fold that case when neither trapping math nor
	     errno matters.  */
	  if (TREE_CODE (arg0) == REAL_CST
	      && !TREE_OVERFLOW (arg0)
	      && (n > 0
		  || (!flag_trapping_math && !flag_errno_math)
		  || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
	    {
	      REAL_VALUE_TYPE x;
	      bool inexact;

	      x = TREE_REAL_CST (arg0);
	      inexact = real_powi (&x, TYPE_MODE (type), &x, n);
	      /* An inexact result is only acceptable under unsafe
		 math; otherwise keep the runtime call.  */
	      if (flag_unsafe_math_optimizations || !inexact)
		return build_real (type, x);
	    }

	  /* Strip sign ops from even integer powers.  */
	  if ((n & 1) == 0 && flag_unsafe_math_optimizations)
	    {
	      tree narg0 = fold_strip_sign_ops (arg0);
	      if (narg0)
		return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
	    }
	}
    }

  /* Transforms keyed on the form of the base expression; all reassociate
     and hence require unsafe math.  */
  if (flag_unsafe_math_optimizations)
    {
      const enum built_in_function fcode = builtin_mathfn_code (arg0);

      /* Optimize pow(expN(x),y) = expN(x*y).  */
      if (BUILTIN_EXPONENT_P (fcode))
	{
	  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
	  tree arg = CALL_EXPR_ARG (arg0, 0);
	  arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
	  return build_call_expr_loc (loc, expfn, 1, arg);
	}

      /* Optimize pow(sqrt(x),y) = pow(x,y*0.5).  */
      if (BUILTIN_SQRT_P (fcode))
	{
	  tree narg0 = CALL_EXPR_ARG (arg0, 0);
	  tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
					build_real (type, dconsthalf));
	  return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
	}

      /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative.  */
      if (BUILTIN_CBRT_P (fcode))
	{
	  tree arg = CALL_EXPR_ARG (arg0, 0);
	  if (tree_expr_nonnegative_p (arg))
	    {
	      const REAL_VALUE_TYPE dconstroot
		= real_value_truncate (TYPE_MODE (type), dconst_third ());
	      tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
					    build_real (type, dconstroot));
	      return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
	    }
	}

      /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative.  */
      if (fcode == BUILT_IN_POW
	  || fcode == BUILT_IN_POWF
	  || fcode == BUILT_IN_POWL)
	{
	  tree arg00 = CALL_EXPR_ARG (arg0, 0);
	  if (tree_expr_nonnegative_p (arg00))
	    {
	      tree arg01 = CALL_EXPR_ARG (arg0, 1);
	      tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
	      return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
	    }
	}
    }

  return NULL_TREE;
}
8116
8117 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8118 Return NULL_TREE if no simplification can be made. */
8119 static tree
8120 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
8121 tree arg0, tree arg1, tree type)
8122 {
8123 if (!validate_arg (arg0, REAL_TYPE)
8124 || !validate_arg (arg1, INTEGER_TYPE))
8125 return NULL_TREE;
8126
8127 /* Optimize pow(1.0,y) = 1.0. */
8128 if (real_onep (arg0))
8129 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8130
8131 if (host_integerp (arg1, 0))
8132 {
8133 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
8134
8135 /* Evaluate powi at compile-time. */
8136 if (TREE_CODE (arg0) == REAL_CST
8137 && !TREE_OVERFLOW (arg0))
8138 {
8139 REAL_VALUE_TYPE x;
8140 x = TREE_REAL_CST (arg0);
8141 real_powi (&x, TYPE_MODE (type), &x, c);
8142 return build_real (type, x);
8143 }
8144
8145 /* Optimize pow(x,0) = 1.0. */
8146 if (c == 0)
8147 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8148 arg0);
8149
8150 /* Optimize pow(x,1) = x. */
8151 if (c == 1)
8152 return arg0;
8153
8154 /* Optimize pow(x,-1) = 1.0/x. */
8155 if (c == -1)
8156 return fold_build2_loc (loc, RDIV_EXPR, type,
8157 build_real (type, dconst1), arg0);
8158 }
8159
8160 return NULL_TREE;
8161 }
8162
/* A subroutine of fold_builtin to fold the various exponent
   functions.  Return NULL_TREE if no simplification can be made.
   FUNC is the corresponding MPFR exponent function; it also serves as
   a tag (compared against mpfr_exp, mpfr_exp2 and mpfr_exp10)
   identifying which exponential is being folded.  */

static tree
fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
		       int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
{
  if (validate_arg (arg, REAL_TYPE))
    {
      tree type = TREE_TYPE (TREE_TYPE (fndecl));
      tree res;

      /* Calculate the result when the argument is a constant.  */
      if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
	return res;

      /* Optimize expN(logN(x)) = x.  Only valid under unsafe math
	 since it drops the domain error for nonpositive x.  */
      if (flag_unsafe_math_optimizations)
	{
	  const enum built_in_function fcode = builtin_mathfn_code (arg);

	  if ((func == mpfr_exp
	       && (fcode == BUILT_IN_LOG
		   || fcode == BUILT_IN_LOGF
		   || fcode == BUILT_IN_LOGL))
	      || (func == mpfr_exp2
		  && (fcode == BUILT_IN_LOG2
		      || fcode == BUILT_IN_LOG2F
		      || fcode == BUILT_IN_LOG2L))
	      || (func == mpfr_exp10
		  && (fcode == BUILT_IN_LOG10
		      || fcode == BUILT_IN_LOG10F
		      || fcode == BUILT_IN_LOG10L)))
	    return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
	}
    }

  return NULL_TREE;
}
8203
8204 /* Return true if VAR is a VAR_DECL or a component thereof. */
8205
8206 static bool
8207 var_decl_component_p (tree var)
8208 {
8209 tree inner = var;
8210 while (handled_component_p (inner))
8211 inner = TREE_OPERAND (inner, 0);
8212 return SSA_VAR_P (inner);
8213 }
8214
/* Fold function call to builtin memset.  Return
   NULL_TREE if no simplification can be made.  The fold replaces the
   call with a single scalar store when DEST points to a suitably
   aligned integral or pointer object whose size equals LEN.  */

static tree
fold_builtin_memset (location_t loc, tree dest, tree c, tree len,
		     tree type, bool ignore)
{
  tree var, ret, etype;
  unsigned HOST_WIDE_INT length, cval;

  if (! validate_arg (dest, POINTER_TYPE)
      || ! validate_arg (c, INTEGER_TYPE)
      || ! validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  if (! host_integerp (len, 1))
    return NULL_TREE;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    return omit_one_operand_loc (loc, type, dest, c);

  if (! host_integerp (c, 1) || TREE_SIDE_EFFECTS (dest))
    return NULL_TREE;

  /* DEST must be the address of a non-volatile object.  */
  var = dest;
  STRIP_NOPS (var);
  if (TREE_CODE (var) != ADDR_EXPR)
    return NULL_TREE;

  var = TREE_OPERAND (var, 0);
  if (TREE_THIS_VOLATILE (var))
    return NULL_TREE;

  /* For an array, consider a store of its element type.  */
  etype = TREE_TYPE (var);
  if (TREE_CODE (etype) == ARRAY_TYPE)
    etype = TREE_TYPE (etype);

  if (!INTEGRAL_TYPE_P (etype)
      && !POINTER_TYPE_P (etype))
    return NULL_TREE;

  if (! var_decl_component_p (var))
    return NULL_TREE;

  /* The store must cover exactly the object and be sufficiently
     aligned for the store's mode.  */
  length = tree_low_cst (len, 1);
  if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
      || get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT
	 < (int) length)
    return NULL_TREE;

  if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
    return NULL_TREE;

  if (integer_zerop (c))
    cval = 0;
  else
    {
      if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
	return NULL_TREE;

      /* Replicate the byte value into every byte of CVAL.  The last
	 step is written (cval << 31) << 1 rather than cval << 32 so
	 the shift count stays valid when HOST_WIDE_INT is only 32
	 bits wide.  */
      cval = tree_low_cst (c, 1);
      cval &= 0xff;
      cval |= cval << 8;
      cval |= cval << 16;
      cval |= (cval << 31) << 1;
    }

  /* Emit *(etype *)dest = cval, keeping DEST as the call's value
     unless the result is unused.  */
  ret = build_int_cst_type (etype, cval);
  var = build_fold_indirect_ref_loc (loc,
				     fold_convert_loc (loc,
						       build_pointer_type (etype),
						       dest));
  ret = build2 (MODIFY_EXPR, etype, var, ret);
  if (ignore)
    return ret;

  return omit_one_operand_loc (loc, type, dest, ret);
}
8294
/* Fold function call to builtin bzero.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_bzero (location_t loc, tree dest, tree size, bool ignore)
{
  if (! validate_arg (dest, POINTER_TYPE)
      || ! validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;

  /* Only fold when the call's value is unused (bzero returns void,
     so a used result indicates something unexpected).  */
  if (!ignore)
    return NULL_TREE;

  /* New argument list transforming bzero(ptr x, int y) to
     memset(ptr x, int 0, size_t y).   This is done this way
     so that if it isn't expanded inline, we fallback to
     calling bzero instead of memset.  */

  return fold_builtin_memset (loc, dest, integer_zero_node,
			      fold_convert_loc (loc, sizetype, size),
			      void_type_node, ignore);
}
8317
/* Fold function call to builtin mem{{,p}cpy,move}.  Return
   NULL_TREE if no simplification can be made.
   If ENDP is 0, return DEST (like memcpy).
   If ENDP is 1, return DEST+LEN (like mempcpy).
   If ENDP is 2, return DEST+LEN-1 (like stpcpy).
   If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
   (memmove).  */

static tree
fold_builtin_memory_op (location_t loc, tree dest, tree src,
			tree len, tree type, bool ignore, int endp)
{
  tree destvar, srcvar, expr;

  if (! validate_arg (dest, POINTER_TYPE)
      || ! validate_arg (src, POINTER_TYPE)
      || ! validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    return omit_one_operand_loc (loc, type, dest, src);

  /* If SRC and DEST are the same (and not volatile), return
     DEST{,+LEN,+LEN-1}.  */
  if (operand_equal_p (src, dest, 0))
    /* EXPR == LEN marks the "copy was a no-op" case for the tail
       below.  */
    expr = len;
  else
    {
      tree srctype, desttype;
      int src_align, dest_align;

      if (endp == 3)
	{
	  /* memmove: try to prove the regions cannot overlap so the
	     call can be turned into memcpy.  */
	  src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
	  dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);

	  /* Both DEST and SRC must be pointer types.
	     ??? This is what old code did.  Is the testing for pointer types
	     really mandatory?

	     If either SRC is readonly or length is 1, we can use memcpy.  */
	  if (!dest_align || !src_align)
	    return NULL_TREE;
	  if (readonly_data_expr (src)
	      || (host_integerp (len, 1)
		  && (MIN (src_align, dest_align) / BITS_PER_UNIT
		      >= tree_low_cst (len, 1))))
	    {
	      tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
	      if (!fn)
		return NULL_TREE;
	      return build_call_expr_loc (loc, fn, 3, dest, src, len);
	    }

	  /* If *src and *dest can't overlap, optimize into memcpy as well.  */
	  srcvar = build_fold_indirect_ref_loc (loc, src);
	  destvar = build_fold_indirect_ref_loc (loc, dest);
	  if (srcvar
	      && !TREE_THIS_VOLATILE (srcvar)
	      && destvar
	      && !TREE_THIS_VOLATILE (destvar))
	    {
	      tree src_base, dest_base, fn;
	      HOST_WIDE_INT src_offset = 0, dest_offset = 0;
	      HOST_WIDE_INT size = -1;
	      HOST_WIDE_INT maxsize = -1;

	      src_base = srcvar;
	      if (handled_component_p (src_base))
		src_base = get_ref_base_and_extent (src_base, &src_offset,
						    &size, &maxsize);
	      dest_base = destvar;
	      if (handled_component_p (dest_base))
		dest_base = get_ref_base_and_extent (dest_base, &dest_offset,
						     &size, &maxsize);
	      /* Offsets and sizes below are in bits; a MAXSIZE of -1
		 means "unknown extent".  */
	      if (host_integerp (len, 1))
		{
		  maxsize = tree_low_cst (len, 1);
		  if (maxsize
		      > INTTYPE_MAXIMUM (HOST_WIDE_INT) / BITS_PER_UNIT)
		    maxsize = -1;
		  else
		    maxsize *= BITS_PER_UNIT;
		}
	      else
		maxsize = -1;
	      if (SSA_VAR_P (src_base)
		  && SSA_VAR_P (dest_base))
		{
		  /* Distinct decls cannot overlap; the same decl may
		     only be copied within itself at disjoint ranges.  */
		  if (operand_equal_p (src_base, dest_base, 0)
		      && ranges_overlap_p (src_offset, maxsize,
					   dest_offset, maxsize))
		    return NULL_TREE;
		}
	      else if (TREE_CODE (src_base) == INDIRECT_REF
		       && TREE_CODE (dest_base) == INDIRECT_REF)
		{
		  /* Two dereferences: only safe when they go through
		     the same pointer at non-overlapping offsets.  */
		  if (! operand_equal_p (TREE_OPERAND (src_base, 0),
					 TREE_OPERAND (dest_base, 0), 0)
		      || ranges_overlap_p (src_offset, maxsize,
					   dest_offset, maxsize))
		    return NULL_TREE;
		}
	      else
		return NULL_TREE;

	      fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
	      if (!fn)
		return NULL_TREE;
	      return build_call_expr_loc (loc, fn, 3, dest, src, len);
	    }
	  return NULL_TREE;
	}

      /* The remaining transforms replace the copy by a single scalar
	 assignment and therefore need a constant length.  */
      if (!host_integerp (len, 0))
	return NULL_TREE;
      /* FIXME:
	 This logic lose for arguments like (type *)malloc (sizeof (type)),
	 since we strip the casts of up to VOID return value from malloc.
	 Perhaps we ought to inherit type from non-VOID argument here?  */
      STRIP_NOPS (src);
      STRIP_NOPS (dest);
      /* As we fold (void *)(p + CST) to (void *)p + CST undo this here.  */
      if (TREE_CODE (src) == POINTER_PLUS_EXPR)
	{
	  tree tem = TREE_OPERAND (src, 0);
	  STRIP_NOPS (tem);
	  if (tem != TREE_OPERAND (src, 0))
	    src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
	}
      if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
	{
	  tree tem = TREE_OPERAND (dest, 0);
	  STRIP_NOPS (tem);
	  if (tem != TREE_OPERAND (dest, 0))
	    dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
	}
      /* For pointers to arrays whose total size differs from LEN,
	 retry with the element type.  */
      srctype = TREE_TYPE (TREE_TYPE (src));
      if (srctype
	  && TREE_CODE (srctype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
	{
	  srctype = TREE_TYPE (srctype);
	  STRIP_NOPS (src);
	  src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
	}
      desttype = TREE_TYPE (TREE_TYPE (dest));
      if (desttype
	  && TREE_CODE (desttype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
	{
	  desttype = TREE_TYPE (desttype);
	  STRIP_NOPS (dest);
	  dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
	}
      /* Both pointed-to types must have a known constant size and be
	 non-volatile for a scalar assignment to be valid.  */
      if (!srctype || !desttype
	  || !TYPE_SIZE_UNIT (srctype)
	  || !TYPE_SIZE_UNIT (desttype)
	  || TREE_CODE (TYPE_SIZE_UNIT (srctype)) != INTEGER_CST
	  || TREE_CODE (TYPE_SIZE_UNIT (desttype)) != INTEGER_CST
	  || TYPE_VOLATILE (srctype)
	  || TYPE_VOLATILE (desttype))
	return NULL_TREE;

      src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
      dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
      if (dest_align < (int) TYPE_ALIGN (desttype)
	  || src_align < (int) TYPE_ALIGN (srctype))
	return NULL_TREE;

      /* DEST is reused in the returned value; protect it from double
	 expansion.  */
      if (!ignore)
	dest = builtin_save_expr (dest);

      srcvar = NULL_TREE;
      if (tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
	{
	  srcvar = build_fold_indirect_ref_loc (loc, src);
	  if (TREE_THIS_VOLATILE (srcvar))
	    return NULL_TREE;
	  else if (!tree_int_cst_equal (tree_expr_size (srcvar), len))
	    srcvar = NULL_TREE;
	  /* With memcpy, it is possible to bypass aliasing rules, so without
	     this check i.e. execute/20060930-2.c would be misoptimized,
	     because it use conflicting alias set to hold argument for the
	     memcpy call.  This check is probably unnecessary with
	     -fno-strict-aliasing.  Similarly for destvar.  See also
	     PR29286.  */
	  else if (!var_decl_component_p (srcvar))
	    srcvar = NULL_TREE;
	}

      destvar = NULL_TREE;
      if (tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
	{
	  destvar = build_fold_indirect_ref_loc (loc, dest);
	  if (TREE_THIS_VOLATILE (destvar))
	    return NULL_TREE;
	  else if (!tree_int_cst_equal (tree_expr_size (destvar), len))
	    destvar = NULL_TREE;
	  else if (!var_decl_component_p (destvar))
	    destvar = NULL_TREE;
	}

      if (srcvar == NULL_TREE && destvar == NULL_TREE)
	return NULL_TREE;

      /* When only one side gave a usable variable, rebuild the other
	 side's reference using its type, possibly via an
	 alignment-reduced variant type.  */
      if (srcvar == NULL_TREE)
	{
	  tree srcptype;
	  if (TREE_ADDRESSABLE (TREE_TYPE (destvar)))
	    return NULL_TREE;

	  srctype = build_qualified_type (desttype, 0);
	  if (src_align < (int) TYPE_ALIGN (srctype))
	    {
	      if (AGGREGATE_TYPE_P (srctype)
		  || SLOW_UNALIGNED_ACCESS (TYPE_MODE (srctype), src_align))
		return NULL_TREE;

	      srctype = build_variant_type_copy (srctype);
	      TYPE_ALIGN (srctype) = src_align;
	      TYPE_USER_ALIGN (srctype) = 1;
	      TYPE_PACKED (srctype) = 1;
	    }
	  srcptype = build_pointer_type_for_mode (srctype, ptr_mode, true);
	  src = fold_convert_loc (loc, srcptype, src);
	  srcvar = build_fold_indirect_ref_loc (loc, src);
	}
      else if (destvar == NULL_TREE)
	{
	  tree destptype;
	  if (TREE_ADDRESSABLE (TREE_TYPE (srcvar)))
	    return NULL_TREE;

	  desttype = build_qualified_type (srctype, 0);
	  if (dest_align < (int) TYPE_ALIGN (desttype))
	    {
	      if (AGGREGATE_TYPE_P (desttype)
		  || SLOW_UNALIGNED_ACCESS (TYPE_MODE (desttype), dest_align))
		return NULL_TREE;

	      desttype = build_variant_type_copy (desttype);
	      TYPE_ALIGN (desttype) = dest_align;
	      TYPE_USER_ALIGN (desttype) = 1;
	      TYPE_PACKED (desttype) = 1;
	    }
	  destptype = build_pointer_type_for_mode (desttype, ptr_mode, true);
	  dest = fold_convert_loc (loc, destptype, dest);
	  destvar = build_fold_indirect_ref_loc (loc, dest);
	}

      /* Build the scalar assignment, converting between the source
	 and destination types as needed.  */
      if (srctype == desttype
	  || (gimple_in_ssa_p (cfun)
	      && useless_type_conversion_p (desttype, srctype)))
	expr = srcvar;
      else if ((INTEGRAL_TYPE_P (TREE_TYPE (srcvar))
		|| POINTER_TYPE_P (TREE_TYPE (srcvar)))
	       && (INTEGRAL_TYPE_P (TREE_TYPE (destvar))
		   || POINTER_TYPE_P (TREE_TYPE (destvar))))
	expr = fold_convert_loc (loc, TREE_TYPE (destvar), srcvar);
      else
	expr = fold_build1_loc (loc, VIEW_CONVERT_EXPR,
				TREE_TYPE (destvar), srcvar);
      expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, expr);
    }

  if (ignore)
    return expr;

  if (endp == 0 || endp == 3)
    return omit_one_operand_loc (loc, type, dest, expr);

  if (expr == len)
    expr = NULL_TREE;

  /* For stpcpy-like semantics the result points at the last byte
     written rather than one past it.  */
  if (endp == 2)
    len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
			   ssize_int (1));

  len = fold_convert_loc (loc, sizetype, len);
  dest = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
  dest = fold_convert_loc (loc, type, dest);
  if (expr)
    dest = omit_one_operand_loc (loc, type, dest, expr);
  return dest;
}
8605
/* Fold function call to builtin strcpy with arguments DEST and SRC.
   If LEN is not NULL, it represents the length of the string to be
   copied.  Return NULL_TREE if no simplification can be made.
   Transforms strcpy (dest, src) into memcpy (dest, src, strlen (src) + 1)
   when the source length is known at compile time.  */

tree
fold_builtin_strcpy (location_t loc, tree fndecl, tree dest, tree src, tree len)
{
  tree fn;

  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (src, POINTER_TYPE))
    return NULL_TREE;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (operand_equal_p (src, dest, 0))
    return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);

  /* The memcpy form passes an extra length argument; skip it when
     optimizing for size.  */
  if (optimize_function_for_size_p (cfun))
    return NULL_TREE;

  fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
  if (!fn)
    return NULL_TREE;

  if (!len)
    {
      len = c_strlen (src, 1);
      if (! len || TREE_SIDE_EFFECTS (len))
	return NULL_TREE;
    }

  /* Add one for the terminating NUL.  */
  len = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
  return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
			   build_call_expr_loc (loc, fn, 3, dest, src, len));
}
8641
8642 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
8643 Return NULL_TREE if no simplification can be made. */
8644
8645 static tree
8646 fold_builtin_stpcpy (location_t loc, tree fndecl, tree dest, tree src)
8647 {
8648 tree fn, len, lenp1, call, type;
8649
8650 if (!validate_arg (dest, POINTER_TYPE)
8651 || !validate_arg (src, POINTER_TYPE))
8652 return NULL_TREE;
8653
8654 len = c_strlen (src, 1);
8655 if (!len
8656 || TREE_CODE (len) != INTEGER_CST)
8657 return NULL_TREE;
8658
8659 if (optimize_function_for_size_p (cfun)
8660 /* If length is zero it's small enough. */
8661 && !integer_zerop (len))
8662 return NULL_TREE;
8663
8664 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8665 if (!fn)
8666 return NULL_TREE;
8667
8668 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
8669 /* We use dest twice in building our expression. Save it from
8670 multiple expansions. */
8671 dest = builtin_save_expr (dest);
8672 call = build_call_expr_loc (loc, fn, 3, dest, src, lenp1);
8673
8674 type = TREE_TYPE (TREE_TYPE (fndecl));
8675 len = fold_convert_loc (loc, sizetype, len);
8676 dest = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
8677 dest = fold_convert_loc (loc, type, dest);
8678 dest = omit_one_operand_loc (loc, type, dest, call);
8679 return dest;
8680 }
8681
/* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
   If SLEN is not NULL, it represents the length of the source string.
   Return NULL_TREE if no simplification can be made.
   Transforms strncpy into memcpy when LEN is a constant that does not
   exceed the known source length plus its terminating NUL.  */

tree
fold_builtin_strncpy (location_t loc, tree fndecl, tree dest,
		      tree src, tree len, tree slen)
{
  tree fn;

  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (src, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);

  /* We can't compare slen with len as constants below if len is not a
     constant.  */
  if (len == 0 || TREE_CODE (len) != INTEGER_CST)
    return NULL_TREE;

  if (!slen)
    slen = c_strlen (src, 1);

  /* Now, we must be passed a constant src ptr parameter.  */
  if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
    return NULL_TREE;

  /* SLEN + 1 covers the terminating NUL.  */
  slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

  /* We do not support simplification of this case, though we do
     support it when expanding trees into RTL.  */
  /* FIXME: generate a call to __builtin_memset.  */
  if (tree_int_cst_lt (slen, len))
    return NULL_TREE;

  /* OK transform into builtin memcpy.  */
  fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
  if (!fn)
    return NULL_TREE;
  return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
			   build_call_expr_loc (loc, fn, 3, dest, src, len));
}
8728
8729 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8730 arguments to the call, and TYPE is its return type.
8731 Return NULL_TREE if no simplification can be made. */
8732
8733 static tree
8734 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
8735 {
8736 if (!validate_arg (arg1, POINTER_TYPE)
8737 || !validate_arg (arg2, INTEGER_TYPE)
8738 || !validate_arg (len, INTEGER_TYPE))
8739 return NULL_TREE;
8740 else
8741 {
8742 const char *p1;
8743
8744 if (TREE_CODE (arg2) != INTEGER_CST
8745 || !host_integerp (len, 1))
8746 return NULL_TREE;
8747
8748 p1 = c_getstr (arg1);
8749 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
8750 {
8751 char c;
8752 const char *r;
8753 tree tem;
8754
8755 if (target_char_cast (arg2, &c))
8756 return NULL_TREE;
8757
8758 r = (char *) memchr (p1, c, tree_low_cst (len, 1));
8759
8760 if (r == NULL)
8761 return build_int_cst (TREE_TYPE (arg1), 0);
8762
8763 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (arg1), arg1,
8764 size_int (r - p1));
8765 return fold_convert_loc (loc, type, tem);
8766 }
8767 return NULL_TREE;
8768 }
8769 }
8770
/* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
{
  const char *p1, *p2;

  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return zero.  */
  if (integer_zerop (len))
    return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
				  arg1, arg2);

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);

  p1 = c_getstr (arg1);
  p2 = c_getstr (arg2);

  /* If all arguments are constant, and the value of len is not greater
     than the lengths of arg1 and arg2, evaluate at compile-time.  */
  if (host_integerp (len, 1) && p1 && p2
      && compare_tree_int (len, strlen (p1) + 1) <= 0
      && compare_tree_int (len, strlen (p2) + 1) <= 0)
    {
      /* Note only the sign of the host memcmp result is used; the
	 folded value is canonicalized to -1/0/1.  */
      const int r = memcmp (p1, p2, tree_low_cst (len, 1));

      if (r > 0)
	return integer_one_node;
      else if (r < 0)
	return integer_minus_one_node;
      else
	return integer_zero_node;
    }

  /* If len parameter is one, return an expression corresponding to
     (*(const unsigned char*)arg1 - (const unsigned char*)arg2).  */
  if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
    {
      /* Compare single bytes as unsigned chars widened to int, per
	 the memcmp contract.  */
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree ind1
	= fold_convert_loc (loc, integer_type_node,
			    build1 (INDIRECT_REF, cst_uchar_node,
				    fold_convert_loc (loc,
						      cst_uchar_ptr_node,
						      arg1)));
      tree ind2
	= fold_convert_loc (loc, integer_type_node,
			    build1 (INDIRECT_REF, cst_uchar_node,
				    fold_convert_loc (loc,
						      cst_uchar_ptr_node,
						      arg2)));
      return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
    }

  return NULL_TREE;
}
8837
/* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
{
  const char *p1, *p2;

  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE))
    return NULL_TREE;

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return integer_zero_node;

  p1 = c_getstr (arg1);
  p2 = c_getstr (arg2);

  /* Both strings constant: evaluate at compile time, canonicalizing
     the host strcmp result to -1/0/1.  */
  if (p1 && p2)
    {
      const int i = strcmp (p1, p2);
      if (i < 0)
	return integer_minus_one_node;
      else if (i > 0)
	return integer_one_node;
      else
	return integer_zero_node;
    }

  /* If the second arg is "", return *(const unsigned char*)arg1.  */
  if (p2 && *p2 == '\0')
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      return fold_convert_loc (loc, integer_type_node,
			       build1 (INDIRECT_REF, cst_uchar_node,
				       fold_convert_loc (loc,
							 cst_uchar_ptr_node,
							 arg1)));
    }

  /* If the first arg is "", return -*(const unsigned char*)arg2.  */
  if (p1 && *p1 == '\0')
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree temp
	= fold_convert_loc (loc, integer_type_node,
			    build1 (INDIRECT_REF, cst_uchar_node,
				    fold_convert_loc (loc,
						      cst_uchar_ptr_node,
						      arg2)));
      return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
    }

  return NULL_TREE;
}
8900
/* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
{
  const char *p1, *p2;

  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return zero.  ARG1 and ARG2 are
     still evaluated for their side-effects.  */
  if (integer_zerop (len))
    return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
				  arg1, arg2);

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);

  p1 = c_getstr (arg1);
  p2 = c_getstr (arg2);

  /* All three constant: evaluate at compile time, canonicalizing the
     host strncmp result to -1/0/1.  */
  if (host_integerp (len, 1) && p1 && p2)
    {
      const int i = strncmp (p1, p2, tree_low_cst (len, 1));
      if (i > 0)
	return integer_one_node;
      else if (i < 0)
	return integer_minus_one_node;
      else
	return integer_zero_node;
    }

  /* If the second arg is "", and the length is greater than zero,
     return *(const unsigned char*)arg1.  */
  if (p2 && *p2 == '\0'
      && TREE_CODE (len) == INTEGER_CST
      && tree_int_cst_sgn (len) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      return fold_convert_loc (loc, integer_type_node,
			       build1 (INDIRECT_REF, cst_uchar_node,
				       fold_convert_loc (loc,
							 cst_uchar_ptr_node,
							 arg1)));
    }

  /* If the first arg is "", and the length is greater than zero,
     return -*(const unsigned char*)arg2.  */
  if (p1 && *p1 == '\0'
      && TREE_CODE (len) == INTEGER_CST
      && tree_int_cst_sgn (len) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree temp = fold_convert_loc (loc, integer_type_node,
				    build1 (INDIRECT_REF, cst_uchar_node,
					    fold_convert_loc (loc,
							      cst_uchar_ptr_node,
							      arg2)));
      return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
    }

  /* If len parameter is one, return an expression corresponding to
     (*(const unsigned char*)arg1 - (const unsigned char*)arg2).  */
  if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree ind1 = fold_convert_loc (loc, integer_type_node,
				    build1 (INDIRECT_REF, cst_uchar_node,
					    fold_convert_loc (loc,
							      cst_uchar_ptr_node,
							      arg1)));
      tree ind2 = fold_convert_loc (loc, integer_type_node,
				    build1 (INDIRECT_REF, cst_uchar_node,
					    fold_convert_loc (loc,
							      cst_uchar_ptr_node,
							      arg2)));
      return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
    }

  return NULL_TREE;
}
8995
/* Fold function call to builtin signbit, signbitf or signbitl with argument
   ARG.  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_signbit (location_t loc, tree arg, tree type)
{
  tree temp;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* If ARG is a compile-time constant, determine the result.  */
  if (TREE_CODE (arg) == REAL_CST
      && !TREE_OVERFLOW (arg))
    {
      REAL_VALUE_TYPE c;

      c = TREE_REAL_CST (arg);
      temp = REAL_VALUE_NEGATIVE (c) ? integer_one_node : integer_zero_node;
      return fold_convert_loc (loc, type, temp);
    }

  /* If ARG is non-negative, the result is always zero.  ARG is still
     evaluated for its side-effects.  */
  if (tree_expr_nonnegative_p (arg))
    return omit_one_operand_loc (loc, type, integer_zero_node, arg);

  /* If ARG's format doesn't have signed zeros, return "arg < 0.0".
     (With signed zeros, signbit(-0.0) must be 1 but -0.0 < 0.0 is
     false, so the transformation would be wrong.)  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
    return fold_build2_loc (loc, LT_EXPR, type, arg,
			    build_real (TREE_TYPE (arg), dconst0));

  return NULL_TREE;
}
9029
/* Fold function call to builtin copysign, copysignf or copysignl with
   arguments ARG1 and ARG2.  Return NULL_TREE if no simplification can
   be made.  */

static tree
fold_builtin_copysign (location_t loc, tree fndecl,
		       tree arg1, tree arg2, tree type)
{
  tree tem;

  if (!validate_arg (arg1, REAL_TYPE)
      || !validate_arg (arg2, REAL_TYPE))
    return NULL_TREE;

  /* copysign(X,X) is X.  */
  if (operand_equal_p (arg1, arg2, 0))
    return fold_convert_loc (loc, type, arg1);

  /* If ARG1 and ARG2 are compile-time constants, determine the result.  */
  if (TREE_CODE (arg1) == REAL_CST
      && TREE_CODE (arg2) == REAL_CST
      && !TREE_OVERFLOW (arg1)
      && !TREE_OVERFLOW (arg2))
    {
      REAL_VALUE_TYPE c1, c2;

      c1 = TREE_REAL_CST (arg1);
      c2 = TREE_REAL_CST (arg2);
      /* c1.sign := c2.sign.  */
      real_copysign (&c1, &c2);
      return build_real (type, c1);
    }

  /* copysign(X, Y) is fabs(X) when Y is always non-negative.
     Remember to evaluate Y for side-effects.  */
  if (tree_expr_nonnegative_p (arg2))
    return omit_one_operand_loc (loc, type,
				 fold_build1_loc (loc, ABS_EXPR, type, arg1),
				 arg2);

  /* Strip sign changing operations for the first argument, since
     copysign overrides its sign anyway.  */
  tem = fold_strip_sign_ops (arg1);
  if (tem)
    return build_call_expr_loc (loc, fndecl, 2, tem, arg2);

  return NULL_TREE;
}
9077
9078 /* Fold a call to builtin isascii with argument ARG. */
9079
9080 static tree
9081 fold_builtin_isascii (location_t loc, tree arg)
9082 {
9083 if (!validate_arg (arg, INTEGER_TYPE))
9084 return NULL_TREE;
9085 else
9086 {
9087 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9088 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9089 build_int_cst (NULL_TREE,
9090 ~ (unsigned HOST_WIDE_INT) 0x7f));
9091 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
9092 arg, integer_zero_node);
9093 }
9094 }
9095
/* Fold a call to builtin toascii with argument ARG.  */

static tree
fold_builtin_toascii (location_t loc, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Transform toascii(c) -> (c & 0x7f): keep only the low seven
     bits of the character.  */
  return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
			  build_int_cst (NULL_TREE, 0x7f));
}
9108
9109 /* Fold a call to builtin isdigit with argument ARG. */
9110
9111 static tree
9112 fold_builtin_isdigit (location_t loc, tree arg)
9113 {
9114 if (!validate_arg (arg, INTEGER_TYPE))
9115 return NULL_TREE;
9116 else
9117 {
9118 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9119 /* According to the C standard, isdigit is unaffected by locale.
9120 However, it definitely is affected by the target character set. */
9121 unsigned HOST_WIDE_INT target_digit0
9122 = lang_hooks.to_target_charset ('0');
9123
9124 if (target_digit0 == 0)
9125 return NULL_TREE;
9126
9127 arg = fold_convert_loc (loc, unsigned_type_node, arg);
9128 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
9129 build_int_cst (unsigned_type_node, target_digit0));
9130 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
9131 build_int_cst (unsigned_type_node, 9));
9132 }
9133 }
9134
9135 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9136
9137 static tree
9138 fold_builtin_fabs (location_t loc, tree arg, tree type)
9139 {
9140 if (!validate_arg (arg, REAL_TYPE))
9141 return NULL_TREE;
9142
9143 arg = fold_convert_loc (loc, type, arg);
9144 if (TREE_CODE (arg) == REAL_CST)
9145 return fold_abs_const (arg, type);
9146 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9147 }
9148
9149 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9150
9151 static tree
9152 fold_builtin_abs (location_t loc, tree arg, tree type)
9153 {
9154 if (!validate_arg (arg, INTEGER_TYPE))
9155 return NULL_TREE;
9156
9157 arg = fold_convert_loc (loc, type, arg);
9158 if (TREE_CODE (arg) == INTEGER_CST)
9159 return fold_abs_const (arg, type);
9160 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9161 }
9162
/* Fold a call to builtin fmin or fmax.  MAX selects fmax; ARG0 and
   ARG1 are the call's arguments and TYPE its return type.  */

static tree
fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
			tree type, bool max)
{
  if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
    {
      /* Calculate the result when the argument is a constant.  */
      tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));

      if (res)
	return res;

      /* If either argument is NaN, return the other one.  Avoid the
	 transformation if we get (and honor) a signalling NaN.  Using
	 omit_one_operand() ensures we create a non-lvalue.  */
      if (TREE_CODE (arg0) == REAL_CST
	  && real_isnan (&TREE_REAL_CST (arg0))
	  && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	      || ! TREE_REAL_CST (arg0).signalling))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      if (TREE_CODE (arg1) == REAL_CST
	  && real_isnan (&TREE_REAL_CST (arg1))
	  && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
	      || ! TREE_REAL_CST (arg1).signalling))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* Transform fmin/fmax(x,x) -> x.  */
      if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR.  C99 requires these
	 functions to return the numeric arg if the other one is NaN.
	 These tree codes don't honor that, so only transform if
	 -ffinite-math-only is set.  C99 doesn't require -0.0 to be
	 handled, so we don't have to worry about it either.  */
      if (flag_finite_math_only)
	return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
				fold_convert_loc (loc, type, arg0),
				fold_convert_loc (loc, type, arg1));
    }
  return NULL_TREE;
}
9207
/* Fold a call to builtin carg(a+bi) -> atan2(b,a).  */

static tree
fold_builtin_carg (location_t loc, tree arg, tree type)
{
  if (validate_arg (arg, COMPLEX_TYPE)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
    {
      tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);

      if (atan2_fn)
	{
	  /* Save ARG so it is evaluated only once when split into its
	     real and imaginary parts.  */
	  tree new_arg = builtin_save_expr (arg);
	  tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
	  tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
	  /* carg(z) == atan2(imag(z), real(z)).  */
	  return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
	}
    }

  return NULL_TREE;
}
9229
/* Fold a call to builtin logb/ilogb.  RETTYPE distinguishes the two:
   REAL_TYPE for logb, an integer type for ilogb.  */

static tree
fold_builtin_logb (location_t loc, tree arg, tree rettype)
{
  if (! validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);

      switch (value->cl)
	{
	case rvc_nan:
	case rvc_inf:
	  /* If arg is Inf or NaN and we're logb, return it.  */
	  if (TREE_CODE (rettype) == REAL_TYPE)
	    return fold_convert_loc (loc, rettype, arg);
	  /* Fall through... */
	case rvc_zero:
	  /* Zero may set errno and/or raise an exception for logb, also
	     for ilogb we don't know FP_ILOGB0.  */
	  return NULL_TREE;
	case rvc_normal:
	  /* For normal numbers, proceed iff radix == 2.  In GCC,
	     normalized significands are in the range [0.5, 1.0).  We
	     want the exponent as if they were [1.0, 2.0) so get the
	     exponent and subtract 1.  */
	  if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
	    return fold_convert_loc (loc, rettype,
				     build_int_cst (NULL_TREE,
						    REAL_EXP (value)-1));
	  break;
	}
    }

  return NULL_TREE;
}
9271
/* Fold a call to builtin significand, if radix == 2.  */

static tree
fold_builtin_significand (location_t loc, tree arg, tree rettype)
{
  if (! validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);

      switch (value->cl)
	{
	case rvc_zero:
	case rvc_nan:
	case rvc_inf:
	  /* If arg is +-0, +-Inf or +-NaN, then return it.  */
	  return fold_convert_loc (loc, rettype, arg);
	case rvc_normal:
	  /* For normal numbers, proceed iff radix == 2.  */
	  if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
	    {
	      REAL_VALUE_TYPE result = *value;
	      /* In GCC, normalized significands are in the range [0.5,
		 1.0).  We want them to be [1.0, 2.0) so set the
		 exponent to 1.  */
	      SET_REAL_EXP (&result, 1);
	      return build_real (rettype, result);
	    }
	  break;
	}
    }

  return NULL_TREE;
}
9310
/* Fold a call to builtin frexp, we can assume the base is 2.  ARG0 is
   the value, ARG1 the int* exponent out-parameter, and RETTYPE the
   real return type.  */

static tree
fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
{
  if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg0);

  /* Only fold a compile-time real constant.  */
  if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
    return NULL_TREE;

  arg1 = build_fold_indirect_ref_loc (loc, arg1);

  /* Proceed if a valid pointer type was passed in.  */
  if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
      tree frac, exp;

      switch (value->cl)
	{
	case rvc_zero:
	  /* For +-0, return (*exp = 0, +-0).  */
	  exp = integer_zero_node;
	  frac = arg0;
	  break;
	case rvc_nan:
	case rvc_inf:
	  /* For +-NaN or +-Inf, *exp is unspecified, return arg0.  */
	  return omit_one_operand_loc (loc, rettype, arg0, arg1);
	case rvc_normal:
	  {
	    /* Since the frexp function always expects base 2, and in
	       GCC normalized significands are already in the range
	       [0.5, 1.0), we have exactly what frexp wants.  */
	    REAL_VALUE_TYPE frac_rvt = *value;
	    SET_REAL_EXP (&frac_rvt, 0);
	    frac = build_real (rettype, frac_rvt);
	    exp = build_int_cst (NULL_TREE, REAL_EXP (value));
	  }
	  break;
	default:
	  gcc_unreachable ();
	}

      /* Create the COMPOUND_EXPR (*arg1 = trunc, frac).  */
      arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
      TREE_SIDE_EFFECTS (arg1) = 1;
      return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
    }

  return NULL_TREE;
}
9366
/* Fold a call to builtin ldexp or scalbn/scalbln.  If LDEXP is true
   then we can assume the base is two.  If it's false, then we have to
   check the mode of the TYPE parameter in certain cases.  */

static tree
fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
			    tree type, bool ldexp)
{
  if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
    {
      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0.  */
      if (real_zerop (arg0) || integer_zerop (arg1)
	  || (TREE_CODE (arg0) == REAL_CST
	      && !real_isfinite (&TREE_REAL_CST (arg0))))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* If both arguments are constant, then try to evaluate it.
	 scalbn/scalbln additionally require the mode's radix to be 2.  */
      if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
	  && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
	  && host_integerp (arg1, 0))
	{
	  /* Bound the maximum adjustment to twice the range of the
	     mode's valid exponents.  Use abs to ensure the range is
	     positive as a sanity check.  */
	  const long max_exp_adj = 2 *
	    labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
		  - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);

	  /* Get the user-requested adjustment.  */
	  const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);

	  /* The requested adjustment must be inside this range.  This
	     is a preliminary cap to avoid things like overflow, we
	     may still fail to compute the result for other reasons.  */
	  if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
	    {
	      REAL_VALUE_TYPE initial_result;

	      real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);

	      /* Ensure we didn't overflow.  */
	      if (! real_isinf (&initial_result))
		{
		  const REAL_VALUE_TYPE trunc_result
		    = real_value_truncate (TYPE_MODE (type), initial_result);

		  /* Only proceed if the target mode can hold the
		     resulting value.  */
		  if (REAL_VALUES_EQUAL (initial_result, trunc_result))
		    return build_real (type, trunc_result);
		}
	    }
	}
    }

  return NULL_TREE;
}
9427
/* Fold a call to builtin modf.  ARG0 is the value, ARG1 the pointer
   out-parameter receiving the integral part, RETTYPE the real return
   type.  */

static tree
fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
{
  if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg0);

  /* Only fold a compile-time real constant.  */
  if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
    return NULL_TREE;

  arg1 = build_fold_indirect_ref_loc (loc, arg1);

  /* Proceed if a valid pointer type was passed in.  */
  if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
      REAL_VALUE_TYPE trunc, frac;

      switch (value->cl)
	{
	case rvc_nan:
	case rvc_zero:
	  /* For +-NaN or +-0, return (*arg1 = arg0, arg0).  */
	  trunc = frac = *value;
	  break;
	case rvc_inf:
	  /* For +-Inf, return (*arg1 = arg0, +-0).  */
	  frac = dconst0;
	  frac.sign = value->sign;
	  trunc = *value;
	  break;
	case rvc_normal:
	  /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)).  */
	  real_trunc (&trunc, VOIDmode, value);
	  real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
	  /* If the original number was negative and already
	     integral, then the fractional part is -0.0.  */
	  if (value->sign && frac.cl == rvc_zero)
	    frac.sign = value->sign;
	  break;
	}

      /* Create the COMPOUND_EXPR (*arg1 = trunc, frac).  */
      arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
			      build_real (rettype, trunc));
      TREE_SIDE_EFFECTS (arg1) = 1;
      return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
			      build_real (rettype, frac));
    }

  return NULL_TREE;
}
9483
/* Given a location LOC, an interclass builtin function decl FNDECL
   and its single argument ARG, return an folded expression computing
   the same, or NULL_TREE if we either couldn't or didn't want to fold
   (the latter happen if there's an RTL instruction available).  */

static tree
fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
{
  enum machine_mode mode;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Prefer the target's RTL expansion when one exists.  */
  if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
    return NULL_TREE;

  mode = TYPE_MODE (TREE_TYPE (arg));

  /* If there is no optab, try generic code.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
      tree result;

    CASE_FLT_FN (BUILT_IN_ISINF):
      {
	/* isinf(x) -> isgreater(fabs(x),DBL_MAX).  */
	tree const isgr_fn = built_in_decls[BUILT_IN_ISGREATER];
	tree const type = TREE_TYPE (arg);
	REAL_VALUE_TYPE r;
	char buf[128];

	/* Build the mode's largest finite value as a real constant.  */
	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
	real_from_string (&r, buf);
	result = build_call_expr (isgr_fn, 2,
				  fold_build1_loc (loc, ABS_EXPR, type, arg),
				  build_real (type, r));
	return result;
      }
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_ISFINITE:
      {
	/* isfinite(x) -> islessequal(fabs(x),DBL_MAX).  */
	tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
	tree const type = TREE_TYPE (arg);
	REAL_VALUE_TYPE r;
	char buf[128];

	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
	real_from_string (&r, buf);
	result = build_call_expr (isle_fn, 2,
				  fold_build1_loc (loc, ABS_EXPR, type, arg),
				  build_real (type, r));
	/*result = fold_build2_loc (loc, UNGT_EXPR,
				  TREE_TYPE (TREE_TYPE (fndecl)),
				  fold_build1_loc (loc, ABS_EXPR, type, arg),
				  build_real (type, r));
	result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
				  TREE_TYPE (TREE_TYPE (fndecl)),
				  result);*/
	return result;
      }
    case BUILT_IN_ISNORMAL:
      {
	/* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
	   islessequal(fabs(x),DBL_MAX).  */
	tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
	tree const isge_fn = built_in_decls[BUILT_IN_ISGREATEREQUAL];
	tree const type = TREE_TYPE (arg);
	REAL_VALUE_TYPE rmax, rmin;
	char buf[128];

	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
	real_from_string (&rmax, buf);
	/* The smallest normal is 2^(emin-1).  */
	sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
	real_from_string (&rmin, buf);
	/* Save fabs(x) so it is evaluated only once in the two calls.  */
	arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
	result = build_call_expr (isle_fn, 2, arg,
				  build_real (type, rmax));
	result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
			      build_call_expr (isge_fn, 2, arg,
					       build_real (type, rmin)));
	return result;
      }
    default:
      break;
    }

  return NULL_TREE;
}
9573
/* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
   ARG is the argument for the call.  BUILTIN_INDEX identifies which of
   the classification builtins is being folded.  */

static tree
fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  REAL_VALUE_TYPE r;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  switch (builtin_index)
    {
    case BUILT_IN_ISINF:
      /* Without honored infinities the answer is statically zero.  */
      if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg);

      if (TREE_CODE (arg) == REAL_CST)
	{
	  r = TREE_REAL_CST (arg);
	  if (real_isinf (&r))
	    return real_compare (GT_EXPR, &r, &dconst0)
		   ? integer_one_node : integer_minus_one_node;
	  else
	    return integer_zero_node;
	}

      return NULL_TREE;

    case BUILT_IN_ISINF_SIGN:
      {
	/* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
	/* In a boolean context, GCC will fold the inner COND_EXPR to
	   1.  So e.g. "if (isinf_sign(x))" would be folded to just
	   "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
	tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
	tree isinf_fn = built_in_decls[BUILT_IN_ISINF];
	tree tmp = NULL_TREE;

	/* Save ARG: it is used by both the signbit and isinf calls.  */
	arg = builtin_save_expr (arg);

	if (signbit_fn && isinf_fn)
	  {
	    tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
	    tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);

	    /* Normalize both calls to 0/1 before combining them.  */
	    signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
					signbit_call, integer_zero_node);
	    isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
				      isinf_call, integer_zero_node);

	    tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
			       integer_minus_one_node, integer_one_node);
	    tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
			       isinf_call, tmp,
			       integer_zero_node);
	  }

	return tmp;
      }

    case BUILT_IN_ISFINITE:
      /* If neither NaNs nor infinities exist in the mode, everything
	 is finite.  */
      if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
	  && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
	return omit_one_operand_loc (loc, type, integer_one_node, arg);

      if (TREE_CODE (arg) == REAL_CST)
	{
	  r = TREE_REAL_CST (arg);
	  return real_isfinite (&r) ? integer_one_node : integer_zero_node;
	}

      return NULL_TREE;

    case BUILT_IN_ISNAN:
      if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg);

      if (TREE_CODE (arg) == REAL_CST)
	{
	  r = TREE_REAL_CST (arg);
	  return real_isnan (&r) ? integer_one_node : integer_zero_node;
	}

      /* isnan(x) <=> x unordered with itself.  */
      arg = builtin_save_expr (arg);
      return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);

    default:
      gcc_unreachable ();
    }
}
9666
/* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
   This builtin will generate code to return the appropriate floating
   point classification depending on the value of the floating point
   number passed in.  The possible return values must be supplied as
   int arguments to the call in the following order: FP_NAN, FP_INFINITE,
   FP_NORMAL, FP_SUBNORMAL and FP_ZERO.  The ellipses is for exactly
   one floating point argument which is "type generic".  */

static tree
fold_builtin_fpclassify (location_t loc, tree exp)
{
  tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
    arg, type, res, tmp;
  enum machine_mode mode;
  REAL_VALUE_TYPE r;
  char buf[128];

  /* Verify the required arguments in the original call.  */
  if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
			 INTEGER_TYPE, INTEGER_TYPE,
			 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_TREE;

  fp_nan = CALL_EXPR_ARG (exp, 0);
  fp_infinite = CALL_EXPR_ARG (exp, 1);
  fp_normal = CALL_EXPR_ARG (exp, 2);
  fp_subnormal = CALL_EXPR_ARG (exp, 3);
  fp_zero = CALL_EXPR_ARG (exp, 4);
  arg = CALL_EXPR_ARG (exp, 5);
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (type);
  /* Work on |x|; it is reused in several comparisons below.  */
  arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));

  /* fpclassify(x) ->
       isnan(x) ? FP_NAN :
	 (fabs(x) == Inf ? FP_INFINITE :
	   (fabs(x) >= DBL_MIN ? FP_NORMAL :
	     (x == 0 ? FP_ZERO : FP_SUBNORMAL))).  */

  /* Build the COND_EXPR chain inside-out, starting with the
     zero/subnormal distinction.  */
  tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
		     build_real (type, dconst0));
  res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
		     tmp, fp_zero, fp_subnormal);

  /* The smallest normal value is 2^(emin-1).  */
  sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
  real_from_string (&r, buf);
  tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
		     arg, build_real (type, r));
  res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);

  if (HONOR_INFINITIES (mode))
    {
      real_inf (&r);
      tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
			 build_real (type, r));
      res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
			 fp_infinite, res);
    }

  if (HONOR_NANS (mode))
    {
      /* A value ordered with itself is not a NaN.  */
      tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
      res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
    }

  return res;
}
9734
/* Fold a call to an unordered comparison function such as
   __builtin_isgreater().  FNDECL is the FUNCTION_DECL for the function
   being called and ARG0 and ARG1 are the arguments for the call.
   UNORDERED_CODE and ORDERED_CODE are comparison codes that give
   the opposite of the desired result.  UNORDERED_CODE is used
   for modes that can hold NaNs and ORDERED_CODE is used for
   the rest.  */

static tree
fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
			    enum tree_code unordered_code,
			    enum tree_code ordered_code)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum tree_code code;
  tree type0, type1;
  enum tree_code code0, code1;
  tree cmp_type = NULL_TREE;

  type0 = TREE_TYPE (arg0);
  type1 = TREE_TYPE (arg1);

  code0 = TREE_CODE (type0);
  code1 = TREE_CODE (type1);

  if (code0 == REAL_TYPE && code1 == REAL_TYPE)
    /* Choose the wider of two real types.  */
    cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
      ? type0 : type1;
  else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
    cmp_type = type0;
  else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
    cmp_type = type1;

  arg0 = fold_convert_loc (loc, cmp_type, arg0);
  arg1 = fold_convert_loc (loc, cmp_type, arg1);

  if (unordered_code == UNORDERED_EXPR)
    {
      /* __builtin_isunordered: without NaNs the answer is always 0.  */
      if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
	return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
      return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
    }

  /* The passed codes are the NEGATION of the desired comparison, so
     wrap the chosen one in TRUTH_NOT_EXPR.  */
  code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
						   : ordered_code;
  return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
		      fold_build2_loc (loc, code, type, arg0, arg1));
}
9784
/* Fold a call to built-in function FNDECL with 0 arguments.
   IGNORE is true if the result of the function call is ignored.  This
   function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_0 (location_t loc, tree fndecl, bool ignore ATTRIBUTE_UNUSED)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  switch (fcode)
    {
    /* __builtin_inf* fold to an infinity constant of the call's return
       type; the last argument to fold_builtin_inf distinguishes INF
       (true) from HUGE_VAL (false) — see fold_builtin_inf.  */
    CASE_FLT_FN (BUILT_IN_INF):
    case BUILT_IN_INFD32:
    case BUILT_IN_INFD64:
    case BUILT_IN_INFD128:
      return fold_builtin_inf (loc, type, true);

    CASE_FLT_FN (BUILT_IN_HUGE_VAL):
      return fold_builtin_inf (loc, type, false);

    /* With no argument, classify the "no type" case.  */
    case BUILT_IN_CLASSIFY_TYPE:
      return fold_builtin_classify_type (NULL_TREE);

    default:
      break;
    }
  return NULL_TREE;
}
9813
9814 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9815 IGNORE is true if the result of the function call is ignored. This
9816 function returns NULL_TREE if no simplification was possible. */
9817
9818 static tree
9819 fold_builtin_1 (location_t loc, tree fndecl, tree arg0, bool ignore)
9820 {
9821 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9822 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9823 switch (fcode)
9824 {
9825 case BUILT_IN_CONSTANT_P:
9826 {
9827 tree val = fold_builtin_constant_p (arg0);
9828
9829 /* Gimplification will pull the CALL_EXPR for the builtin out of
9830 an if condition. When not optimizing, we'll not CSE it back.
9831 To avoid link error types of regressions, return false now. */
9832 if (!val && !optimize)
9833 val = integer_zero_node;
9834
9835 return val;
9836 }
9837
9838 case BUILT_IN_CLASSIFY_TYPE:
9839 return fold_builtin_classify_type (arg0);
9840
9841 case BUILT_IN_STRLEN:
9842 return fold_builtin_strlen (loc, type, arg0);
9843
9844 CASE_FLT_FN (BUILT_IN_FABS):
9845 return fold_builtin_fabs (loc, arg0, type);
9846
9847 case BUILT_IN_ABS:
9848 case BUILT_IN_LABS:
9849 case BUILT_IN_LLABS:
9850 case BUILT_IN_IMAXABS:
9851 return fold_builtin_abs (loc, arg0, type);
9852
9853 CASE_FLT_FN (BUILT_IN_CONJ):
9854 if (validate_arg (arg0, COMPLEX_TYPE)
9855 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9856 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9857 break;
9858
9859 CASE_FLT_FN (BUILT_IN_CREAL):
9860 if (validate_arg (arg0, COMPLEX_TYPE)
9861 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9862 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));;
9863 break;
9864
9865 CASE_FLT_FN (BUILT_IN_CIMAG):
9866 if (validate_arg (arg0, COMPLEX_TYPE)
9867 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9868 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9869 break;
9870
9871 CASE_FLT_FN (BUILT_IN_CCOS):
9872 return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ false);
9873
9874 CASE_FLT_FN (BUILT_IN_CCOSH):
9875 return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ true);
9876
9877 CASE_FLT_FN (BUILT_IN_CPROJ):
9878 return fold_builtin_cproj(loc, arg0, type);
9879
9880 CASE_FLT_FN (BUILT_IN_CSIN):
9881 if (validate_arg (arg0, COMPLEX_TYPE)
9882 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9883 return do_mpc_arg1 (arg0, type, mpc_sin);
9884 break;
9885
9886 CASE_FLT_FN (BUILT_IN_CSINH):
9887 if (validate_arg (arg0, COMPLEX_TYPE)
9888 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9889 return do_mpc_arg1 (arg0, type, mpc_sinh);
9890 break;
9891
9892 CASE_FLT_FN (BUILT_IN_CTAN):
9893 if (validate_arg (arg0, COMPLEX_TYPE)
9894 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9895 return do_mpc_arg1 (arg0, type, mpc_tan);
9896 break;
9897
9898 CASE_FLT_FN (BUILT_IN_CTANH):
9899 if (validate_arg (arg0, COMPLEX_TYPE)
9900 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9901 return do_mpc_arg1 (arg0, type, mpc_tanh);
9902 break;
9903
9904 CASE_FLT_FN (BUILT_IN_CLOG):
9905 if (validate_arg (arg0, COMPLEX_TYPE)
9906 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9907 return do_mpc_arg1 (arg0, type, mpc_log);
9908 break;
9909
9910 CASE_FLT_FN (BUILT_IN_CSQRT):
9911 if (validate_arg (arg0, COMPLEX_TYPE)
9912 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9913 return do_mpc_arg1 (arg0, type, mpc_sqrt);
9914 break;
9915
9916 CASE_FLT_FN (BUILT_IN_CASIN):
9917 if (validate_arg (arg0, COMPLEX_TYPE)
9918 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9919 return do_mpc_arg1 (arg0, type, mpc_asin);
9920 break;
9921
9922 CASE_FLT_FN (BUILT_IN_CACOS):
9923 if (validate_arg (arg0, COMPLEX_TYPE)
9924 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9925 return do_mpc_arg1 (arg0, type, mpc_acos);
9926 break;
9927
9928 CASE_FLT_FN (BUILT_IN_CATAN):
9929 if (validate_arg (arg0, COMPLEX_TYPE)
9930 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9931 return do_mpc_arg1 (arg0, type, mpc_atan);
9932 break;
9933
9934 CASE_FLT_FN (BUILT_IN_CASINH):
9935 if (validate_arg (arg0, COMPLEX_TYPE)
9936 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9937 return do_mpc_arg1 (arg0, type, mpc_asinh);
9938 break;
9939
9940 CASE_FLT_FN (BUILT_IN_CACOSH):
9941 if (validate_arg (arg0, COMPLEX_TYPE)
9942 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9943 return do_mpc_arg1 (arg0, type, mpc_acosh);
9944 break;
9945
9946 CASE_FLT_FN (BUILT_IN_CATANH):
9947 if (validate_arg (arg0, COMPLEX_TYPE)
9948 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9949 return do_mpc_arg1 (arg0, type, mpc_atanh);
9950 break;
9951
9952 CASE_FLT_FN (BUILT_IN_CABS):
9953 return fold_builtin_cabs (loc, arg0, type, fndecl);
9954
9955 CASE_FLT_FN (BUILT_IN_CARG):
9956 return fold_builtin_carg (loc, arg0, type);
9957
9958 CASE_FLT_FN (BUILT_IN_SQRT):
9959 return fold_builtin_sqrt (loc, arg0, type);
9960
9961 CASE_FLT_FN (BUILT_IN_CBRT):
9962 return fold_builtin_cbrt (loc, arg0, type);
9963
9964 CASE_FLT_FN (BUILT_IN_ASIN):
9965 if (validate_arg (arg0, REAL_TYPE))
9966 return do_mpfr_arg1 (arg0, type, mpfr_asin,
9967 &dconstm1, &dconst1, true);
9968 break;
9969
9970 CASE_FLT_FN (BUILT_IN_ACOS):
9971 if (validate_arg (arg0, REAL_TYPE))
9972 return do_mpfr_arg1 (arg0, type, mpfr_acos,
9973 &dconstm1, &dconst1, true);
9974 break;
9975
9976 CASE_FLT_FN (BUILT_IN_ATAN):
9977 if (validate_arg (arg0, REAL_TYPE))
9978 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
9979 break;
9980
9981 CASE_FLT_FN (BUILT_IN_ASINH):
9982 if (validate_arg (arg0, REAL_TYPE))
9983 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
9984 break;
9985
9986 CASE_FLT_FN (BUILT_IN_ACOSH):
9987 if (validate_arg (arg0, REAL_TYPE))
9988 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
9989 &dconst1, NULL, true);
9990 break;
9991
9992 CASE_FLT_FN (BUILT_IN_ATANH):
9993 if (validate_arg (arg0, REAL_TYPE))
9994 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
9995 &dconstm1, &dconst1, false);
9996 break;
9997
9998 CASE_FLT_FN (BUILT_IN_SIN):
9999 if (validate_arg (arg0, REAL_TYPE))
10000 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
10001 break;
10002
10003 CASE_FLT_FN (BUILT_IN_COS):
10004 return fold_builtin_cos (loc, arg0, type, fndecl);
10005
10006 CASE_FLT_FN (BUILT_IN_TAN):
10007 return fold_builtin_tan (arg0, type);
10008
10009 CASE_FLT_FN (BUILT_IN_CEXP):
10010 return fold_builtin_cexp (loc, arg0, type);
10011
10012 CASE_FLT_FN (BUILT_IN_CEXPI):
10013 if (validate_arg (arg0, REAL_TYPE))
10014 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
10015 break;
10016
10017 CASE_FLT_FN (BUILT_IN_SINH):
10018 if (validate_arg (arg0, REAL_TYPE))
10019 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
10020 break;
10021
10022 CASE_FLT_FN (BUILT_IN_COSH):
10023 return fold_builtin_cosh (loc, arg0, type, fndecl);
10024
10025 CASE_FLT_FN (BUILT_IN_TANH):
10026 if (validate_arg (arg0, REAL_TYPE))
10027 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
10028 break;
10029
10030 CASE_FLT_FN (BUILT_IN_ERF):
10031 if (validate_arg (arg0, REAL_TYPE))
10032 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
10033 break;
10034
10035 CASE_FLT_FN (BUILT_IN_ERFC):
10036 if (validate_arg (arg0, REAL_TYPE))
10037 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
10038 break;
10039
10040 CASE_FLT_FN (BUILT_IN_TGAMMA):
10041 if (validate_arg (arg0, REAL_TYPE))
10042 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
10043 break;
10044
10045 CASE_FLT_FN (BUILT_IN_EXP):
10046 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
10047
10048 CASE_FLT_FN (BUILT_IN_EXP2):
10049 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
10050
10051 CASE_FLT_FN (BUILT_IN_EXP10):
10052 CASE_FLT_FN (BUILT_IN_POW10):
10053 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
10054
10055 CASE_FLT_FN (BUILT_IN_EXPM1):
10056 if (validate_arg (arg0, REAL_TYPE))
10057 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
10058 break;
10059
10060 CASE_FLT_FN (BUILT_IN_LOG):
10061 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log);
10062
10063 CASE_FLT_FN (BUILT_IN_LOG2):
10064 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log2);
10065
10066 CASE_FLT_FN (BUILT_IN_LOG10):
10067 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log10);
10068
10069 CASE_FLT_FN (BUILT_IN_LOG1P):
10070 if (validate_arg (arg0, REAL_TYPE))
10071 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
10072 &dconstm1, NULL, false);
10073 break;
10074
10075 CASE_FLT_FN (BUILT_IN_J0):
10076 if (validate_arg (arg0, REAL_TYPE))
10077 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10078 NULL, NULL, 0);
10079 break;
10080
10081 CASE_FLT_FN (BUILT_IN_J1):
10082 if (validate_arg (arg0, REAL_TYPE))
10083 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10084 NULL, NULL, 0);
10085 break;
10086
10087 CASE_FLT_FN (BUILT_IN_Y0):
10088 if (validate_arg (arg0, REAL_TYPE))
10089 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10090 &dconst0, NULL, false);
10091 break;
10092
10093 CASE_FLT_FN (BUILT_IN_Y1):
10094 if (validate_arg (arg0, REAL_TYPE))
10095 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10096 &dconst0, NULL, false);
10097 break;
10098
10099 CASE_FLT_FN (BUILT_IN_NAN):
10100 case BUILT_IN_NAND32:
10101 case BUILT_IN_NAND64:
10102 case BUILT_IN_NAND128:
10103 return fold_builtin_nan (arg0, type, true);
10104
10105 CASE_FLT_FN (BUILT_IN_NANS):
10106 return fold_builtin_nan (arg0, type, false);
10107
10108 CASE_FLT_FN (BUILT_IN_FLOOR):
10109 return fold_builtin_floor (loc, fndecl, arg0);
10110
10111 CASE_FLT_FN (BUILT_IN_CEIL):
10112 return fold_builtin_ceil (loc, fndecl, arg0);
10113
10114 CASE_FLT_FN (BUILT_IN_TRUNC):
10115 return fold_builtin_trunc (loc, fndecl, arg0);
10116
10117 CASE_FLT_FN (BUILT_IN_ROUND):
10118 return fold_builtin_round (loc, fndecl, arg0);
10119
10120 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10121 CASE_FLT_FN (BUILT_IN_RINT):
10122 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
10123
10124 CASE_FLT_FN (BUILT_IN_LCEIL):
10125 CASE_FLT_FN (BUILT_IN_LLCEIL):
10126 CASE_FLT_FN (BUILT_IN_LFLOOR):
10127 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10128 CASE_FLT_FN (BUILT_IN_LROUND):
10129 CASE_FLT_FN (BUILT_IN_LLROUND):
10130 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
10131
10132 CASE_FLT_FN (BUILT_IN_LRINT):
10133 CASE_FLT_FN (BUILT_IN_LLRINT):
10134 return fold_fixed_mathfn (loc, fndecl, arg0);
10135
10136 case BUILT_IN_BSWAP32:
10137 case BUILT_IN_BSWAP64:
10138 return fold_builtin_bswap (fndecl, arg0);
10139
10140 CASE_INT_FN (BUILT_IN_FFS):
10141 CASE_INT_FN (BUILT_IN_CLZ):
10142 CASE_INT_FN (BUILT_IN_CTZ):
10143 CASE_INT_FN (BUILT_IN_POPCOUNT):
10144 CASE_INT_FN (BUILT_IN_PARITY):
10145 return fold_builtin_bitop (fndecl, arg0);
10146
10147 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10148 return fold_builtin_signbit (loc, arg0, type);
10149
10150 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10151 return fold_builtin_significand (loc, arg0, type);
10152
10153 CASE_FLT_FN (BUILT_IN_ILOGB):
10154 CASE_FLT_FN (BUILT_IN_LOGB):
10155 return fold_builtin_logb (loc, arg0, type);
10156
10157 case BUILT_IN_ISASCII:
10158 return fold_builtin_isascii (loc, arg0);
10159
10160 case BUILT_IN_TOASCII:
10161 return fold_builtin_toascii (loc, arg0);
10162
10163 case BUILT_IN_ISDIGIT:
10164 return fold_builtin_isdigit (loc, arg0);
10165
10166 CASE_FLT_FN (BUILT_IN_FINITE):
10167 case BUILT_IN_FINITED32:
10168 case BUILT_IN_FINITED64:
10169 case BUILT_IN_FINITED128:
10170 case BUILT_IN_ISFINITE:
10171 {
10172 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10173 if (ret)
10174 return ret;
10175 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10176 }
10177
10178 CASE_FLT_FN (BUILT_IN_ISINF):
10179 case BUILT_IN_ISINFD32:
10180 case BUILT_IN_ISINFD64:
10181 case BUILT_IN_ISINFD128:
10182 {
10183 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10184 if (ret)
10185 return ret;
10186 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10187 }
10188
10189 case BUILT_IN_ISNORMAL:
10190 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10191
10192 case BUILT_IN_ISINF_SIGN:
10193 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10194
10195 CASE_FLT_FN (BUILT_IN_ISNAN):
10196 case BUILT_IN_ISNAND32:
10197 case BUILT_IN_ISNAND64:
10198 case BUILT_IN_ISNAND128:
10199 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10200
10201 case BUILT_IN_PRINTF:
10202 case BUILT_IN_PRINTF_UNLOCKED:
10203 case BUILT_IN_VPRINTF:
10204 return fold_builtin_printf (loc, fndecl, arg0, NULL_TREE, ignore, fcode);
10205
10206 case BUILT_IN_FREE:
10207 if (integer_zerop (arg0))
10208 return build_empty_stmt (loc);
10209 break;
10210
10211 default:
10212 break;
10213 }
10214
10215 return NULL_TREE;
10216
10217 }
10218
10219 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10220 IGNORE is true if the result of the function call is ignored. This
10221 function returns NULL_TREE if no simplification was possible. */
10222
10223 static tree
10224 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1, bool ignore)
10225 {
10226 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10227 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10228
10229 switch (fcode)
10230 {
10231 CASE_FLT_FN (BUILT_IN_JN):
10232 if (validate_arg (arg0, INTEGER_TYPE)
10233 && validate_arg (arg1, REAL_TYPE))
10234 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10235 break;
10236
10237 CASE_FLT_FN (BUILT_IN_YN):
10238 if (validate_arg (arg0, INTEGER_TYPE)
10239 && validate_arg (arg1, REAL_TYPE))
10240 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10241 &dconst0, false);
10242 break;
10243
10244 CASE_FLT_FN (BUILT_IN_DREM):
10245 CASE_FLT_FN (BUILT_IN_REMAINDER):
10246 if (validate_arg (arg0, REAL_TYPE)
10247 && validate_arg(arg1, REAL_TYPE))
10248 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10249 break;
10250
10251 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10252 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10253 if (validate_arg (arg0, REAL_TYPE)
10254 && validate_arg(arg1, POINTER_TYPE))
10255 return do_mpfr_lgamma_r (arg0, arg1, type);
10256 break;
10257
10258 CASE_FLT_FN (BUILT_IN_ATAN2):
10259 if (validate_arg (arg0, REAL_TYPE)
10260 && validate_arg(arg1, REAL_TYPE))
10261 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10262 break;
10263
10264 CASE_FLT_FN (BUILT_IN_FDIM):
10265 if (validate_arg (arg0, REAL_TYPE)
10266 && validate_arg(arg1, REAL_TYPE))
10267 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10268 break;
10269
10270 CASE_FLT_FN (BUILT_IN_HYPOT):
10271 return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
10272
10273 CASE_FLT_FN (BUILT_IN_CPOW):
10274 if (validate_arg (arg0, COMPLEX_TYPE)
10275 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10276 && validate_arg (arg1, COMPLEX_TYPE)
10277 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
10278 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
10279 break;
10280
10281 CASE_FLT_FN (BUILT_IN_LDEXP):
10282 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
10283 CASE_FLT_FN (BUILT_IN_SCALBN):
10284 CASE_FLT_FN (BUILT_IN_SCALBLN):
10285 return fold_builtin_load_exponent (loc, arg0, arg1,
10286 type, /*ldexp=*/false);
10287
10288 CASE_FLT_FN (BUILT_IN_FREXP):
10289 return fold_builtin_frexp (loc, arg0, arg1, type);
10290
10291 CASE_FLT_FN (BUILT_IN_MODF):
10292 return fold_builtin_modf (loc, arg0, arg1, type);
10293
10294 case BUILT_IN_BZERO:
10295 return fold_builtin_bzero (loc, arg0, arg1, ignore);
10296
10297 case BUILT_IN_FPUTS:
10298 return fold_builtin_fputs (loc, arg0, arg1, ignore, false, NULL_TREE);
10299
10300 case BUILT_IN_FPUTS_UNLOCKED:
10301 return fold_builtin_fputs (loc, arg0, arg1, ignore, true, NULL_TREE);
10302
10303 case BUILT_IN_STRSTR:
10304 return fold_builtin_strstr (loc, arg0, arg1, type);
10305
10306 case BUILT_IN_STRCAT:
10307 return fold_builtin_strcat (loc, arg0, arg1);
10308
10309 case BUILT_IN_STRSPN:
10310 return fold_builtin_strspn (loc, arg0, arg1);
10311
10312 case BUILT_IN_STRCSPN:
10313 return fold_builtin_strcspn (loc, arg0, arg1);
10314
10315 case BUILT_IN_STRCHR:
10316 case BUILT_IN_INDEX:
10317 return fold_builtin_strchr (loc, arg0, arg1, type);
10318
10319 case BUILT_IN_STRRCHR:
10320 case BUILT_IN_RINDEX:
10321 return fold_builtin_strrchr (loc, arg0, arg1, type);
10322
10323 case BUILT_IN_STRCPY:
10324 return fold_builtin_strcpy (loc, fndecl, arg0, arg1, NULL_TREE);
10325
10326 case BUILT_IN_STPCPY:
10327 if (ignore)
10328 {
10329 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
10330 if (!fn)
10331 break;
10332
10333 return build_call_expr_loc (loc, fn, 2, arg0, arg1);
10334 }
10335 else
10336 return fold_builtin_stpcpy (loc, fndecl, arg0, arg1);
10337 break;
10338
10339 case BUILT_IN_STRCMP:
10340 return fold_builtin_strcmp (loc, arg0, arg1);
10341
10342 case BUILT_IN_STRPBRK:
10343 return fold_builtin_strpbrk (loc, arg0, arg1, type);
10344
10345 case BUILT_IN_EXPECT:
10346 return fold_builtin_expect (loc, arg0, arg1);
10347
10348 CASE_FLT_FN (BUILT_IN_POW):
10349 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
10350
10351 CASE_FLT_FN (BUILT_IN_POWI):
10352 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
10353
10354 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10355 return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
10356
10357 CASE_FLT_FN (BUILT_IN_FMIN):
10358 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
10359
10360 CASE_FLT_FN (BUILT_IN_FMAX):
10361 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
10362
10363 case BUILT_IN_ISGREATER:
10364 return fold_builtin_unordered_cmp (loc, fndecl,
10365 arg0, arg1, UNLE_EXPR, LE_EXPR);
10366 case BUILT_IN_ISGREATEREQUAL:
10367 return fold_builtin_unordered_cmp (loc, fndecl,
10368 arg0, arg1, UNLT_EXPR, LT_EXPR);
10369 case BUILT_IN_ISLESS:
10370 return fold_builtin_unordered_cmp (loc, fndecl,
10371 arg0, arg1, UNGE_EXPR, GE_EXPR);
10372 case BUILT_IN_ISLESSEQUAL:
10373 return fold_builtin_unordered_cmp (loc, fndecl,
10374 arg0, arg1, UNGT_EXPR, GT_EXPR);
10375 case BUILT_IN_ISLESSGREATER:
10376 return fold_builtin_unordered_cmp (loc, fndecl,
10377 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10378 case BUILT_IN_ISUNORDERED:
10379 return fold_builtin_unordered_cmp (loc, fndecl,
10380 arg0, arg1, UNORDERED_EXPR,
10381 NOP_EXPR);
10382
10383 /* We do the folding for va_start in the expander. */
10384 case BUILT_IN_VA_START:
10385 break;
10386
10387 case BUILT_IN_SPRINTF:
10388 return fold_builtin_sprintf (loc, arg0, arg1, NULL_TREE, ignore);
10389
10390 case BUILT_IN_OBJECT_SIZE:
10391 return fold_builtin_object_size (arg0, arg1);
10392
10393 case BUILT_IN_PRINTF:
10394 case BUILT_IN_PRINTF_UNLOCKED:
10395 case BUILT_IN_VPRINTF:
10396 return fold_builtin_printf (loc, fndecl, arg0, arg1, ignore, fcode);
10397
10398 case BUILT_IN_PRINTF_CHK:
10399 case BUILT_IN_VPRINTF_CHK:
10400 if (!validate_arg (arg0, INTEGER_TYPE)
10401 || TREE_SIDE_EFFECTS (arg0))
10402 return NULL_TREE;
10403 else
10404 return fold_builtin_printf (loc, fndecl,
10405 arg1, NULL_TREE, ignore, fcode);
10406 break;
10407
10408 case BUILT_IN_FPRINTF:
10409 case BUILT_IN_FPRINTF_UNLOCKED:
10410 case BUILT_IN_VFPRINTF:
10411 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, NULL_TREE,
10412 ignore, fcode);
10413
10414 default:
10415 break;
10416 }
10417 return NULL_TREE;
10418 }
10419
10420 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10421 and ARG2. IGNORE is true if the result of the function call is ignored.
10422 This function returns NULL_TREE if no simplification was possible. */
10423
10424 static tree
10425 fold_builtin_3 (location_t loc, tree fndecl,
10426 tree arg0, tree arg1, tree arg2, bool ignore)
10427 {
10428 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10429 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10430 switch (fcode)
10431 {
10432
10433 CASE_FLT_FN (BUILT_IN_SINCOS):
10434 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10435
10436 CASE_FLT_FN (BUILT_IN_FMA):
10437 if (validate_arg (arg0, REAL_TYPE)
10438 && validate_arg(arg1, REAL_TYPE)
10439 && validate_arg(arg2, REAL_TYPE))
10440 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
10441 break;
10442
10443 CASE_FLT_FN (BUILT_IN_REMQUO):
10444 if (validate_arg (arg0, REAL_TYPE)
10445 && validate_arg(arg1, REAL_TYPE)
10446 && validate_arg(arg2, POINTER_TYPE))
10447 return do_mpfr_remquo (arg0, arg1, arg2);
10448 break;
10449
10450 case BUILT_IN_MEMSET:
10451 return fold_builtin_memset (loc, arg0, arg1, arg2, type, ignore);
10452
10453 case BUILT_IN_BCOPY:
10454 return fold_builtin_memory_op (loc, arg1, arg0, arg2,
10455 void_type_node, true, /*endp=*/3);
10456
10457 case BUILT_IN_MEMCPY:
10458 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10459 type, ignore, /*endp=*/0);
10460
10461 case BUILT_IN_MEMPCPY:
10462 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10463 type, ignore, /*endp=*/1);
10464
10465 case BUILT_IN_MEMMOVE:
10466 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10467 type, ignore, /*endp=*/3);
10468
10469 case BUILT_IN_STRNCAT:
10470 return fold_builtin_strncat (loc, arg0, arg1, arg2);
10471
10472 case BUILT_IN_STRNCPY:
10473 return fold_builtin_strncpy (loc, fndecl, arg0, arg1, arg2, NULL_TREE);
10474
10475 case BUILT_IN_STRNCMP:
10476 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10477
10478 case BUILT_IN_MEMCHR:
10479 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10480
10481 case BUILT_IN_BCMP:
10482 case BUILT_IN_MEMCMP:
10483 return fold_builtin_memcmp (loc, arg0, arg1, arg2);;
10484
10485 case BUILT_IN_SPRINTF:
10486 return fold_builtin_sprintf (loc, arg0, arg1, arg2, ignore);
10487
10488 case BUILT_IN_STRCPY_CHK:
10489 case BUILT_IN_STPCPY_CHK:
10490 return fold_builtin_stxcpy_chk (loc, fndecl, arg0, arg1, arg2, NULL_TREE,
10491 ignore, fcode);
10492
10493 case BUILT_IN_STRCAT_CHK:
10494 return fold_builtin_strcat_chk (loc, fndecl, arg0, arg1, arg2);
10495
10496 case BUILT_IN_PRINTF_CHK:
10497 case BUILT_IN_VPRINTF_CHK:
10498 if (!validate_arg (arg0, INTEGER_TYPE)
10499 || TREE_SIDE_EFFECTS (arg0))
10500 return NULL_TREE;
10501 else
10502 return fold_builtin_printf (loc, fndecl, arg1, arg2, ignore, fcode);
10503 break;
10504
10505 case BUILT_IN_FPRINTF:
10506 case BUILT_IN_FPRINTF_UNLOCKED:
10507 case BUILT_IN_VFPRINTF:
10508 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, arg2,
10509 ignore, fcode);
10510
10511 case BUILT_IN_FPRINTF_CHK:
10512 case BUILT_IN_VFPRINTF_CHK:
10513 if (!validate_arg (arg1, INTEGER_TYPE)
10514 || TREE_SIDE_EFFECTS (arg1))
10515 return NULL_TREE;
10516 else
10517 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, NULL_TREE,
10518 ignore, fcode);
10519
10520 default:
10521 break;
10522 }
10523 return NULL_TREE;
10524 }
10525
/* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
   ARG2, and ARG3.  IGNORE is true if the result of the function call is
   ignored.  This function returns NULL_TREE if no simplification was
   possible.  */

static tree
fold_builtin_4 (location_t loc, tree fndecl,
		tree arg0, tree arg1, tree arg2, tree arg3, bool ignore)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      return fold_builtin_memory_chk (loc, fndecl, arg0, arg1, arg2, arg3,
				      NULL_TREE, ignore,
				      DECL_FUNCTION_CODE (fndecl));

    case BUILT_IN_STRNCPY_CHK:
      return fold_builtin_strncpy_chk (loc, arg0, arg1, arg2, arg3, NULL_TREE);

    case BUILT_IN_STRNCAT_CHK:
      return fold_builtin_strncat_chk (loc, fndecl, arg0, arg1, arg2, arg3);

    case BUILT_IN_FPRINTF_CHK:
    case BUILT_IN_VFPRINTF_CHK:
      /* Only fold when the flag argument (arg1) is an integer without
	 side effects; otherwise leave the call alone.  */
      if (!validate_arg (arg1, INTEGER_TYPE)
	  || TREE_SIDE_EFFECTS (arg1))
	return NULL_TREE;
      else
	return fold_builtin_fprintf (loc, fndecl, arg0, arg2, arg3,
				     ignore, fcode);
      break;

    default:
      break;
    }
  return NULL_TREE;
}
10568
10569 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10570 arguments, where NARGS <= 4. IGNORE is true if the result of the
10571 function call is ignored. This function returns NULL_TREE if no
10572 simplification was possible. Note that this only folds builtins with
10573 fixed argument patterns. Foldings that do varargs-to-varargs
10574 transformations, or that match calls with more than 4 arguments,
10575 need to be handled with fold_builtin_varargs instead. */
10576
10577 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10578
10579 static tree
10580 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool ignore)
10581 {
10582 tree ret = NULL_TREE;
10583
10584 switch (nargs)
10585 {
10586 case 0:
10587 ret = fold_builtin_0 (loc, fndecl, ignore);
10588 break;
10589 case 1:
10590 ret = fold_builtin_1 (loc, fndecl, args[0], ignore);
10591 break;
10592 case 2:
10593 ret = fold_builtin_2 (loc, fndecl, args[0], args[1], ignore);
10594 break;
10595 case 3:
10596 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2], ignore);
10597 break;
10598 case 4:
10599 ret = fold_builtin_4 (loc, fndecl, args[0], args[1], args[2], args[3],
10600 ignore);
10601 break;
10602 default:
10603 break;
10604 }
10605 if (ret)
10606 {
10607 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10608 SET_EXPR_LOCATION (ret, loc);
10609 TREE_NO_WARNING (ret) = 1;
10610 return ret;
10611 }
10612 return NULL_TREE;
10613 }
10614
10615 /* Builtins with folding operations that operate on "..." arguments
10616 need special handling; we need to store the arguments in a convenient
10617 data structure before attempting any folding. Fortunately there are
10618 only a few builtins that fall into this category. FNDECL is the
10619 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
10620 result of the function call is ignored. */
10621
10622 static tree
10623 fold_builtin_varargs (location_t loc, tree fndecl, tree exp,
10624 bool ignore ATTRIBUTE_UNUSED)
10625 {
10626 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10627 tree ret = NULL_TREE;
10628
10629 switch (fcode)
10630 {
10631 case BUILT_IN_SPRINTF_CHK:
10632 case BUILT_IN_VSPRINTF_CHK:
10633 ret = fold_builtin_sprintf_chk (loc, exp, fcode);
10634 break;
10635
10636 case BUILT_IN_SNPRINTF_CHK:
10637 case BUILT_IN_VSNPRINTF_CHK:
10638 ret = fold_builtin_snprintf_chk (loc, exp, NULL_TREE, fcode);
10639 break;
10640
10641 case BUILT_IN_FPCLASSIFY:
10642 ret = fold_builtin_fpclassify (loc, exp);
10643 break;
10644
10645 default:
10646 break;
10647 }
10648 if (ret)
10649 {
10650 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10651 SET_EXPR_LOCATION (ret, loc);
10652 TREE_NO_WARNING (ret) = 1;
10653 return ret;
10654 }
10655 return NULL_TREE;
10656 }
10657
10658 /* Return true if FNDECL shouldn't be folded right now.
10659 If a built-in function has an inline attribute always_inline
10660 wrapper, defer folding it after always_inline functions have
10661 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
10662 might not be performed. */
10663
10664 static bool
10665 avoid_folding_inline_builtin (tree fndecl)
10666 {
10667 return (DECL_DECLARED_INLINE_P (fndecl)
10668 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10669 && cfun
10670 && !cfun->always_inline_functions_inlined
10671 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
10672 }
10673
10674 /* A wrapper function for builtin folding that prevents warnings for
10675 "statement without effect" and the like, caused by removing the
10676 call node earlier than the warning is generated. */
10677
10678 tree
10679 fold_call_expr (location_t loc, tree exp, bool ignore)
10680 {
10681 tree ret = NULL_TREE;
10682 tree fndecl = get_callee_fndecl (exp);
10683 if (fndecl
10684 && TREE_CODE (fndecl) == FUNCTION_DECL
10685 && DECL_BUILT_IN (fndecl)
10686 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10687 yet. Defer folding until we see all the arguments
10688 (after inlining). */
10689 && !CALL_EXPR_VA_ARG_PACK (exp))
10690 {
10691 int nargs = call_expr_nargs (exp);
10692
10693 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10694 instead last argument is __builtin_va_arg_pack (). Defer folding
10695 even in that case, until arguments are finalized. */
10696 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10697 {
10698 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10699 if (fndecl2
10700 && TREE_CODE (fndecl2) == FUNCTION_DECL
10701 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10702 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10703 return NULL_TREE;
10704 }
10705
10706 if (avoid_folding_inline_builtin (fndecl))
10707 return NULL_TREE;
10708
10709 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10710 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
10711 CALL_EXPR_ARGP (exp), ignore);
10712 else
10713 {
10714 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
10715 {
10716 tree *args = CALL_EXPR_ARGP (exp);
10717 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
10718 }
10719 if (!ret)
10720 ret = fold_builtin_varargs (loc, fndecl, exp, ignore);
10721 if (ret)
10722 return ret;
10723 }
10724 }
10725 return NULL_TREE;
10726 }
10727
10728 /* Conveniently construct a function call expression. FNDECL names the
10729 function to be called and ARGLIST is a TREE_LIST of arguments. */
10730
10731 tree
10732 build_function_call_expr (location_t loc, tree fndecl, tree arglist)
10733 {
10734 tree fntype = TREE_TYPE (fndecl);
10735 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10736 int n = list_length (arglist);
10737 tree *argarray = (tree *) alloca (n * sizeof (tree));
10738 int i;
10739
10740 for (i = 0; i < n; i++, arglist = TREE_CHAIN (arglist))
10741 argarray[i] = TREE_VALUE (arglist);
10742 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
10743 }
10744
10745 /* Conveniently construct a function call expression. FNDECL names the
10746 function to be called, N is the number of arguments, and the "..."
10747 parameters are the argument expressions. */
10748
10749 tree
10750 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
10751 {
10752 va_list ap;
10753 tree fntype = TREE_TYPE (fndecl);
10754 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10755 tree *argarray = (tree *) alloca (n * sizeof (tree));
10756 int i;
10757
10758 va_start (ap, n);
10759 for (i = 0; i < n; i++)
10760 argarray[i] = va_arg (ap, tree);
10761 va_end (ap);
10762 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
10763 }
10764
10765 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
10766 varargs macros aren't supported by all bootstrap compilers. */
10767
10768 tree
10769 build_call_expr (tree fndecl, int n, ...)
10770 {
10771 va_list ap;
10772 tree fntype = TREE_TYPE (fndecl);
10773 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10774 tree *argarray = (tree *) alloca (n * sizeof (tree));
10775 int i;
10776
10777 va_start (ap, n);
10778 for (i = 0; i < n; i++)
10779 argarray[i] = va_arg (ap, tree);
10780 va_end (ap);
10781 return fold_builtin_call_array (UNKNOWN_LOCATION, TREE_TYPE (fntype),
10782 fn, n, argarray);
10783 }
10784
10785 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
10786 N arguments are passed in the array ARGARRAY. */
10787
tree
fold_builtin_call_array (location_t loc, tree type,
			 tree fn,
			 int n,
			 tree *argarray)
{
  tree ret = NULL_TREE;
  tree exp;

  /* Only calls through a decl address can be folded; an indirect call
     through an arbitrary pointer is built unmodified at the end.  */
  if (TREE_CODE (fn) == ADDR_EXPR)
    {
      tree fndecl = TREE_OPERAND (fn, 0);
      if (TREE_CODE (fndecl) == FUNCTION_DECL
	  && DECL_BUILT_IN (fndecl))
	{
	  /* If last argument is __builtin_va_arg_pack (), arguments to this
	     function are not finalized yet.  Defer folding until they are.  */
	  if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
	    {
	      tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
	      if (fndecl2
		  && TREE_CODE (fndecl2) == FUNCTION_DECL
		  && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
		  && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
		return build_call_array_loc (loc, type, fn, n, argarray);
	    }
	  /* Some builtins must be kept as calls so their inline
	     definitions remain usable; don't fold those.  */
	  if (avoid_folding_inline_builtin (fndecl))
	    return build_call_array_loc (loc, type, fn, n, argarray);
	  /* Machine-dependent builtins are delegated to the target hook.  */
	  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	    {
	      ret = targetm.fold_builtin (fndecl, n, argarray, false);
	      if (ret)
		return ret;

	      return build_call_array_loc (loc, type, fn, n, argarray);
	    }
	  else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
	    {
	      /* First try the transformations that don't require consing up
		 an exp.  */
	      ret = fold_builtin_n (loc, fndecl, argarray, n, false);
	      if (ret)
		return ret;
	    }

	  /* If we got this far, we need to build an exp.  */
	  exp = build_call_array_loc (loc, type, fn, n, argarray);
	  ret = fold_builtin_varargs (loc, fndecl, exp, false);
	  return ret ? ret : exp;
	}
    }

  return build_call_array_loc (loc, type, fn, n, argarray);
}
10842
10843 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10844 along with N new arguments specified as the "..." parameters. SKIP
10845 is the number of arguments in EXP to be omitted. This function is used
10846 to do varargs-to-varargs transformations. */
10847
static tree
rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
{
  int oldnargs = call_expr_nargs (exp);
  int nargs = oldnargs - skip + n;
  tree fntype = TREE_TYPE (fndecl);
  tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
  tree *buffer;

  if (n > 0)
    {
      int i, j;
      va_list ap;

      /* New arguments come first, followed by the tail of EXP's
	 arguments beyond SKIP.  */
      buffer = XALLOCAVEC (tree, nargs);
      va_start (ap, n);
      for (i = 0; i < n; i++)
	buffer[i] = va_arg (ap, tree);
      va_end (ap);
      for (j = skip; j < oldnargs; j++, i++)
	buffer[i] = CALL_EXPR_ARG (exp, j);
    }
  else
    /* No new arguments: point directly into EXP's argument vector
       (read-only use; build_call_array copies it).  */
    buffer = CALL_EXPR_ARGP (exp) + skip;

  return fold (build_call_array_loc (loc, TREE_TYPE (exp), fn, nargs, buffer));
}
10875
10876 /* Validate a single argument ARG against a tree code CODE representing
10877 a type. */
10878
10879 static bool
10880 validate_arg (const_tree arg, enum tree_code code)
10881 {
10882 if (!arg)
10883 return false;
10884 else if (code == POINTER_TYPE)
10885 return POINTER_TYPE_P (TREE_TYPE (arg));
10886 else if (code == INTEGER_TYPE)
10887 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10888 return code == TREE_CODE (TREE_TYPE (arg));
10889 }
10890
10891 /* This function validates the types of a function call argument list
10892 against a specified list of tree_codes. If the last specifier is a 0,
10893 that represents an ellipses, otherwise the last specifier must be a
10894 VOID_TYPE.
10895
10896 This is the GIMPLE version of validate_arglist. Eventually we want to
10897 completely convert builtins.c to work from GIMPLEs and the tree based
10898 validate_arglist will then be removed. */
10899
10900 bool
10901 validate_gimple_arglist (const_gimple call, ...)
10902 {
10903 enum tree_code code;
10904 bool res = 0;
10905 va_list ap;
10906 const_tree arg;
10907 size_t i;
10908
10909 va_start (ap, call);
10910 i = 0;
10911
10912 do
10913 {
10914 code = (enum tree_code) va_arg (ap, int);
10915 switch (code)
10916 {
10917 case 0:
10918 /* This signifies an ellipses, any further arguments are all ok. */
10919 res = true;
10920 goto end;
10921 case VOID_TYPE:
10922 /* This signifies an endlink, if no arguments remain, return
10923 true, otherwise return false. */
10924 res = (i == gimple_call_num_args (call));
10925 goto end;
10926 default:
10927 /* If no parameters remain or the parameter's code does not
10928 match the specified code, return false. Otherwise continue
10929 checking any remaining arguments. */
10930 arg = gimple_call_arg (call, i++);
10931 if (!validate_arg (arg, code))
10932 goto end;
10933 break;
10934 }
10935 }
10936 while (1);
10937
10938 /* We need gotos here since we can only have one VA_CLOSE in a
10939 function. */
10940 end: ;
10941 va_end (ap);
10942
10943 return res;
10944 }
10945
10946 /* This function validates the types of a function call argument list
10947 against a specified list of tree_codes. If the last specifier is a 0,
10948 that represents an ellipses, otherwise the last specifier must be a
10949 VOID_TYPE. */
10950
10951 bool
10952 validate_arglist (const_tree callexpr, ...)
10953 {
10954 enum tree_code code;
10955 bool res = 0;
10956 va_list ap;
10957 const_call_expr_arg_iterator iter;
10958 const_tree arg;
10959
10960 va_start (ap, callexpr);
10961 init_const_call_expr_arg_iterator (callexpr, &iter);
10962
10963 do
10964 {
10965 code = (enum tree_code) va_arg (ap, int);
10966 switch (code)
10967 {
10968 case 0:
10969 /* This signifies an ellipses, any further arguments are all ok. */
10970 res = true;
10971 goto end;
10972 case VOID_TYPE:
10973 /* This signifies an endlink, if no arguments remain, return
10974 true, otherwise return false. */
10975 res = !more_const_call_expr_args_p (&iter);
10976 goto end;
10977 default:
10978 /* If no parameters remain or the parameter's code does not
10979 match the specified code, return false. Otherwise continue
10980 checking any remaining arguments. */
10981 arg = next_const_call_expr_arg (&iter);
10982 if (!validate_arg (arg, code))
10983 goto end;
10984 break;
10985 }
10986 }
10987 while (1);
10988
10989 /* We need gotos here since we can only have one VA_CLOSE in a
10990 function. */
10991 end: ;
10992 va_end (ap);
10993
10994 return res;
10995 }
10996
10997 /* Default target-specific builtin expander that does nothing. */
10998
rtx
default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
			rtx target ATTRIBUTE_UNUSED,
			rtx subtarget ATTRIBUTE_UNUSED,
			enum machine_mode mode ATTRIBUTE_UNUSED,
			int ignore ATTRIBUTE_UNUSED)
{
  /* Returning NULL_RTX signals the caller that no target-specific
     expansion was performed, so the generic expander takes over.  */
  return NULL_RTX;
}
11008
11009 /* Returns true is EXP represents data that would potentially reside
11010 in a readonly section. */
11011
11012 static bool
11013 readonly_data_expr (tree exp)
11014 {
11015 STRIP_NOPS (exp);
11016
11017 if (TREE_CODE (exp) != ADDR_EXPR)
11018 return false;
11019
11020 exp = get_base_address (TREE_OPERAND (exp, 0));
11021 if (!exp)
11022 return false;
11023
11024 /* Make sure we call decl_readonly_section only for trees it
11025 can handle (since it returns true for everything it doesn't
11026 understand). */
11027 if (TREE_CODE (exp) == STRING_CST
11028 || TREE_CODE (exp) == CONSTRUCTOR
11029 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
11030 return decl_readonly_section (exp, 0);
11031 else
11032 return false;
11033 }
11034
11035 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
11036 to the call, and TYPE is its return type.
11037
11038 Return NULL_TREE if no simplification was possible, otherwise return the
11039 simplified form of the call as a tree.
11040
11041 The simplified form may be a constant or other expression which
11042 computes the same value, but in a more efficient manner (including
11043 calls to other builtin functions).
11044
11045 The call may contain arguments which need to be evaluated, but
11046 which are not useful to determine the result of the call. In
11047 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11048 COMPOUND_EXPR will be an argument which must be evaluated.
11049 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11050 COMPOUND_EXPR in the chain will contain the tree for the simplified
11051 form of the builtin function call. */
11052
11053 static tree
11054 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
11055 {
11056 if (!validate_arg (s1, POINTER_TYPE)
11057 || !validate_arg (s2, POINTER_TYPE))
11058 return NULL_TREE;
11059 else
11060 {
11061 tree fn;
11062 const char *p1, *p2;
11063
11064 p2 = c_getstr (s2);
11065 if (p2 == NULL)
11066 return NULL_TREE;
11067
11068 p1 = c_getstr (s1);
11069 if (p1 != NULL)
11070 {
11071 const char *r = strstr (p1, p2);
11072 tree tem;
11073
11074 if (r == NULL)
11075 return build_int_cst (TREE_TYPE (s1), 0);
11076
11077 /* Return an offset into the constant string argument. */
11078 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11079 s1, size_int (r - p1));
11080 return fold_convert_loc (loc, type, tem);
11081 }
11082
11083 /* The argument is const char *, and the result is char *, so we need
11084 a type conversion here to avoid a warning. */
11085 if (p2[0] == '\0')
11086 return fold_convert_loc (loc, type, s1);
11087
11088 if (p2[1] != '\0')
11089 return NULL_TREE;
11090
11091 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11092 if (!fn)
11093 return NULL_TREE;
11094
11095 /* New argument list transforming strstr(s1, s2) to
11096 strchr(s1, s2[0]). */
11097 return build_call_expr_loc (loc, fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
11098 }
11099 }
11100
11101 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
11102 the call, and TYPE is its return type.
11103
11104 Return NULL_TREE if no simplification was possible, otherwise return the
11105 simplified form of the call as a tree.
11106
11107 The simplified form may be a constant or other expression which
11108 computes the same value, but in a more efficient manner (including
11109 calls to other builtin functions).
11110
11111 The call may contain arguments which need to be evaluated, but
11112 which are not useful to determine the result of the call. In
11113 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11114 COMPOUND_EXPR will be an argument which must be evaluated.
11115 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11116 COMPOUND_EXPR in the chain will contain the tree for the simplified
11117 form of the builtin function call. */
11118
11119 static tree
11120 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
11121 {
11122 if (!validate_arg (s1, POINTER_TYPE)
11123 || !validate_arg (s2, INTEGER_TYPE))
11124 return NULL_TREE;
11125 else
11126 {
11127 const char *p1;
11128
11129 if (TREE_CODE (s2) != INTEGER_CST)
11130 return NULL_TREE;
11131
11132 p1 = c_getstr (s1);
11133 if (p1 != NULL)
11134 {
11135 char c;
11136 const char *r;
11137 tree tem;
11138
11139 if (target_char_cast (s2, &c))
11140 return NULL_TREE;
11141
11142 r = strchr (p1, c);
11143
11144 if (r == NULL)
11145 return build_int_cst (TREE_TYPE (s1), 0);
11146
11147 /* Return an offset into the constant string argument. */
11148 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11149 s1, size_int (r - p1));
11150 return fold_convert_loc (loc, type, tem);
11151 }
11152 return NULL_TREE;
11153 }
11154 }
11155
11156 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
11157 the call, and TYPE is its return type.
11158
11159 Return NULL_TREE if no simplification was possible, otherwise return the
11160 simplified form of the call as a tree.
11161
11162 The simplified form may be a constant or other expression which
11163 computes the same value, but in a more efficient manner (including
11164 calls to other builtin functions).
11165
11166 The call may contain arguments which need to be evaluated, but
11167 which are not useful to determine the result of the call. In
11168 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11169 COMPOUND_EXPR will be an argument which must be evaluated.
11170 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11171 COMPOUND_EXPR in the chain will contain the tree for the simplified
11172 form of the builtin function call. */
11173
11174 static tree
11175 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
11176 {
11177 if (!validate_arg (s1, POINTER_TYPE)
11178 || !validate_arg (s2, INTEGER_TYPE))
11179 return NULL_TREE;
11180 else
11181 {
11182 tree fn;
11183 const char *p1;
11184
11185 if (TREE_CODE (s2) != INTEGER_CST)
11186 return NULL_TREE;
11187
11188 p1 = c_getstr (s1);
11189 if (p1 != NULL)
11190 {
11191 char c;
11192 const char *r;
11193 tree tem;
11194
11195 if (target_char_cast (s2, &c))
11196 return NULL_TREE;
11197
11198 r = strrchr (p1, c);
11199
11200 if (r == NULL)
11201 return build_int_cst (TREE_TYPE (s1), 0);
11202
11203 /* Return an offset into the constant string argument. */
11204 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11205 s1, size_int (r - p1));
11206 return fold_convert_loc (loc, type, tem);
11207 }
11208
11209 if (! integer_zerop (s2))
11210 return NULL_TREE;
11211
11212 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11213 if (!fn)
11214 return NULL_TREE;
11215
11216 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11217 return build_call_expr_loc (loc, fn, 2, s1, s2);
11218 }
11219 }
11220
11221 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11222 to the call, and TYPE is its return type.
11223
11224 Return NULL_TREE if no simplification was possible, otherwise return the
11225 simplified form of the call as a tree.
11226
11227 The simplified form may be a constant or other expression which
11228 computes the same value, but in a more efficient manner (including
11229 calls to other builtin functions).
11230
11231 The call may contain arguments which need to be evaluated, but
11232 which are not useful to determine the result of the call. In
11233 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11234 COMPOUND_EXPR will be an argument which must be evaluated.
11235 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11236 COMPOUND_EXPR in the chain will contain the tree for the simplified
11237 form of the builtin function call. */
11238
11239 static tree
11240 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
11241 {
11242 if (!validate_arg (s1, POINTER_TYPE)
11243 || !validate_arg (s2, POINTER_TYPE))
11244 return NULL_TREE;
11245 else
11246 {
11247 tree fn;
11248 const char *p1, *p2;
11249
11250 p2 = c_getstr (s2);
11251 if (p2 == NULL)
11252 return NULL_TREE;
11253
11254 p1 = c_getstr (s1);
11255 if (p1 != NULL)
11256 {
11257 const char *r = strpbrk (p1, p2);
11258 tree tem;
11259
11260 if (r == NULL)
11261 return build_int_cst (TREE_TYPE (s1), 0);
11262
11263 /* Return an offset into the constant string argument. */
11264 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11265 s1, size_int (r - p1));
11266 return fold_convert_loc (loc, type, tem);
11267 }
11268
11269 if (p2[0] == '\0')
11270 /* strpbrk(x, "") == NULL.
11271 Evaluate and ignore s1 in case it had side-effects. */
11272 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
11273
11274 if (p2[1] != '\0')
11275 return NULL_TREE; /* Really call strpbrk. */
11276
11277 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11278 if (!fn)
11279 return NULL_TREE;
11280
11281 /* New argument list transforming strpbrk(s1, s2) to
11282 strchr(s1, s2[0]). */
11283 return build_call_expr_loc (loc, fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
11284 }
11285 }
11286
11287 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11288 to the call.
11289
11290 Return NULL_TREE if no simplification was possible, otherwise return the
11291 simplified form of the call as a tree.
11292
11293 The simplified form may be a constant or other expression which
11294 computes the same value, but in a more efficient manner (including
11295 calls to other builtin functions).
11296
11297 The call may contain arguments which need to be evaluated, but
11298 which are not useful to determine the result of the call. In
11299 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11300 COMPOUND_EXPR will be an argument which must be evaluated.
11301 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11302 COMPOUND_EXPR in the chain will contain the tree for the simplified
11303 form of the builtin function call. */
11304
static tree
fold_builtin_strcat (location_t loc ATTRIBUTE_UNUSED, tree dst, tree src)
{
  if (!validate_arg (dst, POINTER_TYPE)
      || !validate_arg (src, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      const char *p = c_getstr (src);

      /* If the string length is zero, return the dst parameter.  */
      if (p && *p == '\0')
	return dst;

      if (optimize_insn_for_speed_p ())
	{
	  /* See if we can store by pieces into (dst + strlen(dst)).  */
	  tree newdst, call;
	  tree strlen_fn = implicit_built_in_decls[BUILT_IN_STRLEN];
	  tree strcpy_fn = implicit_built_in_decls[BUILT_IN_STRCPY];

	  if (!strlen_fn || !strcpy_fn)
	    return NULL_TREE;

	  /* If we don't have a movstr we don't want to emit an strcpy
	     call.  We have to do that if the length of the source string
	     isn't computable (in that case we can use memcpy probably
	     later expanding to a sequence of mov instructions).  If we
	     have movstr instructions we can emit strcpy calls.  */
	  if (!HAVE_movstr)
	    {
	      tree len = c_strlen (src, 1);
	      if (! len || TREE_SIDE_EFFECTS (len))
		return NULL_TREE;
	    }

	  /* Stabilize the argument list.  DST is evaluated twice below
	     (once for strlen, once as the COMPOUND_EXPR result), so it
	     must be wrapped in a SAVE_EXPR first.  */
	  dst = builtin_save_expr (dst);

	  /* Create strlen (dst).  */
	  newdst = build_call_expr_loc (loc, strlen_fn, 1, dst);
	  /* Create (dst p+ strlen (dst)).  */

	  newdst = fold_build2_loc (loc, POINTER_PLUS_EXPR,
				    TREE_TYPE (dst), dst, newdst);
	  newdst = builtin_save_expr (newdst);

	  /* Emit strcpy (dst + strlen (dst), src) and yield DST as the
	     overall value, matching strcat's return contract.  */
	  call = build_call_expr_loc (loc, strcpy_fn, 2, newdst, src);
	  return build2 (COMPOUND_EXPR, TREE_TYPE (dst), call, dst);
	}
      return NULL_TREE;
    }
}
11358
11359 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11360 arguments to the call.
11361
11362 Return NULL_TREE if no simplification was possible, otherwise return the
11363 simplified form of the call as a tree.
11364
11365 The simplified form may be a constant or other expression which
11366 computes the same value, but in a more efficient manner (including
11367 calls to other builtin functions).
11368
11369 The call may contain arguments which need to be evaluated, but
11370 which are not useful to determine the result of the call. In
11371 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11372 COMPOUND_EXPR will be an argument which must be evaluated.
11373 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11374 COMPOUND_EXPR in the chain will contain the tree for the simplified
11375 form of the builtin function call. */
11376
11377 static tree
11378 fold_builtin_strncat (location_t loc, tree dst, tree src, tree len)
11379 {
11380 if (!validate_arg (dst, POINTER_TYPE)
11381 || !validate_arg (src, POINTER_TYPE)
11382 || !validate_arg (len, INTEGER_TYPE))
11383 return NULL_TREE;
11384 else
11385 {
11386 const char *p = c_getstr (src);
11387
11388 /* If the requested length is zero, or the src parameter string
11389 length is zero, return the dst parameter. */
11390 if (integer_zerop (len) || (p && *p == '\0'))
11391 return omit_two_operands_loc (loc, TREE_TYPE (dst), dst, src, len);
11392
11393 /* If the requested len is greater than or equal to the string
11394 length, call strcat. */
11395 if (TREE_CODE (len) == INTEGER_CST && p
11396 && compare_tree_int (len, strlen (p)) >= 0)
11397 {
11398 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
11399
11400 /* If the replacement _DECL isn't initialized, don't do the
11401 transformation. */
11402 if (!fn)
11403 return NULL_TREE;
11404
11405 return build_call_expr_loc (loc, fn, 2, dst, src);
11406 }
11407 return NULL_TREE;
11408 }
11409 }
11410
11411 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11412 to the call.
11413
11414 Return NULL_TREE if no simplification was possible, otherwise return the
11415 simplified form of the call as a tree.
11416
11417 The simplified form may be a constant or other expression which
11418 computes the same value, but in a more efficient manner (including
11419 calls to other builtin functions).
11420
11421 The call may contain arguments which need to be evaluated, but
11422 which are not useful to determine the result of the call. In
11423 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11424 COMPOUND_EXPR will be an argument which must be evaluated.
11425 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11426 COMPOUND_EXPR in the chain will contain the tree for the simplified
11427 form of the builtin function call. */
11428
11429 static tree
11430 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11431 {
11432 if (!validate_arg (s1, POINTER_TYPE)
11433 || !validate_arg (s2, POINTER_TYPE))
11434 return NULL_TREE;
11435 else
11436 {
11437 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11438
11439 /* If both arguments are constants, evaluate at compile-time. */
11440 if (p1 && p2)
11441 {
11442 const size_t r = strspn (p1, p2);
11443 return size_int (r);
11444 }
11445
11446 /* If either argument is "", return NULL_TREE. */
11447 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11448 /* Evaluate and ignore both arguments in case either one has
11449 side-effects. */
11450 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11451 s1, s2);
11452 return NULL_TREE;
11453 }
11454 }
11455
11456 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11457 to the call.
11458
11459 Return NULL_TREE if no simplification was possible, otherwise return the
11460 simplified form of the call as a tree.
11461
11462 The simplified form may be a constant or other expression which
11463 computes the same value, but in a more efficient manner (including
11464 calls to other builtin functions).
11465
11466 The call may contain arguments which need to be evaluated, but
11467 which are not useful to determine the result of the call. In
11468 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11469 COMPOUND_EXPR will be an argument which must be evaluated.
11470 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11471 COMPOUND_EXPR in the chain will contain the tree for the simplified
11472 form of the builtin function call. */
11473
11474 static tree
11475 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
11476 {
11477 if (!validate_arg (s1, POINTER_TYPE)
11478 || !validate_arg (s2, POINTER_TYPE))
11479 return NULL_TREE;
11480 else
11481 {
11482 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11483
11484 /* If both arguments are constants, evaluate at compile-time. */
11485 if (p1 && p2)
11486 {
11487 const size_t r = strcspn (p1, p2);
11488 return size_int (r);
11489 }
11490
11491 /* If the first argument is "", return NULL_TREE. */
11492 if (p1 && *p1 == '\0')
11493 {
11494 /* Evaluate and ignore argument s2 in case it has
11495 side-effects. */
11496 return omit_one_operand_loc (loc, size_type_node,
11497 size_zero_node, s2);
11498 }
11499
11500 /* If the second argument is "", return __builtin_strlen(s1). */
11501 if (p2 && *p2 == '\0')
11502 {
11503 tree fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11504
11505 /* If the replacement _DECL isn't initialized, don't do the
11506 transformation. */
11507 if (!fn)
11508 return NULL_TREE;
11509
11510 return build_call_expr_loc (loc, fn, 1, s1);
11511 }
11512 return NULL_TREE;
11513 }
11514 }
11515
11516 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11517 to the call. IGNORE is true if the value returned
11518 by the builtin will be ignored. UNLOCKED is true is true if this
11519 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
11520 the known length of the string. Return NULL_TREE if no simplification
11521 was possible. */
11522
11523 tree
11524 fold_builtin_fputs (location_t loc, tree arg0, tree arg1,
11525 bool ignore, bool unlocked, tree len)
11526 {
11527 /* If we're using an unlocked function, assume the other unlocked
11528 functions exist explicitly. */
11529 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
11530 : implicit_built_in_decls[BUILT_IN_FPUTC];
11531 tree const fn_fwrite = unlocked ? built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
11532 : implicit_built_in_decls[BUILT_IN_FWRITE];
11533
11534 /* If the return value is used, don't do the transformation. */
11535 if (!ignore)
11536 return NULL_TREE;
11537
11538 /* Verify the arguments in the original call. */
11539 if (!validate_arg (arg0, POINTER_TYPE)
11540 || !validate_arg (arg1, POINTER_TYPE))
11541 return NULL_TREE;
11542
11543 if (! len)
11544 len = c_strlen (arg0, 0);
11545
11546 /* Get the length of the string passed to fputs. If the length
11547 can't be determined, punt. */
11548 if (!len
11549 || TREE_CODE (len) != INTEGER_CST)
11550 return NULL_TREE;
11551
11552 switch (compare_tree_int (len, 1))
11553 {
11554 case -1: /* length is 0, delete the call entirely . */
11555 return omit_one_operand_loc (loc, integer_type_node,
11556 integer_zero_node, arg1);;
11557
11558 case 0: /* length is 1, call fputc. */
11559 {
11560 const char *p = c_getstr (arg0);
11561
11562 if (p != NULL)
11563 {
11564 if (fn_fputc)
11565 return build_call_expr_loc (loc, fn_fputc, 2,
11566 build_int_cst (NULL_TREE, p[0]), arg1);
11567 else
11568 return NULL_TREE;
11569 }
11570 }
11571 /* FALLTHROUGH */
11572 case 1: /* length is greater than 1, call fwrite. */
11573 {
11574 /* If optimizing for size keep fputs. */
11575 if (optimize_function_for_size_p (cfun))
11576 return NULL_TREE;
11577 /* New argument list transforming fputs(string, stream) to
11578 fwrite(string, 1, len, stream). */
11579 if (fn_fwrite)
11580 return build_call_expr_loc (loc, fn_fwrite, 4, arg0,
11581 size_one_node, len, arg1);
11582 else
11583 return NULL_TREE;
11584 }
11585 default:
11586 gcc_unreachable ();
11587 }
11588 return NULL_TREE;
11589 }
11590
11591 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
11592 produced. False otherwise. This is done so that we don't output the error
11593 or warning twice or three times. */
11594
11595 bool
11596 fold_builtin_next_arg (tree exp, bool va_start_p)
11597 {
11598 tree fntype = TREE_TYPE (current_function_decl);
11599 int nargs = call_expr_nargs (exp);
11600 tree arg;
11601
11602 if (TYPE_ARG_TYPES (fntype) == 0
11603 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
11604 == void_type_node))
11605 {
11606 error ("%<va_start%> used in function with fixed args");
11607 return true;
11608 }
11609
11610 if (va_start_p)
11611 {
11612 if (va_start_p && (nargs != 2))
11613 {
11614 error ("wrong number of arguments to function %<va_start%>");
11615 return true;
11616 }
11617 arg = CALL_EXPR_ARG (exp, 1);
11618 }
11619 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11620 when we checked the arguments and if needed issued a warning. */
11621 else
11622 {
11623 if (nargs == 0)
11624 {
11625 /* Evidently an out of date version of <stdarg.h>; can't validate
11626 va_start's second argument, but can still work as intended. */
11627 warning (0, "%<__builtin_next_arg%> called without an argument");
11628 return true;
11629 }
11630 else if (nargs > 1)
11631 {
11632 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11633 return true;
11634 }
11635 arg = CALL_EXPR_ARG (exp, 0);
11636 }
11637
11638 if (TREE_CODE (arg) == SSA_NAME)
11639 arg = SSA_NAME_VAR (arg);
11640
11641 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11642 or __builtin_next_arg (0) the first time we see it, after checking
11643 the arguments and if needed issuing a warning. */
11644 if (!integer_zerop (arg))
11645 {
11646 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11647
11648 /* Strip off all nops for the sake of the comparison. This
11649 is not quite the same as STRIP_NOPS. It does more.
11650 We must also strip off INDIRECT_EXPR for C++ reference
11651 parameters. */
11652 while (CONVERT_EXPR_P (arg)
11653 || TREE_CODE (arg) == INDIRECT_REF)
11654 arg = TREE_OPERAND (arg, 0);
11655 if (arg != last_parm)
11656 {
11657 /* FIXME: Sometimes with the tree optimizers we can get the
11658 not the last argument even though the user used the last
11659 argument. We just warn and set the arg to be the last
11660 argument so that we will get wrong-code because of
11661 it. */
11662 warning (0, "second parameter of %<va_start%> not last named argument");
11663 }
11664
11665 /* Undefined by C99 7.15.1.4p4 (va_start):
11666 "If the parameter parmN is declared with the register storage
11667 class, with a function or array type, or with a type that is
11668 not compatible with the type that results after application of
11669 the default argument promotions, the behavior is undefined."
11670 */
11671 else if (DECL_REGISTER (arg))
11672 warning (0, "undefined behaviour when second parameter of "
11673 "%<va_start%> is declared with %<register%> storage");
11674
11675 /* We want to verify the second parameter just once before the tree
11676 optimizers are run and then avoid keeping it in the tree,
11677 as otherwise we could warn even for correct code like:
11678 void foo (int i, ...)
11679 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11680 if (va_start_p)
11681 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11682 else
11683 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11684 }
11685 return false;
11686 }
11687
11688
11689 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
11690 ORIG may be null if this is a 2-argument call. We don't attempt to
11691 simplify calls with more than 3 arguments.
11692
11693 Return NULL_TREE if no simplification was possible, otherwise return the
11694 simplified form of the call as a tree. If IGNORED is true, it means that
11695 the caller does not use the returned value of the function. */
11696
11697 static tree
11698 fold_builtin_sprintf (location_t loc, tree dest, tree fmt,
11699 tree orig, int ignored)
11700 {
11701 tree call, retval;
11702 const char *fmt_str = NULL;
11703
11704 /* Verify the required arguments in the original call. We deal with two
11705 types of sprintf() calls: 'sprintf (str, fmt)' and
11706 'sprintf (dest, "%s", orig)'. */
11707 if (!validate_arg (dest, POINTER_TYPE)
11708 || !validate_arg (fmt, POINTER_TYPE))
11709 return NULL_TREE;
11710 if (orig && !validate_arg (orig, POINTER_TYPE))
11711 return NULL_TREE;
11712
11713 /* Check whether the format is a literal string constant. */
11714 fmt_str = c_getstr (fmt);
11715 if (fmt_str == NULL)
11716 return NULL_TREE;
11717
11718 call = NULL_TREE;
11719 retval = NULL_TREE;
11720
11721 if (!init_target_chars ())
11722 return NULL_TREE;
11723
11724 /* If the format doesn't contain % args or %%, use strcpy. */
11725 if (strchr (fmt_str, target_percent) == NULL)
11726 {
11727 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11728
11729 if (!fn)
11730 return NULL_TREE;
11731
11732 /* Don't optimize sprintf (buf, "abc", ptr++). */
11733 if (orig)
11734 return NULL_TREE;
11735
11736 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
11737 'format' is known to contain no % formats. */
11738 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
11739 if (!ignored)
11740 retval = build_int_cst (NULL_TREE, strlen (fmt_str));
11741 }
11742
11743 /* If the format is "%s", use strcpy if the result isn't used. */
11744 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
11745 {
11746 tree fn;
11747 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11748
11749 if (!fn)
11750 return NULL_TREE;
11751
11752 /* Don't crash on sprintf (str1, "%s"). */
11753 if (!orig)
11754 return NULL_TREE;
11755
11756 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
11757 if (!ignored)
11758 {
11759 retval = c_strlen (orig, 1);
11760 if (!retval || TREE_CODE (retval) != INTEGER_CST)
11761 return NULL_TREE;
11762 }
11763 call = build_call_expr_loc (loc, fn, 2, dest, orig);
11764 }
11765
11766 if (call && retval)
11767 {
11768 retval = fold_convert_loc
11769 (loc, TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
11770 retval);
11771 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
11772 }
11773 else
11774 return call;
11775 }
11776
11777 /* Expand a call EXP to __builtin_object_size. */
11778
11779 rtx
11780 expand_builtin_object_size (tree exp)
11781 {
11782 tree ost;
11783 int object_size_type;
11784 tree fndecl = get_callee_fndecl (exp);
11785
11786 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11787 {
11788 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11789 exp, fndecl);
11790 expand_builtin_trap ();
11791 return const0_rtx;
11792 }
11793
11794 ost = CALL_EXPR_ARG (exp, 1);
11795 STRIP_NOPS (ost);
11796
11797 if (TREE_CODE (ost) != INTEGER_CST
11798 || tree_int_cst_sgn (ost) < 0
11799 || compare_tree_int (ost, 3) > 0)
11800 {
11801 error ("%Klast argument of %D is not integer constant between 0 and 3",
11802 exp, fndecl);
11803 expand_builtin_trap ();
11804 return const0_rtx;
11805 }
11806
11807 object_size_type = tree_low_cst (ost, 0);
11808
11809 return object_size_type < 2 ? constm1_rtx : const0_rtx;
11810 }
11811
/* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   FCODE is the BUILT_IN_* to use.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
			   enum built_in_function fcode)
{
  tree dest, src, len, size;

  /* For __memset_chk the second argument is the fill value, not a
     pointer.  */
  if (!validate_arglist (exp,
			 POINTER_TYPE,
			 fcode == BUILT_IN_MEMSET_CHK
			 ? INTEGER_TYPE : POINTER_TYPE,
			 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dest = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);
  len = CALL_EXPR_ARG (exp, 2);
  size = CALL_EXPR_ARG (exp, 3);

  /* The object size must be a compile-time constant to reason about.  */
  if (! host_integerp (size, 1))
    return NULL_RTX;

  if (host_integerp (len, 1) || integer_all_onesp (size))
    {
      tree fn;

      /* SIZE == (size_t) -1 means checking was disabled; otherwise a
	 constant LEN larger than SIZE is a guaranteed overflow -- warn
	 and keep the checking call.  */
      if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
	{
	  warning_at (tree_nonartificial_location (exp),
		      0, "%Kcall to %D will always overflow destination buffer",
		      exp, get_callee_fndecl (exp));
	  return NULL_RTX;
	}

      fn = NULL_TREE;
      /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
	 mem{cpy,pcpy,move,set} is available.  */
      switch (fcode)
	{
	case BUILT_IN_MEMCPY_CHK:
	  fn = built_in_decls[BUILT_IN_MEMCPY];
	  break;
	case BUILT_IN_MEMPCPY_CHK:
	  fn = built_in_decls[BUILT_IN_MEMPCPY];
	  break;
	case BUILT_IN_MEMMOVE_CHK:
	  fn = built_in_decls[BUILT_IN_MEMMOVE];
	  break;
	case BUILT_IN_MEMSET_CHK:
	  fn = built_in_decls[BUILT_IN_MEMSET];
	  break;
	default:
	  break;
	}

      if (! fn)
	return NULL_RTX;

      /* Expand the unchecked variant, preserving the original call's
	 tail-call flag.  */
      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_expr (fn, target, mode, EXPAND_NORMAL);
    }
  else if (fcode == BUILT_IN_MEMSET_CHK)
    return NULL_RTX;
  else
    {
      unsigned int dest_align
	= get_pointer_alignment (dest, BIGGEST_ALIGNMENT);

      /* If DEST is not a pointer type, call the normal function.  */
      if (dest_align == 0)
	return NULL_RTX;

      /* If SRC and DEST are the same (and not volatile), do nothing.  */
      if (operand_equal_p (src, dest, 0))
	{
	  tree expr;

	  if (fcode != BUILT_IN_MEMPCPY_CHK)
	    {
	      /* Evaluate and ignore LEN in case it has side-effects.  */
	      expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
	      return expand_expr (dest, target, mode, EXPAND_NORMAL);
	    }

	  /* __mempcpy_chk returns DEST + LEN rather than DEST.  */
	  expr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
	  return expand_expr (expr, target, mode, EXPAND_NORMAL);
	}

      /* __memmove_chk special case.  */
      if (fcode == BUILT_IN_MEMMOVE_CHK)
	{
	  unsigned int src_align
	    = get_pointer_alignment (src, BIGGEST_ALIGNMENT);

	  if (src_align == 0)
	    return NULL_RTX;

	  /* If src is categorized for a readonly section we can use
	     normal __memcpy_chk.  */
	  if (readonly_data_expr (src))
	    {
	      tree fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
	      if (!fn)
		return NULL_RTX;
	      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
					  dest, src, len, size);
	      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
	      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
	      return expand_expr (fn, target, mode, EXPAND_NORMAL);
	    }
	}
      return NULL_RTX;
    }
}
11933
/* Emit warning if a buffer overflow is detected at compile time.  */

static void
maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
{
  int is_strlen = 0;
  tree len, size;
  location_t loc = tree_nonartificial_location (exp);

  /* Pick out which call arguments hold the length to be written and the
     destination object size; the positions differ per builtin.  */
  switch (fcode)
    {
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
    /* For __strcat_chk the warning will be emitted only if overflowing
       by at least strlen (dest) + 1 bytes.  */
    case BUILT_IN_STRCAT_CHK:
      len = CALL_EXPR_ARG (exp, 1);
      size = CALL_EXPR_ARG (exp, 2);
      is_strlen = 1;
      break;
    case BUILT_IN_STRNCAT_CHK:
    case BUILT_IN_STRNCPY_CHK:
      len = CALL_EXPR_ARG (exp, 2);
      size = CALL_EXPR_ARG (exp, 3);
      break;
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      len = CALL_EXPR_ARG (exp, 1);
      size = CALL_EXPR_ARG (exp, 3);
      break;
    default:
      gcc_unreachable ();
    }

  if (!len || !size)
    return;

  /* SIZE == (size_t) -1 means the object size is unknown, so checking
     (and thus warning) is disabled.  */
  if (! host_integerp (size, 1) || integer_all_onesp (size))
    return;

  if (is_strlen)
    {
      /* LEN is the source string here; strlen (LEN) + 1 bytes will be
	 written, so overflow is certain once strlen (LEN) >= SIZE.  */
      len = c_strlen (len, 1);
      if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
	return;
    }
  else if (fcode == BUILT_IN_STRNCAT_CHK)
    {
      tree src = CALL_EXPR_ARG (exp, 1);
      /* Only consider warning when LEN is known to be at least SIZE.  */
      if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
	return;
      src = c_strlen (src, 1);
      if (! src || ! host_integerp (src, 1))
	{
	  /* Unknown source length: the strncat may or may not overflow,
	     so use the weaker "might overflow" wording.  */
	  warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
		      exp, get_callee_fndecl (exp));
	  return;
	}
      else if (tree_int_cst_lt (src, size))
	return;
    }
  else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
    return;

  warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
	      exp, get_callee_fndecl (exp));
}
12001
/* Emit warning if a buffer overflow is detected at compile time
   in __sprintf_chk/__vsprintf_chk calls.  */

static void
maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
{
  tree size, len, fmt;
  const char *fmt_str;
  int nargs = call_expr_nargs (exp);

  /* Verify the required arguments in the original call.  */

  if (nargs < 4)
    return;
  size = CALL_EXPR_ARG (exp, 2);
  fmt = CALL_EXPR_ARG (exp, 3);

  /* Non-constant SIZE, or SIZE == (size_t) -1 (checking disabled),
     means no warning is possible.  */
  if (! host_integerp (size, 1) || integer_all_onesp (size))
    return;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return;

  if (!init_target_chars ())
    return;

  /* If the format doesn't contain % args or %%, we know its size.  */
  if (strchr (fmt_str, target_percent) == 0)
    len = build_int_cstu (size_type_node, strlen (fmt_str));
  /* If the format is "%s" and first ... argument is a string literal,
     we know it too.  */
  else if (fcode == BUILT_IN_SPRINTF_CHK
	   && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree arg;

      if (nargs < 5)
	return;
      arg = CALL_EXPR_ARG (exp, 4);
      if (! POINTER_TYPE_P (TREE_TYPE (arg)))
	return;

      len = c_strlen (arg, 1);
      if (!len || ! host_integerp (len, 1))
	return;
    }
  else
    return;

  /* LEN bytes plus the terminating NUL are written, so overflow is
     certain whenever LEN >= SIZE.  */
  if (! tree_int_cst_lt (len, size))
    warning_at (tree_nonartificial_location (exp),
		0, "%Kcall to %D will always overflow destination buffer",
		exp, get_callee_fndecl (exp));
}
12058
12059 /* Emit warning if a free is called with address of a variable. */
12060
12061 static void
12062 maybe_emit_free_warning (tree exp)
12063 {
12064 tree arg = CALL_EXPR_ARG (exp, 0);
12065
12066 STRIP_NOPS (arg);
12067 if (TREE_CODE (arg) != ADDR_EXPR)
12068 return;
12069
12070 arg = get_base_address (TREE_OPERAND (arg, 0));
12071 if (arg == NULL || INDIRECT_REF_P (arg))
12072 return;
12073
12074 if (SSA_VAR_P (arg))
12075 warning_at (tree_nonartificial_location (exp),
12076 0, "%Kattempt to free a non-heap object %qD", exp, arg);
12077 else
12078 warning_at (tree_nonartificial_location (exp),
12079 0, "%Kattempt to free a non-heap object", exp);
12080 }
12081
/* Fold a call to __builtin_object_size with arguments PTR and OST,
   if possible.  */

tree
fold_builtin_object_size (tree ptr, tree ost)
{
  tree ret = NULL_TREE;
  int object_size_type;

  if (!validate_arg (ptr, POINTER_TYPE)
      || !validate_arg (ost, INTEGER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (ost);

  /* The object-size type must be a constant in [0, 3].  */
  if (TREE_CODE (ost) != INTEGER_CST
      || tree_int_cst_sgn (ost) < 0
      || compare_tree_int (ost, 3) > 0)
    return NULL_TREE;

  object_size_type = tree_low_cst (ost, 0);

  /* __builtin_object_size doesn't evaluate side-effects in its arguments;
     if there are any side-effects, it returns (size_t) -1 for types 0 and 1
     and (size_t) 0 for types 2 and 3.  */
  if (TREE_SIDE_EFFECTS (ptr))
    return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);

  if (TREE_CODE (ptr) == ADDR_EXPR)
    ret = build_int_cstu (size_type_node,
			  compute_builtin_object_size (ptr, object_size_type));

  else if (TREE_CODE (ptr) == SSA_NAME)
    {
      unsigned HOST_WIDE_INT bytes;

      /* If object size is not known yet, delay folding until
	 later.  Maybe subsequent passes will help determining
	 it.  */
      bytes = compute_builtin_object_size (ptr, object_size_type);
      if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2
					     ? -1 : 0))
	ret = build_int_cstu (size_type_node, bytes);
    }

  if (ret)
    {
      unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (ret);
      HOST_WIDE_INT high = TREE_INT_CST_HIGH (ret);
      /* Discard the folded constant if it doesn't fit in size_t.  */
      if (fit_double_type (low, high, &low, &high, TREE_TYPE (ret)))
	ret = NULL_TREE;
    }

  return ret;
}
12137
/* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   DEST, SRC, LEN, and SIZE are the arguments to the call.
   IGNORE is true, if return value can be ignored.  FCODE is the BUILT_IN_*
   code of the builtin.  If MAXLEN is not NULL, it is maximum length
   passed as third argument.  */

tree
fold_builtin_memory_chk (location_t loc, tree fndecl,
			 tree dest, tree src, tree len, tree size,
			 tree maxlen, bool ignore,
			 enum built_in_function fcode)
{
  tree fn;

  /* For __memset_chk the second argument is the fill byte, not a
     pointer.  */
  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (src,
			(fcode == BUILT_IN_MEMSET_CHK
			 ? INTEGER_TYPE : POINTER_TYPE))
      || !validate_arg (len, INTEGER_TYPE)
      || !validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;

  /* If SRC and DEST are the same (and not volatile), return DEST
     (resp. DEST+LEN for __mempcpy_chk).  */
  if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
    {
      if (fcode != BUILT_IN_MEMPCPY_CHK)
	return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
				     dest, len);
      else
	{
	  tree temp = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest),
				       dest, len);
	  return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), temp);
	}
    }

  /* Without a constant object size there is nothing to reason about.  */
  if (! host_integerp (size, 1))
    return NULL_TREE;

  if (! integer_all_onesp (size))
    {
      /* SIZE == (size_t) -1 disables checking; otherwise we must prove
	 LEN (or MAXLEN) <= SIZE before dropping the check.  */
      if (! host_integerp (len, 1))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
	    {
	      if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
		{
		  /* (void) __mempcpy_chk () can be optimized into
		     (void) __memcpy_chk ().  */
		  fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
		  if (!fn)
		    return NULL_TREE;

		  return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
		}
	      return NULL_TREE;
	    }
	}
      else
	maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
	return NULL_TREE;
    }

  fn = NULL_TREE;
  /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
     mem{cpy,pcpy,move,set} is available.  */
  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
      fn = built_in_decls[BUILT_IN_MEMCPY];
      break;
    case BUILT_IN_MEMPCPY_CHK:
      fn = built_in_decls[BUILT_IN_MEMPCPY];
      break;
    case BUILT_IN_MEMMOVE_CHK:
      fn = built_in_decls[BUILT_IN_MEMMOVE];
      break;
    case BUILT_IN_MEMSET_CHK:
      fn = built_in_decls[BUILT_IN_MEMSET];
      break;
    default:
      break;
    }

  if (!fn)
    return NULL_TREE;

  /* The length is proven safe (or checking is disabled): call the
     unchecked variant.  */
  return build_call_expr_loc (loc, fn, 3, dest, src, len);
}
12233
/* Fold a call to the __st[rp]cpy_chk builtin.
   DEST, SRC, and SIZE are the arguments to the call.
   IGNORE is true if return value can be ignored.  FCODE is the BUILT_IN_*
   code of the builtin.  If MAXLEN is not NULL, it is maximum length of
   strings passed as second argument.  */

tree
fold_builtin_stxcpy_chk (location_t loc, tree fndecl, tree dest,
			 tree src, tree size,
			 tree maxlen, bool ignore,
			 enum built_in_function fcode)
{
  tree len, fn;

  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (src, POINTER_TYPE)
      || !validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
    return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);

  /* Without a constant object size there is nothing to check against.  */
  if (! host_integerp (size, 1))
    return NULL_TREE;

  if (! integer_all_onesp (size))
    {
      /* SIZE == (size_t) -1 disables checking; otherwise try to prove
	 the copy fits using the constant source length (or MAXLEN).  */
      len = c_strlen (src, 1);
      if (! len || ! host_integerp (len, 1))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
	    {
	      if (fcode == BUILT_IN_STPCPY_CHK)
		{
		  if (! ignore)
		    return NULL_TREE;

		  /* If return value of __stpcpy_chk is ignored,
		     optimize into __strcpy_chk.  */
		  fn = built_in_decls[BUILT_IN_STRCPY_CHK];
		  if (!fn)
		    return NULL_TREE;

		  return build_call_expr_loc (loc, fn, 3, dest, src, size);
		}

	      if (! len || TREE_SIDE_EFFECTS (len))
		return NULL_TREE;

	      /* If c_strlen returned something, but not a constant,
		 transform __strcpy_chk into __memcpy_chk.  */
	      fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
	      if (!fn)
		return NULL_TREE;

	      /* Copy strlen (SRC) + 1 bytes to include the NUL.  */
	      len = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
	      return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
				       build_call_expr_loc (loc, fn, 4,
							    dest, src, len, size));
	    }
	}
      else
	maxlen = len;

      /* The string plus its NUL must fit: require MAXLEN < SIZE.  */
      if (! tree_int_cst_lt (maxlen, size))
	return NULL_TREE;
    }

  /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available.  */
  fn = built_in_decls[fcode == BUILT_IN_STPCPY_CHK
		      ? BUILT_IN_STPCPY : BUILT_IN_STRCPY];
  if (!fn)
    return NULL_TREE;

  return build_call_expr_loc (loc, fn, 2, dest, src);
}
12314
12315 /* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
12316 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12317 length passed as third argument. */
12318
12319 tree
12320 fold_builtin_strncpy_chk (location_t loc, tree dest, tree src,
12321 tree len, tree size, tree maxlen)
12322 {
12323 tree fn;
12324
12325 if (!validate_arg (dest, POINTER_TYPE)
12326 || !validate_arg (src, POINTER_TYPE)
12327 || !validate_arg (len, INTEGER_TYPE)
12328 || !validate_arg (size, INTEGER_TYPE))
12329 return NULL_TREE;
12330
12331 if (! host_integerp (size, 1))
12332 return NULL_TREE;
12333
12334 if (! integer_all_onesp (size))
12335 {
12336 if (! host_integerp (len, 1))
12337 {
12338 /* If LEN is not constant, try MAXLEN too.
12339 For MAXLEN only allow optimizing into non-_ocs function
12340 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12341 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12342 return NULL_TREE;
12343 }
12344 else
12345 maxlen = len;
12346
12347 if (tree_int_cst_lt (size, maxlen))
12348 return NULL_TREE;
12349 }
12350
12351 /* If __builtin_strncpy_chk is used, assume strncpy is available. */
12352 fn = built_in_decls[BUILT_IN_STRNCPY];
12353 if (!fn)
12354 return NULL_TREE;
12355
12356 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12357 }
12358
12359 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12360 are the arguments to the call. */
12361
12362 static tree
12363 fold_builtin_strcat_chk (location_t loc, tree fndecl, tree dest,
12364 tree src, tree size)
12365 {
12366 tree fn;
12367 const char *p;
12368
12369 if (!validate_arg (dest, POINTER_TYPE)
12370 || !validate_arg (src, POINTER_TYPE)
12371 || !validate_arg (size, INTEGER_TYPE))
12372 return NULL_TREE;
12373
12374 p = c_getstr (src);
12375 /* If the SRC parameter is "", return DEST. */
12376 if (p && *p == '\0')
12377 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12378
12379 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
12380 return NULL_TREE;
12381
12382 /* If __builtin_strcat_chk is used, assume strcat is available. */
12383 fn = built_in_decls[BUILT_IN_STRCAT];
12384 if (!fn)
12385 return NULL_TREE;
12386
12387 return build_call_expr_loc (loc, fn, 2, dest, src);
12388 }
12389
12390 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12391 LEN, and SIZE. */
12392
12393 static tree
12394 fold_builtin_strncat_chk (location_t loc, tree fndecl,
12395 tree dest, tree src, tree len, tree size)
12396 {
12397 tree fn;
12398 const char *p;
12399
12400 if (!validate_arg (dest, POINTER_TYPE)
12401 || !validate_arg (src, POINTER_TYPE)
12402 || !validate_arg (size, INTEGER_TYPE)
12403 || !validate_arg (size, INTEGER_TYPE))
12404 return NULL_TREE;
12405
12406 p = c_getstr (src);
12407 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
12408 if (p && *p == '\0')
12409 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12410 else if (integer_zerop (len))
12411 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12412
12413 if (! host_integerp (size, 1))
12414 return NULL_TREE;
12415
12416 if (! integer_all_onesp (size))
12417 {
12418 tree src_len = c_strlen (src, 1);
12419 if (src_len
12420 && host_integerp (src_len, 1)
12421 && host_integerp (len, 1)
12422 && ! tree_int_cst_lt (len, src_len))
12423 {
12424 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
12425 fn = built_in_decls[BUILT_IN_STRCAT_CHK];
12426 if (!fn)
12427 return NULL_TREE;
12428
12429 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12430 }
12431 return NULL_TREE;
12432 }
12433
12434 /* If __builtin_strncat_chk is used, assume strncat is available. */
12435 fn = built_in_decls[BUILT_IN_STRNCAT];
12436 if (!fn)
12437 return NULL_TREE;
12438
12439 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12440 }
12441
/* Fold a call EXP to __{,v}sprintf_chk.  Return NULL_TREE if
   a normal call should be emitted rather than expanding the function
   inline.  FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK.  */

static tree
fold_builtin_sprintf_chk (location_t loc, tree exp,
			  enum built_in_function fcode)
{
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;
  int nargs = call_expr_nargs (exp);

  /* Verify the required arguments in the original call: the call is
     __sprintf_chk (dest, flag, size, fmt, ...).  */
  if (nargs < 4)
    return NULL_TREE;
  dest = CALL_EXPR_ARG (exp, 0);
  if (!validate_arg (dest, POINTER_TYPE))
    return NULL_TREE;
  flag = CALL_EXPR_ARG (exp, 1);
  if (!validate_arg (flag, INTEGER_TYPE))
    return NULL_TREE;
  size = CALL_EXPR_ARG (exp, 2);
  if (!validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;
  fmt = CALL_EXPR_ARG (exp, 3);
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  if (! host_integerp (size, 1))
    return NULL_TREE;

  len = NULL_TREE;

  if (!init_target_chars ())
    return NULL_TREE;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str != NULL)
    {
      /* If the format doesn't contain % args or %%, we know the size.  */
      if (strchr (fmt_str, target_percent) == 0)
	{
	  if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
	    len = build_int_cstu (size_type_node, strlen (fmt_str));
	}
      /* If the format is "%s" and first ... argument is a string literal,
	 we know the size too.  */
      else if (fcode == BUILT_IN_SPRINTF_CHK
	       && strcmp (fmt_str, target_percent_s) == 0)
	{
	  tree arg;

	  if (nargs == 5)
	    {
	      arg = CALL_EXPR_ARG (exp, 4);
	      if (validate_arg (arg, POINTER_TYPE))
		{
		  len = c_strlen (arg, 1);
		  if (! len || ! host_integerp (len, 1))
		    len = NULL_TREE;
		}
	    }
	}
    }

  /* Unless checking was disabled (SIZE == (size_t) -1), the output
     length must be known and provably fit.  */
  if (! integer_all_onesp (size))
    {
      if (! len || ! tree_int_cst_lt (len, size))
	return NULL_TREE;
    }

  /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      if (fmt_str == NULL)
	return NULL_TREE;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return NULL_TREE;
    }

  /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available.  */
  fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
		      ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
  if (!fn)
    return NULL_TREE;

  /* Rebuild the call, skipping the 4 leading checked arguments and
     passing DEST and FMT plus the remaining varargs.  */
  return rewrite_call_expr (loc, exp, 4, fn, 2, dest, fmt);
}
12533
/* Fold a call EXP to {,v}snprintf.  Return NULL_TREE if
   a normal call should be emitted rather than expanding the function
   inline.  FCODE is either BUILT_IN_SNPRINTF_CHK or
   BUILT_IN_VSNPRINTF_CHK.  If MAXLEN is not NULL, it is maximum length
   passed as second argument.  */

tree
fold_builtin_snprintf_chk (location_t loc, tree exp, tree maxlen,
			   enum built_in_function fcode)
{
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;

  /* Verify the required arguments in the original call: the call is
     __snprintf_chk (dest, len, flag, size, fmt, ...).  */
  if (call_expr_nargs (exp) < 5)
    return NULL_TREE;
  dest = CALL_EXPR_ARG (exp, 0);
  if (!validate_arg (dest, POINTER_TYPE))
    return NULL_TREE;
  len = CALL_EXPR_ARG (exp, 1);
  if (!validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;
  flag = CALL_EXPR_ARG (exp, 2);
  if (!validate_arg (flag, INTEGER_TYPE))
    return NULL_TREE;
  size = CALL_EXPR_ARG (exp, 3);
  if (!validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;
  fmt = CALL_EXPR_ARG (exp, 4);
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  if (! host_integerp (size, 1))
    return NULL_TREE;

  if (! integer_all_onesp (size))
    {
      /* SIZE == (size_t) -1 disables checking; otherwise prove the
	 bound LEN (or MAXLEN) fits into SIZE.  */
      if (! host_integerp (len, 1))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
	    return NULL_TREE;
	}
      else
	maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
	return NULL_TREE;
    }

  if (!init_target_chars ())
    return NULL_TREE;

  /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      fmt_str = c_getstr (fmt);
      if (fmt_str == NULL)
	return NULL_TREE;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return NULL_TREE;
    }

  /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
     available.  */
  fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
		      ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
  if (!fn)
    return NULL_TREE;

  /* Rebuild the call, skipping the 5 leading checked arguments and
     passing DEST, LEN, FMT plus the remaining varargs.  */
  return rewrite_call_expr (loc, exp, 5, fn, 3, dest, len, fmt);
}
12610
/* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
   FMT and ARG are the arguments to the call; we don't fold cases with
   more than 2 arguments, and ARG may be null if this is a 1-argument case.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.  FCODE is the BUILT_IN_*
   code of the function to be simplified.  */

static tree
fold_builtin_printf (location_t loc, tree fndecl, tree fmt,
		     tree arg, bool ignore,
		     enum built_in_function fcode)
{
  tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (! ignore)
    return NULL_TREE;

  /* Verify the required arguments in the original call.  */
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return NULL_TREE;

  if (fcode == BUILT_IN_PRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_putchar = built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED];
      fn_puts = built_in_decls[BUILT_IN_PUTS_UNLOCKED];
    }
  else
    {
      fn_putchar = implicit_built_in_decls[BUILT_IN_PUTCHAR];
      fn_puts = implicit_built_in_decls[BUILT_IN_PUTS];
    }

  if (!init_target_chars ())
    return NULL_TREE;

  /* First case: format is exactly "%s", or contains no % at all.  */
  if (strcmp (fmt_str, target_percent_s) == 0
      || strchr (fmt_str, target_percent) == NULL)
    {
      const char *str;

      if (strcmp (fmt_str, target_percent_s) == 0)
	{
	  /* printf ("%s", arg): the va_list variants can't inspect ARG.  */
	  if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
	    return NULL_TREE;

	  if (!arg || !validate_arg (arg, POINTER_TYPE))
	    return NULL_TREE;

	  /* ARG must also be a string literal for us to know the output.  */
	  str = c_getstr (arg);
	  if (str == NULL)
	    return NULL_TREE;
	}
      else
	{
	  /* The format specifier doesn't contain any '%' characters.  */
	  if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
	      && arg)
	    return NULL_TREE;
	  str = fmt_str;
	}

      /* If the string was "", printf does nothing.  */
      if (str[0] == '\0')
	return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);

      /* If the string has length of 1, call putchar.  */
      if (str[1] == '\0')
	{
	  /* Given printf("c"), (where c is any one character,)
	     convert "c"[0] to an int and pass that to the replacement
	     function.  */
	  newarg = build_int_cst (NULL_TREE, str[0]);
	  if (fn_putchar)
	    call = build_call_expr_loc (loc, fn_putchar, 1, newarg);
	}
      else
	{
	  /* If the string was "string\n", call puts("string").  */
	  size_t len = strlen (str);
	  if ((unsigned char)str[len - 1] == target_newline)
	    {
	      /* Create a NUL-terminated string that's one char shorter
		 than the original, stripping off the trailing '\n'.  */
	      char *newstr = XALLOCAVEC (char, len);
	      memcpy (newstr, str, len - 1);
	      newstr[len - 1] = 0;

	      newarg = build_string_literal (len, newstr);
	      if (fn_puts)
		call = build_call_expr_loc (loc, fn_puts, 1, newarg);
	    }
	  else
	    /* We'd like to arrange to call fputs(string,stdout) here,
	       but we need stdout and don't have a way to get it yet.  */
	    return NULL_TREE;
	}
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
    return NULL_TREE;

  /* If the format specifier was "%s\n", call __builtin_puts(arg).  */
  else if (strcmp (fmt_str, target_percent_s_newline) == 0)
    {
      if (!arg || !validate_arg (arg, POINTER_TYPE))
	return NULL_TREE;
      if (fn_puts)
	call = build_call_expr_loc (loc, fn_puts, 1, arg);
    }

  /* If the format specifier was "%c", call __builtin_putchar(arg).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg || !validate_arg (arg, INTEGER_TYPE))
	return NULL_TREE;
      if (fn_putchar)
	call = build_call_expr_loc (loc, fn_putchar, 1, arg);
    }

  if (!call)
    return NULL_TREE;

  /* Coerce the replacement call's value to printf's return type.  */
  return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
}
12746
12747 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
12748 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
12749 more than 3 arguments, and ARG may be null in the 2-argument case.
12750
12751 Return NULL_TREE if no simplification was possible, otherwise return the
12752 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12753 code of the function to be simplified. */
12754
12755 static tree
12756 fold_builtin_fprintf (location_t loc, tree fndecl, tree fp,
12757 tree fmt, tree arg, bool ignore,
12758 enum built_in_function fcode)
12759 {
12760 tree fn_fputc, fn_fputs, call = NULL_TREE;
12761 const char *fmt_str = NULL;
12762
12763 /* If the return value is used, don't do the transformation. */
12764 if (! ignore)
12765 return NULL_TREE;
12766
12767 /* Verify the required arguments in the original call. */
12768 if (!validate_arg (fp, POINTER_TYPE))
12769 return NULL_TREE;
12770 if (!validate_arg (fmt, POINTER_TYPE))
12771 return NULL_TREE;
12772
12773 /* Check whether the format is a literal string constant. */
12774 fmt_str = c_getstr (fmt);
12775 if (fmt_str == NULL)
12776 return NULL_TREE;
12777
12778 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
12779 {
12780 /* If we're using an unlocked function, assume the other
12781 unlocked functions exist explicitly. */
12782 fn_fputc = built_in_decls[BUILT_IN_FPUTC_UNLOCKED];
12783 fn_fputs = built_in_decls[BUILT_IN_FPUTS_UNLOCKED];
12784 }
12785 else
12786 {
12787 fn_fputc = implicit_built_in_decls[BUILT_IN_FPUTC];
12788 fn_fputs = implicit_built_in_decls[BUILT_IN_FPUTS];
12789 }
12790
12791 if (!init_target_chars ())
12792 return NULL_TREE;
12793
12794 /* If the format doesn't contain % args or %%, use strcpy. */
12795 if (strchr (fmt_str, target_percent) == NULL)
12796 {
12797 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
12798 && arg)
12799 return NULL_TREE;
12800
12801 /* If the format specifier was "", fprintf does nothing. */
12802 if (fmt_str[0] == '\0')
12803 {
12804 /* If FP has side-effects, just wait until gimplification is
12805 done. */
12806 if (TREE_SIDE_EFFECTS (fp))
12807 return NULL_TREE;
12808
12809 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12810 }
12811
12812 /* When "string" doesn't contain %, replace all cases of
12813 fprintf (fp, string) with fputs (string, fp). The fputs
12814 builtin will take care of special cases like length == 1. */
12815 if (fn_fputs)
12816 call = build_call_expr_loc (loc, fn_fputs, 2, fmt, fp);
12817 }
12818
12819 /* The other optimizations can be done only on the non-va_list variants. */
12820 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
12821 return NULL_TREE;
12822
12823 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
12824 else if (strcmp (fmt_str, target_percent_s) == 0)
12825 {
12826 if (!arg || !validate_arg (arg, POINTER_TYPE))
12827 return NULL_TREE;
12828 if (fn_fputs)
12829 call = build_call_expr_loc (loc, fn_fputs, 2, arg, fp);
12830 }
12831
12832 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
12833 else if (strcmp (fmt_str, target_percent_c) == 0)
12834 {
12835 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12836 return NULL_TREE;
12837 if (fn_fputc)
12838 call = build_call_expr_loc (loc, fn_fputc, 2, arg, fp);
12839 }
12840
12841 if (!call)
12842 return NULL_TREE;
12843 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
12844 }
12845
12846 /* Initialize format string characters in the target charset. */
12847
12848 static bool
12849 init_target_chars (void)
12850 {
12851 static bool init;
12852 if (!init)
12853 {
12854 target_newline = lang_hooks.to_target_charset ('\n');
12855 target_percent = lang_hooks.to_target_charset ('%');
12856 target_c = lang_hooks.to_target_charset ('c');
12857 target_s = lang_hooks.to_target_charset ('s');
12858 if (target_newline == 0 || target_percent == 0 || target_c == 0
12859 || target_s == 0)
12860 return false;
12861
12862 target_percent_c[0] = target_percent;
12863 target_percent_c[1] = target_c;
12864 target_percent_c[2] = '\0';
12865
12866 target_percent_s[0] = target_percent;
12867 target_percent_s[1] = target_s;
12868 target_percent_s[2] = '\0';
12869
12870 target_percent_s_newline[0] = target_percent;
12871 target_percent_s_newline[1] = target_s;
12872 target_percent_s_newline[2] = target_newline;
12873 target_percent_s_newline[3] = '\0';
12874
12875 init = true;
12876 }
12877 return true;
12878 }
12879
12880 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
12881 and no overflow/underflow occurred. INEXACT is true if M was not
12882 exactly calculated. TYPE is the tree type for the result. This
12883 function assumes that you cleared the MPFR flags and then
12884 calculated M to see if anything subsequently set a flag prior to
12885 entering this function. Return NULL_TREE if any checks fail. */
12886
12887 static tree
12888 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
12889 {
12890 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12891 overflow/underflow occurred. If -frounding-math, proceed iff the
12892 result of calling FUNC was exact. */
12893 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
12894 && (!flag_rounding_math || !inexact))
12895 {
12896 REAL_VALUE_TYPE rr;
12897
12898 real_from_mpfr (&rr, m, type, GMP_RNDN);
12899 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
12900 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12901 but the mpft_t is not, then we underflowed in the
12902 conversion. */
12903 if (real_isfinite (&rr)
12904 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
12905 {
12906 REAL_VALUE_TYPE rmode;
12907
12908 real_convert (&rmode, TYPE_MODE (type), &rr);
12909 /* Proceed iff the specified mode can hold the value. */
12910 if (real_identical (&rmode, &rr))
12911 return build_real (type, rmode);
12912 }
12913 }
12914 return NULL_TREE;
12915 }
12916
12917 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
12918 number and no overflow/underflow occurred. INEXACT is true if M
12919 was not exactly calculated. TYPE is the tree type for the result.
12920 This function assumes that you cleared the MPFR flags and then
12921 calculated M to see if anything subsequently set a flag prior to
12922 entering this function. Return NULL_TREE if any checks fail, if
12923 FORCE_CONVERT is true, then bypass the checks. */
12924
12925 static tree
12926 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
12927 {
12928 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12929 overflow/underflow occurred. If -frounding-math, proceed iff the
12930 result of calling FUNC was exact. */
12931 if (force_convert
12932 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
12933 && !mpfr_overflow_p () && !mpfr_underflow_p ()
12934 && (!flag_rounding_math || !inexact)))
12935 {
12936 REAL_VALUE_TYPE re, im;
12937
12938 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
12939 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
12940 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
12941 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12942 but the mpft_t is not, then we underflowed in the
12943 conversion. */
12944 if (force_convert
12945 || (real_isfinite (&re) && real_isfinite (&im)
12946 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
12947 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
12948 {
12949 REAL_VALUE_TYPE re_mode, im_mode;
12950
12951 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
12952 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
12953 /* Proceed iff the specified mode can hold the value. */
12954 if (force_convert
12955 || (real_identical (&re_mode, &re)
12956 && real_identical (&im_mode, &im)))
12957 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
12958 build_real (TREE_TYPE (type), im_mode));
12959 }
12960 }
12961 return NULL_TREE;
12962 }
12963
12964 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
12965 FUNC on it and return the resulting value as a tree with type TYPE.
12966 If MIN and/or MAX are not NULL, then the supplied ARG must be
12967 within those bounds. If INCLUSIVE is true, then MIN/MAX are
12968 acceptable values, otherwise they are not. The mpfr precision is
12969 set to the precision of TYPE. We assume that function FUNC returns
12970 zero if the result could be calculated exactly within the requested
12971 precision. */
12972
12973 static tree
12974 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
12975 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
12976 bool inclusive)
12977 {
12978 tree result = NULL_TREE;
12979
12980 STRIP_NOPS (arg);
12981
12982 /* To proceed, MPFR must exactly represent the target floating point
12983 format, which only happens when the target base equals two. */
12984 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12985 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
12986 {
12987 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12988
12989 if (real_isfinite (ra)
12990 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
12991 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
12992 {
12993 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12994 const int prec = fmt->p;
12995 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12996 int inexact;
12997 mpfr_t m;
12998
12999 mpfr_init2 (m, prec);
13000 mpfr_from_real (m, ra, GMP_RNDN);
13001 mpfr_clear_flags ();
13002 inexact = func (m, m, rnd);
13003 result = do_mpfr_ckconv (m, type, inexact);
13004 mpfr_clear (m);
13005 }
13006 }
13007
13008 return result;
13009 }
13010
13011 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
13012 FUNC on it and return the resulting value as a tree with type TYPE.
13013 The mpfr precision is set to the precision of TYPE. We assume that
13014 function FUNC returns zero if the result could be calculated
13015 exactly within the requested precision. */
13016
13017 static tree
13018 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
13019 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13020 {
13021 tree result = NULL_TREE;
13022
13023 STRIP_NOPS (arg1);
13024 STRIP_NOPS (arg2);
13025
13026 /* To proceed, MPFR must exactly represent the target floating point
13027 format, which only happens when the target base equals two. */
13028 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13029 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13030 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13031 {
13032 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13033 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13034
13035 if (real_isfinite (ra1) && real_isfinite (ra2))
13036 {
13037 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13038 const int prec = fmt->p;
13039 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13040 int inexact;
13041 mpfr_t m1, m2;
13042
13043 mpfr_inits2 (prec, m1, m2, NULL);
13044 mpfr_from_real (m1, ra1, GMP_RNDN);
13045 mpfr_from_real (m2, ra2, GMP_RNDN);
13046 mpfr_clear_flags ();
13047 inexact = func (m1, m1, m2, rnd);
13048 result = do_mpfr_ckconv (m1, type, inexact);
13049 mpfr_clears (m1, m2, NULL);
13050 }
13051 }
13052
13053 return result;
13054 }
13055
13056 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
13057 FUNC on it and return the resulting value as a tree with type TYPE.
13058 The mpfr precision is set to the precision of TYPE. We assume that
13059 function FUNC returns zero if the result could be calculated
13060 exactly within the requested precision. */
13061
13062 static tree
13063 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
13064 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13065 {
13066 tree result = NULL_TREE;
13067
13068 STRIP_NOPS (arg1);
13069 STRIP_NOPS (arg2);
13070 STRIP_NOPS (arg3);
13071
13072 /* To proceed, MPFR must exactly represent the target floating point
13073 format, which only happens when the target base equals two. */
13074 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13075 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13076 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
13077 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
13078 {
13079 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13080 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13081 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
13082
13083 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
13084 {
13085 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13086 const int prec = fmt->p;
13087 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13088 int inexact;
13089 mpfr_t m1, m2, m3;
13090
13091 mpfr_inits2 (prec, m1, m2, m3, NULL);
13092 mpfr_from_real (m1, ra1, GMP_RNDN);
13093 mpfr_from_real (m2, ra2, GMP_RNDN);
13094 mpfr_from_real (m3, ra3, GMP_RNDN);
13095 mpfr_clear_flags ();
13096 inexact = func (m1, m1, m2, m3, rnd);
13097 result = do_mpfr_ckconv (m1, type, inexact);
13098 mpfr_clears (m1, m2, m3, NULL);
13099 }
13100 }
13101
13102 return result;
13103 }
13104
/* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
   the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
   If ARG_SINP and ARG_COSP are NULL then the result is returned
   as a complex value.
   The type is taken from the type of ARG and is used for setting the
   precision of the calculation and results.

   Returns NULL_TREE when folding is not possible (non-constant or
   non-finite argument, non-binary target format, conversion failure,
   or mismatched pointer types).  */

static tree
do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
{
  tree const type = TREE_TYPE (arg);
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST
      && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);

      if (real_isfinite (ra))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  tree result_s, result_c;
	  int inexact;
	  mpfr_t m, ms, mc;

	  /* Compute sin and cos simultaneously at the target precision.
	     The flags are cleared first so do_mpfr_ckconv can detect
	     overflow/underflow; mpfr_sin_cos's INEXACT covers both
	     results.  */
	  mpfr_inits2 (prec, m, ms, mc, NULL);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  mpfr_clear_flags ();
	  inexact = mpfr_sin_cos (ms, mc, m, rnd);
	  result_s = do_mpfr_ckconv (ms, type, inexact);
	  result_c = do_mpfr_ckconv (mc, type, inexact);
	  mpfr_clears (m, ms, mc, NULL);
	  if (result_s && result_c)
	    {
	      /* If we are to return in a complex value do so.  Note the
		 real part is cos, the imaginary part sin (cexpi
		 semantics).  */
	      if (!arg_sinp && !arg_cosp)
		return build_complex (build_complex_type (type),
				      result_c, result_s);

	      /* Dereference the sin/cos pointer arguments.  */
	      arg_sinp = build_fold_indirect_ref (arg_sinp);
	      arg_cosp = build_fold_indirect_ref (arg_cosp);
	      /* Proceed if valid pointer type were passed in.  */
	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
		  && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
	        {
		  /* Set the values, marking each assignment as having
		     side effects so it is not optimized away.  */
		  result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
					  result_s);
		  TREE_SIDE_EFFECTS (result_s) = 1;
		  result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
					  result_c);
		  TREE_SIDE_EFFECTS (result_c) = 1;
		  /* Combine the assignments into a compound expr.  */
		  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						    result_s, result_c));
		}
	    }
	}
    }
  return result;
}
13174
13175 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
13176 two-argument mpfr order N Bessel function FUNC on them and return
13177 the resulting value as a tree with type TYPE. The mpfr precision
13178 is set to the precision of TYPE. We assume that function FUNC
13179 returns zero if the result could be calculated exactly within the
13180 requested precision. */
13181 static tree
13182 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
13183 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
13184 const REAL_VALUE_TYPE *min, bool inclusive)
13185 {
13186 tree result = NULL_TREE;
13187
13188 STRIP_NOPS (arg1);
13189 STRIP_NOPS (arg2);
13190
13191 /* To proceed, MPFR must exactly represent the target floating point
13192 format, which only happens when the target base equals two. */
13193 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13194 && host_integerp (arg1, 0)
13195 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13196 {
13197 const HOST_WIDE_INT n = tree_low_cst(arg1, 0);
13198 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
13199
13200 if (n == (long)n
13201 && real_isfinite (ra)
13202 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
13203 {
13204 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13205 const int prec = fmt->p;
13206 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13207 int inexact;
13208 mpfr_t m;
13209
13210 mpfr_init2 (m, prec);
13211 mpfr_from_real (m, ra, GMP_RNDN);
13212 mpfr_clear_flags ();
13213 inexact = func (m, n, m, rnd);
13214 result = do_mpfr_ckconv (m, type, inexact);
13215 mpfr_clear (m);
13216 }
13217 }
13218
13219 return result;
13220 }
13221
/* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
   the pointer *(ARG_QUO) and return the result.  The type is taken
   from the type of ARG0 and is used for setting the precision of the
   calculation and results.

   Returns NULL_TREE when folding is not possible (non-constant or
   non-finite arguments, non-binary target format, or ARG_QUO not
   pointing to an int).  */

static tree
do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
{
  tree const type = TREE_TYPE (arg0);
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
    {
      const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
      const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);

      if (real_isfinite (ra0) && real_isfinite (ra1))
        {
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  tree result_rem;
	  long integer_quo;
	  mpfr_t m0, m1;

	  /* Compute the remainder and quotient at the target precision;
	     the flags are cleared first so do_mpfr_ckconv can detect
	     overflow/underflow.  */
	  mpfr_inits2 (prec, m0, m1, NULL);
	  mpfr_from_real (m0, ra0, GMP_RNDN);
	  mpfr_from_real (m1, ra1, GMP_RNDN);
	  mpfr_clear_flags ();
	  mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
	  /* Remquo is independent of the rounding mode, so pass
	     inexact=0 to do_mpfr_ckconv().  */
	  result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
	  mpfr_clears (m0, m1, NULL);
	  if (result_rem)
	    {
	      /* MPFR calculates quo in the host's long so it may
		 return more bits in quo than the target int can hold
		 if sizeof(host long) > sizeof(target int).  This can
		 happen even for native compilers in LP64 mode.  In
		 these cases, modulo the quo value with the largest
		 number that the target int can hold while leaving one
		 bit for the sign.  */
	      if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
		integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));

	      /* Dereference the quo pointer argument.  */
	      arg_quo = build_fold_indirect_ref (arg_quo);
	      /* Proceed iff a valid pointer type was passed in.  */
	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
	        {
		  /* Set the value; the assignment is marked as having
		     side effects so it is not optimized away.  */
		  tree result_quo = fold_build2 (MODIFY_EXPR,
						 TREE_TYPE (arg_quo), arg_quo,
						 build_int_cst (NULL, integer_quo));
		  TREE_SIDE_EFFECTS (result_quo) = 1;
		  /* Combine the quo assignment with the rem.  */
		  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						    result_quo, result_rem));
		}
	    }
	}
    }
  return result;
}
13294
/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
   resulting value as a tree with type TYPE.  The mpfr precision is
   set to the precision of TYPE.  We assume that this mpfr function
   returns zero if the result could be calculated exactly within the
   requested precision.  In addition, the integer pointer represented
   by ARG_SG will be dereferenced and set to the appropriate signgam
   (-1,1) value.

   Returns NULL_TREE when folding is not possible (non-constant ARG,
   non-binary target format, ARG_SG not an int pointer, or an ARG at
   which lgamma is singular: zero or a negative integer).  */

static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
	 negative integer.  */
      if (real_isfinite (ra)
	  && ra->cl != rvc_zero
	  && !(real_isneg(ra) && real_isinteger(ra, TYPE_MODE (type))))
        {
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  int inexact, sg;
	  mpfr_t m;
	  tree result_lg;

	  /* Compute lgamma at the target precision; mpfr_lgamma also
	     reports the sign of gamma(ARG) in SG.  The flags are
	     cleared first so do_mpfr_ckconv can detect
	     overflow/underflow.  */
	  mpfr_init2 (m, prec);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  mpfr_clear_flags ();
	  inexact = mpfr_lgamma (m, &sg, m, rnd);
	  result_lg = do_mpfr_ckconv (m, type, inexact);
	  mpfr_clear (m);
	  if (result_lg)
	    {
	      tree result_sg;

	      /* Dereference the arg_sg pointer argument.  */
	      arg_sg = build_fold_indirect_ref (arg_sg);
	      /* Assign the signgam value into *arg_sg.  */
	      result_sg = fold_build2 (MODIFY_EXPR,
				       TREE_TYPE (arg_sg), arg_sg,
				       build_int_cst (NULL, sg));
	      TREE_SIDE_EFFECTS (result_sg) = 1;
	      /* Combine the signgam assignment with the lgamma result.  */
	      result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						result_sg, result_lg));
	    }
	}
    }

  return result;
}
13359
13360 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
13361 function FUNC on it and return the resulting value as a tree with
13362 type TYPE. The mpfr precision is set to the precision of TYPE. We
13363 assume that function FUNC returns zero if the result could be
13364 calculated exactly within the requested precision. */
13365
13366 static tree
13367 do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
13368 {
13369 tree result = NULL_TREE;
13370
13371 STRIP_NOPS (arg);
13372
13373 /* To proceed, MPFR must exactly represent the target floating point
13374 format, which only happens when the target base equals two. */
13375 if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
13376 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
13377 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
13378 {
13379 const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
13380 const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
13381
13382 if (real_isfinite (re) && real_isfinite (im))
13383 {
13384 const struct real_format *const fmt =
13385 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
13386 const int prec = fmt->p;
13387 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13388 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
13389 int inexact;
13390 mpc_t m;
13391
13392 mpc_init2 (m, prec);
13393 mpfr_from_real (mpc_realref(m), re, rnd);
13394 mpfr_from_real (mpc_imagref(m), im, rnd);
13395 mpfr_clear_flags ();
13396 inexact = func (m, m, crnd);
13397 result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
13398 mpc_clear (m);
13399 }
13400 }
13401
13402 return result;
13403 }
13404
13405 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
13406 mpc function FUNC on it and return the resulting value as a tree
13407 with type TYPE. The mpfr precision is set to the precision of
13408 TYPE. We assume that function FUNC returns zero if the result
13409 could be calculated exactly within the requested precision. If
13410 DO_NONFINITE is true, then fold expressions containing Inf or NaN
13411 in the arguments and/or results. */
13412
13413 tree
13414 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
13415 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
13416 {
13417 tree result = NULL_TREE;
13418
13419 STRIP_NOPS (arg0);
13420 STRIP_NOPS (arg1);
13421
13422 /* To proceed, MPFR must exactly represent the target floating point
13423 format, which only happens when the target base equals two. */
13424 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
13425 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
13426 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
13427 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
13428 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
13429 {
13430 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
13431 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
13432 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
13433 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
13434
13435 if (do_nonfinite
13436 || (real_isfinite (re0) && real_isfinite (im0)
13437 && real_isfinite (re1) && real_isfinite (im1)))
13438 {
13439 const struct real_format *const fmt =
13440 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
13441 const int prec = fmt->p;
13442 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13443 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
13444 int inexact;
13445 mpc_t m0, m1;
13446
13447 mpc_init2 (m0, prec);
13448 mpc_init2 (m1, prec);
13449 mpfr_from_real (mpc_realref(m0), re0, rnd);
13450 mpfr_from_real (mpc_imagref(m0), im0, rnd);
13451 mpfr_from_real (mpc_realref(m1), re1, rnd);
13452 mpfr_from_real (mpc_imagref(m1), im1, rnd);
13453 mpfr_clear_flags ();
13454 inexact = func (m0, m0, m1, crnd);
13455 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
13456 mpc_clear (m0);
13457 mpc_clear (m1);
13458 }
13459 }
13460
13461 return result;
13462 }
13463
13464 /* FIXME tuples.
13465 The functions below provide an alternate interface for folding
13466 builtin function calls presented as GIMPLE_CALL statements rather
13467 than as CALL_EXPRs. The folded result is still expressed as a
13468 tree. There is too much code duplication in the handling of
13469 varargs functions, and a more intrusive re-factoring would permit
13470 better sharing of code between the tree and statement-based
13471 versions of these functions. */
13472
13473 /* Construct a new CALL_EXPR using the tail of the argument list of STMT
13474 along with N new arguments specified as the "..." parameters. SKIP
13475 is the number of arguments in STMT to be omitted. This function is used
13476 to do varargs-to-varargs transformations. */
13477
13478 static tree
13479 gimple_rewrite_call_expr (gimple stmt, int skip, tree fndecl, int n, ...)
13480 {
13481 int oldnargs = gimple_call_num_args (stmt);
13482 int nargs = oldnargs - skip + n;
13483 tree fntype = TREE_TYPE (fndecl);
13484 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
13485 tree *buffer;
13486 int i, j;
13487 va_list ap;
13488 location_t loc = gimple_location (stmt);
13489
13490 buffer = XALLOCAVEC (tree, nargs);
13491 va_start (ap, n);
13492 for (i = 0; i < n; i++)
13493 buffer[i] = va_arg (ap, tree);
13494 va_end (ap);
13495 for (j = skip; j < oldnargs; j++, i++)
13496 buffer[i] = gimple_call_arg (stmt, j);
13497
13498 return fold (build_call_array_loc (loc, TREE_TYPE (fntype), fn, nargs, buffer));
13499 }
13500
/* Fold a call STMT to __{,v}sprintf_chk.  Return NULL_TREE if
   a normal call should be emitted rather than expanding the function
   inline.  FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK.

   The call is rewritten to plain {,v}sprintf when the object size check
   can be proven to pass at compile time (the output length is known and
   smaller than SIZE, or SIZE is unknown, i.e. all-ones) and the flag
   argument permits it.  */

static tree
gimple_fold_builtin_sprintf_chk (gimple stmt, enum built_in_function fcode)
{
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;
  int nargs = gimple_call_num_args (stmt);

  /* Verify the required arguments in the original call:
     dest, flag, size, fmt, followed by optional format arguments.  */
  if (nargs < 4)
    return NULL_TREE;
  dest = gimple_call_arg (stmt, 0);
  if (!validate_arg (dest, POINTER_TYPE))
    return NULL_TREE;
  flag = gimple_call_arg (stmt, 1);
  if (!validate_arg (flag, INTEGER_TYPE))
    return NULL_TREE;
  size = gimple_call_arg (stmt, 2);
  if (!validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;
  fmt = gimple_call_arg (stmt, 3);
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  /* The destination object size must be a compile-time constant.  */
  if (! host_integerp (size, 1))
    return NULL_TREE;

  /* LEN is the known output length, if we can determine one below.  */
  len = NULL_TREE;

  if (!init_target_chars ())
    return NULL_TREE;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str != NULL)
    {
      /* If the format doesn't contain % args or %%, we know the size.  */
      if (strchr (fmt_str, target_percent) == 0)
	{
	  /* For sprintf_chk with extra arguments the format would be
	     inconsistent with no % directives, so only take this path
	     for vsprintf_chk or an argument-less sprintf_chk call.  */
	  if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
	    len = build_int_cstu (size_type_node, strlen (fmt_str));
	}
      /* If the format is "%s" and first ... argument is a string literal,
	 we know the size too.  */
      else if (fcode == BUILT_IN_SPRINTF_CHK
	       && strcmp (fmt_str, target_percent_s) == 0)
	{
	  tree arg;

	  if (nargs == 5)
	    {
	      arg = gimple_call_arg (stmt, 4);
	      if (validate_arg (arg, POINTER_TYPE))
		{
		  len = c_strlen (arg, 1);
		  /* Only usable when constant and representable.  */
		  if (! len || ! host_integerp (len, 1))
		    len = NULL_TREE;
		}
	    }
	}
    }

  /* SIZE of all-ones means "unknown object size": no check needed.
     Otherwise the known output length must be strictly smaller.  */
  if (! integer_all_onesp (size))
    {
      if (! len || ! tree_int_cst_lt (len, size))
	return NULL_TREE;
    }

  /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      if (fmt_str == NULL)
	return NULL_TREE;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return NULL_TREE;
    }

  /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available.  */
  fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
		      ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
  if (!fn)
    return NULL_TREE;

  /* Drop the flag and size arguments (positions 1 and 2) and call the
     unchecked function with dest, fmt and the remaining arguments.  */
  return gimple_rewrite_call_expr (stmt, 4, fn, 2, dest, fmt);
}
13591
13592 /* Fold a call STMT to {,v}snprintf. Return NULL_TREE if
13593 a normal call should be emitted rather than expanding the function
13594 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13595 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13596 passed as second argument. */
13597
13598 tree
13599 gimple_fold_builtin_snprintf_chk (gimple stmt, tree maxlen,
13600 enum built_in_function fcode)
13601 {
13602 tree dest, size, len, fn, fmt, flag;
13603 const char *fmt_str;
13604
13605 /* Verify the required arguments in the original call. */
13606 if (gimple_call_num_args (stmt) < 5)
13607 return NULL_TREE;
13608 dest = gimple_call_arg (stmt, 0);
13609 if (!validate_arg (dest, POINTER_TYPE))
13610 return NULL_TREE;
13611 len = gimple_call_arg (stmt, 1);
13612 if (!validate_arg (len, INTEGER_TYPE))
13613 return NULL_TREE;
13614 flag = gimple_call_arg (stmt, 2);
13615 if (!validate_arg (flag, INTEGER_TYPE))
13616 return NULL_TREE;
13617 size = gimple_call_arg (stmt, 3);
13618 if (!validate_arg (size, INTEGER_TYPE))
13619 return NULL_TREE;
13620 fmt = gimple_call_arg (stmt, 4);
13621 if (!validate_arg (fmt, POINTER_TYPE))
13622 return NULL_TREE;
13623
13624 if (! host_integerp (size, 1))
13625 return NULL_TREE;
13626
13627 if (! integer_all_onesp (size))
13628 {
13629 if (! host_integerp (len, 1))
13630 {
13631 /* If LEN is not constant, try MAXLEN too.
13632 For MAXLEN only allow optimizing into non-_ocs function
13633 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
13634 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
13635 return NULL_TREE;
13636 }
13637 else
13638 maxlen = len;
13639
13640 if (tree_int_cst_lt (size, maxlen))
13641 return NULL_TREE;
13642 }
13643
13644 if (!init_target_chars ())
13645 return NULL_TREE;
13646
13647 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
13648 or if format doesn't contain % chars or is "%s". */
13649 if (! integer_zerop (flag))
13650 {
13651 fmt_str = c_getstr (fmt);
13652 if (fmt_str == NULL)
13653 return NULL_TREE;
13654 if (strchr (fmt_str, target_percent) != NULL
13655 && strcmp (fmt_str, target_percent_s))
13656 return NULL_TREE;
13657 }
13658
13659 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
13660 available. */
13661 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
13662 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
13663 if (!fn)
13664 return NULL_TREE;
13665
13666 return gimple_rewrite_call_expr (stmt, 5, fn, 3, dest, len, fmt);
13667 }
13668
13669 /* Builtins with folding operations that operate on "..." arguments
13670 need special handling; we need to store the arguments in a convenient
13671 data structure before attempting any folding. Fortunately there are
13672 only a few builtins that fall into this category. FNDECL is the
13673 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
13674 result of the function call is ignored. */
13675
13676 static tree
13677 gimple_fold_builtin_varargs (tree fndecl, gimple stmt,
13678 bool ignore ATTRIBUTE_UNUSED)
13679 {
13680 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
13681 tree ret = NULL_TREE;
13682
13683 switch (fcode)
13684 {
13685 case BUILT_IN_SPRINTF_CHK:
13686 case BUILT_IN_VSPRINTF_CHK:
13687 ret = gimple_fold_builtin_sprintf_chk (stmt, fcode);
13688 break;
13689
13690 case BUILT_IN_SNPRINTF_CHK:
13691 case BUILT_IN_VSNPRINTF_CHK:
13692 ret = gimple_fold_builtin_snprintf_chk (stmt, NULL_TREE, fcode);
13693
13694 default:
13695 break;
13696 }
13697 if (ret)
13698 {
13699 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
13700 TREE_NO_WARNING (ret) = 1;
13701 return ret;
13702 }
13703 return NULL_TREE;
13704 }
13705
13706 /* A wrapper function for builtin folding that prevents warnings for
13707 "statement without effect" and the like, caused by removing the
13708 call node earlier than the warning is generated. */
13709
13710 tree
13711 fold_call_stmt (gimple stmt, bool ignore)
13712 {
13713 tree ret = NULL_TREE;
13714 tree fndecl = gimple_call_fndecl (stmt);
13715 location_t loc = gimple_location (stmt);
13716 if (fndecl
13717 && TREE_CODE (fndecl) == FUNCTION_DECL
13718 && DECL_BUILT_IN (fndecl)
13719 && !gimple_call_va_arg_pack_p (stmt))
13720 {
13721 int nargs = gimple_call_num_args (stmt);
13722
13723 if (avoid_folding_inline_builtin (fndecl))
13724 return NULL_TREE;
13725 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
13726 {
13727 return targetm.fold_builtin (fndecl, nargs,
13728 (nargs > 0
13729 ? gimple_call_arg_ptr (stmt, 0)
13730 : &error_mark_node), ignore);
13731 }
13732 else
13733 {
13734 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
13735 {
13736 tree args[MAX_ARGS_TO_FOLD_BUILTIN];
13737 int i;
13738 for (i = 0; i < nargs; i++)
13739 args[i] = gimple_call_arg (stmt, i);
13740 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
13741 }
13742 if (!ret)
13743 ret = gimple_fold_builtin_varargs (fndecl, stmt, ignore);
13744 if (ret)
13745 {
13746 /* Propagate location information from original call to
13747 expansion of builtin. Otherwise things like
13748 maybe_emit_chk_warning, that operate on the expansion
13749 of a builtin, will use the wrong location information. */
13750 if (gimple_has_location (stmt))
13751 {
13752 tree realret = ret;
13753 if (TREE_CODE (ret) == NOP_EXPR)
13754 realret = TREE_OPERAND (ret, 0);
13755 if (CAN_HAVE_LOCATION_P (realret)
13756 && !EXPR_HAS_LOCATION (realret))
13757 SET_EXPR_LOCATION (realret, loc);
13758 return realret;
13759 }
13760 return ret;
13761 }
13762 }
13763 }
13764 return NULL_TREE;
13765 }
13766
13767 /* Look up the function in built_in_decls that corresponds to DECL
13768 and set ASMSPEC as its user assembler name. DECL must be a
13769 function decl that declares a builtin. */
13770
13771 void
13772 set_builtin_user_assembler_name (tree decl, const char *asmspec)
13773 {
13774 tree builtin;
13775 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
13776 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
13777 && asmspec != 0);
13778
13779 builtin = built_in_decls [DECL_FUNCTION_CODE (decl)];
13780 set_user_assembler_name (builtin, asmspec);
13781 switch (DECL_FUNCTION_CODE (decl))
13782 {
13783 case BUILT_IN_MEMCPY:
13784 init_block_move_fn (asmspec);
13785 memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
13786 break;
13787 case BUILT_IN_MEMSET:
13788 init_block_clear_fn (asmspec);
13789 memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
13790 break;
13791 case BUILT_IN_MEMMOVE:
13792 memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
13793 break;
13794 case BUILT_IN_MEMCMP:
13795 memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
13796 break;
13797 case BUILT_IN_ABORT:
13798 abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
13799 break;
13800 case BUILT_IN_FFS:
13801 if (INT_TYPE_SIZE < BITS_PER_WORD)
13802 {
13803 set_user_assembler_libfunc ("ffs", asmspec);
13804 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
13805 MODE_INT, 0), "ffs");
13806 }
13807 break;
13808 default:
13809 break;
13810 }
13811 }