ed97d485167c60a43e9c4ddc95475bd3733f4d06
[gcc.git] / gcc / builtins.c
1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
4 Free Software Foundation, Inc.
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "real.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "gimple.h"
31 #include "flags.h"
32 #include "regs.h"
33 #include "hard-reg-set.h"
34 #include "except.h"
35 #include "function.h"
36 #include "insn-config.h"
37 #include "expr.h"
38 #include "optabs.h"
39 #include "libfuncs.h"
40 #include "recog.h"
41 #include "output.h"
42 #include "typeclass.h"
43 #include "toplev.h"
44 #include "predict.h"
45 #include "tm_p.h"
46 #include "target.h"
47 #include "langhooks.h"
48 #include "basic-block.h"
49 #include "tree-mudflap.h"
50 #include "tree-flow.h"
51 #include "value-prof.h"
52 #include "diagnostic.h"
53
54 #ifndef SLOW_UNALIGNED_ACCESS
55 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
56 #endif
57
58 #ifndef PAD_VARARGS_DOWN
59 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
60 #endif
61 #ifdef HAVE_mpc
62 static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));
63 #endif
64
/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[4]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

/* Expand every DEF_BUILTIN entry in builtins.def to the stringized enum
   code, producing a name table indexed by built_in_function.  */
#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN

/* Setup an array of _DECL trees, make sure each element is
   initialized to NULL_TREE.  */
tree built_in_decls[(int) END_BUILTINS];
/* Declarations used when constructing the builtin implicitly in the compiler.
   It may be NULL_TREE when this is invalid (for instance runtime is not
   required to implement the function call in all cases).  */
tree implicit_built_in_decls[(int) END_BUILTINS];
83
84 static const char *c_getstr (tree);
85 static rtx c_readstr (const char *, enum machine_mode);
86 static int target_char_cast (tree, char *);
87 static rtx get_memory_rtx (tree, tree);
88 static int apply_args_size (void);
89 static int apply_result_size (void);
90 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
91 static rtx result_vector (int, rtx);
92 #endif
93 static void expand_builtin_update_setjmp_buf (rtx);
94 static void expand_builtin_prefetch (tree);
95 static rtx expand_builtin_apply_args (void);
96 static rtx expand_builtin_apply_args_1 (void);
97 static rtx expand_builtin_apply (rtx, rtx, rtx);
98 static void expand_builtin_return (rtx);
99 static enum type_class type_to_class (tree);
100 static rtx expand_builtin_classify_type (tree);
101 static void expand_errno_check (tree, rtx);
102 static rtx expand_builtin_mathfn (tree, rtx, rtx);
103 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
104 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
105 static rtx expand_builtin_interclass_mathfn (tree, rtx, rtx);
106 static rtx expand_builtin_sincos (tree);
107 static rtx expand_builtin_cexpi (tree, rtx, rtx);
108 static rtx expand_builtin_int_roundingfn (tree, rtx);
109 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
110 static rtx expand_builtin_args_info (tree);
111 static rtx expand_builtin_next_arg (void);
112 static rtx expand_builtin_va_start (tree);
113 static rtx expand_builtin_va_end (tree);
114 static rtx expand_builtin_va_copy (tree);
115 static rtx expand_builtin_memchr (tree, rtx, enum machine_mode);
116 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
117 static rtx expand_builtin_strcmp (tree, rtx, enum machine_mode);
118 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
119 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
120 static rtx expand_builtin_strcat (tree, tree, rtx, enum machine_mode);
121 static rtx expand_builtin_strncat (tree, rtx, enum machine_mode);
122 static rtx expand_builtin_strspn (tree, rtx, enum machine_mode);
123 static rtx expand_builtin_strcspn (tree, rtx, enum machine_mode);
124 static rtx expand_builtin_memcpy (tree, rtx, enum machine_mode);
125 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
126 static rtx expand_builtin_mempcpy_args (tree, tree, tree, tree, rtx,
127 enum machine_mode, int);
128 static rtx expand_builtin_memmove (tree, rtx, enum machine_mode, int);
129 static rtx expand_builtin_memmove_args (tree, tree, tree, tree, rtx,
130 enum machine_mode, int);
131 static rtx expand_builtin_bcopy (tree, int);
132 static rtx expand_builtin_strcpy (tree, tree, rtx, enum machine_mode);
133 static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx, enum machine_mode);
134 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
135 static rtx expand_builtin_strncpy (tree, rtx, enum machine_mode);
136 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
137 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
138 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
139 static rtx expand_builtin_bzero (tree);
140 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
141 static rtx expand_builtin_strstr (tree, rtx, enum machine_mode);
142 static rtx expand_builtin_strpbrk (tree, rtx, enum machine_mode);
143 static rtx expand_builtin_strchr (tree, rtx, enum machine_mode);
144 static rtx expand_builtin_strrchr (tree, rtx, enum machine_mode);
145 static rtx expand_builtin_alloca (tree, rtx);
146 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
147 static rtx expand_builtin_frame_address (tree, tree);
148 static rtx expand_builtin_fputs (tree, rtx, bool);
149 static rtx expand_builtin_printf (tree, rtx, enum machine_mode, bool);
150 static rtx expand_builtin_fprintf (tree, rtx, enum machine_mode, bool);
151 static rtx expand_builtin_sprintf (tree, rtx, enum machine_mode);
152 static tree stabilize_va_list_loc (location_t, tree, int);
153 static rtx expand_builtin_expect (tree, rtx);
154 static tree fold_builtin_constant_p (tree);
155 static tree fold_builtin_expect (location_t, tree, tree);
156 static tree fold_builtin_classify_type (tree);
157 static tree fold_builtin_strlen (location_t, tree);
158 static tree fold_builtin_inf (location_t, tree, int);
159 static tree fold_builtin_nan (tree, tree, int);
160 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
161 static bool validate_arg (const_tree, enum tree_code code);
162 static bool integer_valued_real_p (tree);
163 static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
164 static bool readonly_data_expr (tree);
165 static rtx expand_builtin_fabs (tree, rtx, rtx);
166 static rtx expand_builtin_signbit (tree, rtx);
167 static tree fold_builtin_sqrt (location_t, tree, tree);
168 static tree fold_builtin_cbrt (location_t, tree, tree);
169 static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
170 static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
171 static tree fold_builtin_cos (location_t, tree, tree, tree);
172 static tree fold_builtin_cosh (location_t, tree, tree, tree);
173 static tree fold_builtin_tan (tree, tree);
174 static tree fold_builtin_trunc (location_t, tree, tree);
175 static tree fold_builtin_floor (location_t, tree, tree);
176 static tree fold_builtin_ceil (location_t, tree, tree);
177 static tree fold_builtin_round (location_t, tree, tree);
178 static tree fold_builtin_int_roundingfn (location_t, tree, tree);
179 static tree fold_builtin_bitop (tree, tree);
180 static tree fold_builtin_memory_op (location_t, tree, tree, tree, tree, bool, int);
181 static tree fold_builtin_strchr (location_t, tree, tree, tree);
182 static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
183 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
184 static tree fold_builtin_strcmp (location_t, tree, tree);
185 static tree fold_builtin_strncmp (location_t, tree, tree, tree);
186 static tree fold_builtin_signbit (location_t, tree, tree);
187 static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
188 static tree fold_builtin_isascii (location_t, tree);
189 static tree fold_builtin_toascii (location_t, tree);
190 static tree fold_builtin_isdigit (location_t, tree);
191 static tree fold_builtin_fabs (location_t, tree, tree);
192 static tree fold_builtin_abs (location_t, tree, tree);
193 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
194 enum tree_code);
195 static tree fold_builtin_n (location_t, tree, tree *, int, bool);
196 static tree fold_builtin_0 (location_t, tree, bool);
197 static tree fold_builtin_1 (location_t, tree, tree, bool);
198 static tree fold_builtin_2 (location_t, tree, tree, tree, bool);
199 static tree fold_builtin_3 (location_t, tree, tree, tree, tree, bool);
200 static tree fold_builtin_4 (location_t, tree, tree, tree, tree, tree, bool);
201 static tree fold_builtin_varargs (location_t, tree, tree, bool);
202
203 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
204 static tree fold_builtin_strstr (location_t, tree, tree, tree);
205 static tree fold_builtin_strrchr (location_t, tree, tree, tree);
206 static tree fold_builtin_strcat (location_t, tree, tree);
207 static tree fold_builtin_strncat (location_t, tree, tree, tree);
208 static tree fold_builtin_strspn (location_t, tree, tree);
209 static tree fold_builtin_strcspn (location_t, tree, tree);
210 static tree fold_builtin_sprintf (location_t, tree, tree, tree, int);
211
212 static rtx expand_builtin_object_size (tree);
213 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
214 enum built_in_function);
215 static void maybe_emit_chk_warning (tree, enum built_in_function);
216 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
217 static void maybe_emit_free_warning (tree);
218 static tree fold_builtin_object_size (tree, tree);
219 static tree fold_builtin_strcat_chk (location_t, tree, tree, tree, tree);
220 static tree fold_builtin_strncat_chk (location_t, tree, tree, tree, tree, tree);
221 static tree fold_builtin_sprintf_chk (location_t, tree, enum built_in_function);
222 static tree fold_builtin_printf (location_t, tree, tree, tree, bool, enum built_in_function);
223 static tree fold_builtin_fprintf (location_t, tree, tree, tree, tree, bool,
224 enum built_in_function);
225 static bool init_target_chars (void);
226
227 static unsigned HOST_WIDE_INT target_newline;
228 static unsigned HOST_WIDE_INT target_percent;
229 static unsigned HOST_WIDE_INT target_c;
230 static unsigned HOST_WIDE_INT target_s;
231 static char target_percent_c[3];
232 static char target_percent_s[3];
233 static char target_percent_s_newline[4];
234 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
235 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
236 static tree do_mpfr_arg2 (tree, tree, tree,
237 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
238 static tree do_mpfr_arg3 (tree, tree, tree, tree,
239 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
240 static tree do_mpfr_sincos (tree, tree, tree);
241 static tree do_mpfr_bessel_n (tree, tree, tree,
242 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
243 const REAL_VALUE_TYPE *, bool);
244 static tree do_mpfr_remquo (tree, tree, tree);
245 static tree do_mpfr_lgamma_r (tree, tree, tree);
246
/* Return true if NAME begins with one of the name prefixes reserved
   for compiler builtins: "__builtin_" or "__sync_".  */

bool
is_builtin_name (const char *name)
{
  return (strncmp (name, "__builtin_", strlen ("__builtin_")) == 0
	  || strncmp (name, "__sync_", strlen ("__sync_")) == 0);
}
256
257 /* Return true if NODE should be considered for inline expansion regardless
258 of the optimization level. This means whenever a function is invoked with
259 its "internal" name, which normally contains the prefix "__builtin". */
260
261 static bool
262 called_as_built_in (tree node)
263 {
264 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
265 we want the name used to call the function, not the name it
266 will have. */
267 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
268 return is_builtin_name (name);
269 }
270
/* Return the alignment in bits of EXP, an object.
   Don't return more than MAX_ALIGN no matter what, ALIGN is the initial
   guessed alignment e.g. from type alignment.  */

int
get_object_alignment (tree exp, unsigned int align, unsigned int max_align)
{
  unsigned int inner;

  /* INNER tracks the alignment that the access path (constant bit
     position plus any variable offsets) lets us prove; start from the
     most optimistic value.  */
  inner = max_align;
  if (handled_component_p (exp))
    {
      HOST_WIDE_INT bitsize, bitpos;
      tree offset;
      enum machine_mode mode;
      int unsignedp, volatilep;

      exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
				 &mode, &unsignedp, &volatilep, true);
      /* A nonzero constant bit position can only guarantee alignment
	 up to its lowest set bit.  */
      if (bitpos)
	inner = MIN (inner, (unsigned) (bitpos & -bitpos));
      /* Walk the chain of offset terms (a right-leaning PLUS_EXPR
	 chain), clamping INNER by what each term allows.  */
      while (offset)
	{
	  tree next_offset;

	  if (TREE_CODE (offset) == PLUS_EXPR)
	    {
	      next_offset = TREE_OPERAND (offset, 0);
	      offset = TREE_OPERAND (offset, 1);
	    }
	  else
	    next_offset = NULL;
	  if (host_integerp (offset, 1))
	    {
	      /* Any overflow in calculating offset_bits won't change
		 the alignment.  */
	      unsigned offset_bits
		= ((unsigned) tree_low_cst (offset, 1) * BITS_PER_UNIT);

	      if (offset_bits)
		inner = MIN (inner, (offset_bits & -offset_bits));
	    }
	  else if (TREE_CODE (offset) == MULT_EXPR
		   && host_integerp (TREE_OPERAND (offset, 1), 1))
	    {
	      /* Any overflow in calculating offset_factor won't change
		 the alignment.  */
	      unsigned offset_factor
		= ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
		   * BITS_PER_UNIT);

	      if (offset_factor)
		inner = MIN (inner, (offset_factor & -offset_factor));
	    }
	  else
	    {
	      /* A variable offset about which nothing is known: only
		 byte alignment can be assumed.  */
	      inner = MIN (inner, BITS_PER_UNIT);
	      break;
	    }
	  offset = next_offset;
	}
    }
  /* Combine the path-derived alignment with whatever the base object
     itself guarantees.  */
  if (DECL_P (exp))
    align = MIN (inner, DECL_ALIGN (exp));
#ifdef CONSTANT_ALIGNMENT
  else if (CONSTANT_CLASS_P (exp))
    align = MIN (inner, (unsigned)CONSTANT_ALIGNMENT (exp, align));
#endif
  else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR
	   || TREE_CODE (exp) == INDIRECT_REF)
    align = MIN (TYPE_ALIGN (TREE_TYPE (exp)), inner);
  else
    align = MIN (align, inner);
  return MIN (align, max_align);
}
346
347 /* Returns true iff we can trust that alignment information has been
348 calculated properly. */
349
350 bool
351 can_trust_pointer_alignment (void)
352 {
353 /* We rely on TER to compute accurate alignment information. */
354 return (optimize && flag_tree_ter);
355 }
356
/* Return the alignment in bits of EXP, a pointer valued expression.
   But don't return more than MAX_ALIGN no matter what.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

int
get_pointer_alignment (tree exp, unsigned int max_align)
{
  unsigned int align, inner;

  /* Without trustworthy alignment data, claim no known alignment.  */
  if (!can_trust_pointer_alignment ())
    return 0;

  if (!POINTER_TYPE_P (TREE_TYPE (exp)))
    return 0;

  /* Start from the alignment of the pointed-to type.  */
  align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
  align = MIN (align, max_align);

  /* Peel conversions and pointer arithmetic, refining ALIGN as we go.  */
  while (1)
    {
      switch (TREE_CODE (exp))
	{
	CASE_CONVERT:
	  exp = TREE_OPERAND (exp, 0);
	  if (! POINTER_TYPE_P (TREE_TYPE (exp)))
	    return align;

	  inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
	  align = MIN (inner, max_align);
	  break;

	case POINTER_PLUS_EXPR:
	  /* If sum of pointer + int, restrict our maximum alignment to that
	     imposed by the integer.  If not, we can't do any better than
	     ALIGN.  */
	  if (! host_integerp (TREE_OPERAND (exp, 1), 1))
	    return align;

	  /* Shrink MAX_ALIGN until the constant addend is a multiple of
	     it; only that much alignment survives the addition.  */
	  while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
		  & (max_align / BITS_PER_UNIT - 1))
		 != 0)
	    max_align >>= 1;

	  exp = TREE_OPERAND (exp, 0);
	  break;

	case ADDR_EXPR:
	  /* See what we are pointing at and look at its alignment.  */
	  return get_object_alignment (TREE_OPERAND (exp, 0), align, max_align);

	default:
	  return align;
	}
    }
}
416
/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  tree offset_node;
  HOST_WIDE_INT offset;
  int max;
  const char *ptr;

  STRIP_NOPS (src);
  /* For a conditional, the length is known only if both arms have the
     same known constant length.  */
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  /* For a comma expression the value is that of the second operand.  */
  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  src = string_constant (src, &offset_node);
  if (src == 0)
    return NULL_TREE;

  max = TREE_STRING_LENGTH (src) - 1;
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      int i;

      for (i = 0; i < max; i++)
	if (ptr[i] == 0)
	  return NULL_TREE;

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  This would perhaps not be valid if we were dealing
	 with named arrays in addition to literal string constants.  */

      return size_diffop_loc (input_location, size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (offset_node == 0)
    offset = 0;
  else if (! host_integerp (offset_node, 0))
    offset = -1;
  else
    offset = tree_low_cst (offset_node, 0);

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (! TREE_NO_WARNING (src))
	{
	  warning (0, "offset outside bounds of constant string");
	  TREE_NO_WARNING (src) = 1;
	}
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return ssize_int (strlen (ptr + offset));
}
515
516 /* Return a char pointer for a C string if it is a string constant
517 or sum of string constant and integer constant. */
518
519 static const char *
520 c_getstr (tree src)
521 {
522 tree offset_node;
523
524 src = string_constant (src, &offset_node);
525 if (src == 0)
526 return 0;
527
528 if (offset_node == 0)
529 return TREE_STRING_POINTER (src);
530 else if (!host_integerp (offset_node, 1)
531 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
532 return 0;
533
534 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
535 }
536
/* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, enum machine_mode mode)
{
  HOST_WIDE_INT c[2];
  HOST_WIDE_INT ch;
  unsigned int i, j;

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);

  c[0] = 0;
  c[1] = 0;
  /* CH doubles as the current byte and as a "still inside the string"
     flag: once a NUL is read it stays 0, so the rest of the constant
     is zero-filled, just as the target string would be.  */
  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      /* Compute J, the bit position within the constant where byte I of
	 STR lands, honouring the target's byte order within a word and
	 word order within multiword values.  */
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;
      gcc_assert (j <= 2 * HOST_BITS_PER_WIDE_INT);

      if (ch)
	ch = (unsigned char) str[i];
      c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }
  return immed_double_const (c[0], c[1], mode);
}
569
570 /* Cast a target constant CST to target CHAR and if that value fits into
571 host char type, return zero and put that value into variable pointed to by
572 P. */
573
574 static int
575 target_char_cast (tree cst, char *p)
576 {
577 unsigned HOST_WIDE_INT val, hostval;
578
579 if (!host_integerp (cst, 1)
580 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
581 return 1;
582
583 val = tree_low_cst (cst, 1);
584 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
585 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
586
587 hostval = val;
588 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
589 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
590
591 if (val != hostval)
592 return 1;
593
594 *p = hostval;
595 return 0;
596 }
597
598 /* Similar to save_expr, but assumes that arbitrary code is not executed
599 in between the multiple evaluations. In particular, we assume that a
600 non-addressable local variable will not be modified. */
601
602 static tree
603 builtin_save_expr (tree exp)
604 {
605 if (TREE_ADDRESSABLE (exp) == 0
606 && (TREE_CODE (exp) == PARM_DECL
607 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp))))
608 return exp;
609
610 return save_expr (exp);
611 }
612
/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;

#ifdef INITIAL_FRAME_ADDRESS_RTX
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
#else
  rtx tem;

  /* For a zero count with __builtin_return_address, we don't care what
     frame address we return, because target-specific definitions will
     override us.  Therefore frame pointer elimination is OK, and using
     the soft frame pointer is OK.

     For a nonzero count, or a zero count with __builtin_frame_address,
     we require a stable offset from the current frame pointer to the
     previous one, so we must use the hard frame pointer, and
     we must disable frame pointer elimination.  */
  if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    tem = frame_pointer_rtx;
  else
    {
      tem = hard_frame_pointer_rtx;

      /* Tell reload not to eliminate the frame pointer.  */
      crtl->accesses_prior_frames = 1;
    }
#endif

  /* Some machines need special handling before we can access
     arbitrary frames.  For example, on the SPARC, we must first flush
     all register windows to the stack.  */
#ifdef SETUP_FRAME_ADDRESSES
  if (count > 0)
    SETUP_FRAME_ADDRESSES ();
#endif

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
  if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;
#endif

  /* Scan back COUNT frames to the specified frame.  Each iteration
     loads the saved frame pointer of the next outer frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
#ifdef DYNAMIC_CHAIN_ADDRESS
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
#ifdef FRAME_ADDR_RTX
    return FRAME_ADDR_RTX (tem);
#else
    return tem;
#endif

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
696
697 /* Alias set used for setjmp buffer. */
698 static alias_set_type setjmp_alias_set = -1;
699
700 /* Construct the leading half of a __builtin_setjmp call. Control will
701 return to RECEIVER_LABEL. This is also called directly by the SJLJ
702 exception handling code. */
703
704 void
705 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
706 {
707 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
708 rtx stack_save;
709 rtx mem;
710
711 if (setjmp_alias_set == -1)
712 setjmp_alias_set = new_alias_set ();
713
714 buf_addr = convert_memory_address (Pmode, buf_addr);
715
716 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
717
718 /* We store the frame pointer and the address of receiver_label in
719 the buffer and use the rest of it for the stack save area, which
720 is machine-dependent. */
721
722 mem = gen_rtx_MEM (Pmode, buf_addr);
723 set_mem_alias_set (mem, setjmp_alias_set);
724 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
725
726 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
727 set_mem_alias_set (mem, setjmp_alias_set);
728
729 emit_move_insn (validize_mem (mem),
730 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
731
732 stack_save = gen_rtx_MEM (sa_mode,
733 plus_constant (buf_addr,
734 2 * GET_MODE_SIZE (Pmode)));
735 set_mem_alias_set (stack_save, setjmp_alias_set);
736 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
737
738 /* If there is further processing to do, do it. */
739 #ifdef HAVE_builtin_setjmp_setup
740 if (HAVE_builtin_setjmp_setup)
741 emit_insn (gen_builtin_setjmp_setup (buf_addr));
742 #endif
743
744 /* Tell optimize_save_area_alloca that extra work is going to
745 need to go on during alloca. */
746 cfun->calls_setjmp = 1;
747
748 /* We have a nonlocal label. */
749 cfun->has_nonlocal_label = 1;
750 }
751
/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
{
  /* Clobber the FP when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  emit_clobber (static_chain_rtx);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
#ifdef HAVE_nonlocal_goto
  if (! HAVE_nonlocal_goto)
#endif
    {
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
      /* This might change the hard frame pointer in ways that aren't
	 apparent to early optimization passes, so force a clobber.  */
      emit_clobber (hard_frame_pointer_rtx);
    }

#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }
#endif

  /* Prefer the target's setjmp receiver pattern; fall back to its
     nonlocal-goto receiver; otherwise nothing more is needed.  */
#ifdef HAVE_builtin_setjmp_receiver
  if (HAVE_builtin_setjmp_receiver)
    emit_insn (gen_builtin_setjmp_receiver (receiver_label));
  else
#endif
#ifdef HAVE_nonlocal_goto_receiver
    if (HAVE_nonlocal_goto_receiver)
      emit_insn (gen_nonlocal_goto_receiver ());
    else
#endif
      { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}
818
/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack, insn, last;
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to current
     function  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We used to store value in static_chain_rtx, but that fails if pointers
     are smaller than integers.  We instead require that the user must pass
     a second argument of 1, because that is what builtin_setjmp will
     return.  This also makes EH slightly more efficient, since we are no
     longer copying around a value that we don't care about.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
#ifdef HAVE_builtin_longjmp
  if (HAVE_builtin_longjmp)
    emit_insn (gen_builtin_longjmp (buf_addr));
  else
#endif
    {
      /* The buffer layout matches expand_builtin_setjmp_setup: frame
	 pointer, receiver label, then the stack save area.  */
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
#ifdef HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
      else
#endif
	{
	  lab = copy_to_reg (lab);

	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}
909
910 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
911 and the address of the save area. */
912
static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp, insn;

  /* The builtin takes exactly (label, save_area); anything else is
     rejected and left for the generic expander.  */
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was based
     on the frame pointer.   */
  r_save_area = copy_to_reg (r_save_area);
  /* Word 0 of the save area holds the destination frame pointer, word 1
     the destination stack pointer (in the nonlocal save-area mode).  */
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

#ifdef HAVE_nonlocal_goto
  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (HAVE_nonlocal_goto)
    emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
#endif
    {
      r_label = copy_to_reg (r_label);

      /* Tell the optimizers that all of memory and the frame may be
	 read and written across this point.  */
      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.
	 This sets the actual hard register used for the frame pointer
	 to the location of the function's incoming static chain info.
	 The non-local goto handler will then adjust it to contain the
	 proper value and reload the argument pointer, if needed.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp, NULL_RTX);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  /* The builtin itself produces no useful value.  */
  return const0_rtx;
}
994
995 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
996 (not all will be used on all machines) that was passed to __builtin_setjmp.
997 It updates the stack pointer in that block to correspond to the current
998 stack pointer. */
999
static void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  enum machine_mode sa_mode = Pmode;
  rtx stack_save;


#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    sa_mode = insn_data[(int) CODE_FOR_save_stack_nonlocal].operand[0].mode;
#endif
#ifdef STACK_SAVEAREA_MODE
  /* NOTE(review): when STACK_SAVEAREA_MODE is defined, this assignment
     unconditionally overrides the mode derived from save_stack_nonlocal
     above, making that branch dead on such targets — presumably the
     macro is authoritative; confirm against the target headers.  */
  sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
#endif

  /* The stack-pointer slot is the third word of the buffer, after the
     frame pointer and label words (each Pmode-sized).  */
  stack_save
    = gen_rtx_MEM (sa_mode,
		   memory_address
		   (sa_mode,
		    plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));

#ifdef HAVE_setjmp
  /* Give the target a chance to emit any bookkeeping it does at setjmp
     points.  */
  if (HAVE_setjmp)
    emit_insn (gen_setjmp ());
#endif

  emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
}
1028
1029 /* Expand a call to __builtin_prefetch. For a target that does not support
1030 data prefetch, evaluate the memory address argument in case it has side
1031 effects. */
1032
static void
expand_builtin_prefetch (tree exp)
{
  /* arg0 = address, arg1 = read/write flag, arg2 = locality.  */
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  /* Only the address argument is mandatory.  */
  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = build_int_cst (NULL_TREE, 3);

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

#ifdef HAVE_prefetch
  if (HAVE_prefetch)
    {
      /* Force the address into a form the prefetch pattern accepts:
	 if the pattern's predicate rejects it, or it isn't in Pmode,
	 convert and load it into a register.  */
      if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
	     (op0,
	      insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
	  || (GET_MODE (op0) != Pmode))
	{
	  op0 = convert_memory_address (Pmode, op0);
	  op0 = force_reg (Pmode, op0);
	}
      emit_insn (gen_prefetch (op0, op1, op2));
    }
#endif

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
1110
1111 /* Get a MEM rtx for expression EXP which is the address of an operand
1112 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1113 the maximum length of the block of memory that might be accessed or
1114 NULL if unknown. */
1115
static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;
  HOST_WIDE_INT off;

  /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
     from its expression, for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  /* Expand the (possibly wrapped) original expression to get the
     address, and build a BLKmode MEM for the string operation.  */
  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     If it is an ADDR_EXPR, use the operand.  Otherwise, dereference it if
     we can.  First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  off = 0;
  if (TREE_CODE (exp) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
      && host_integerp (TREE_OPERAND (exp, 1), 0)
      && (off = tree_low_cst (TREE_OPERAND (exp, 1), 0)) > 0)
    /* &object + positive-constant: remember the offset and use the
       underlying object for attributes.  */
    exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  else if (TREE_CODE (exp) == ADDR_EXPR)
    exp = TREE_OPERAND (exp, 0);
  else if (POINTER_TYPE_P (TREE_TYPE (exp)))
    exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
  else
    exp = NULL;

  /* Honor attributes derived from exp, except for the alias set
     (as builtin stringops may alias with anything) and the size
     (as stringops may access multiple array elements).  */
  if (exp)
    {
      set_mem_attributes (mem, exp, 0);

      if (off)
	mem = adjust_automodify_address_nv (mem, BLKmode, NULL, off);

      /* Allow the string and memory builtins to overflow from one
	 field into another, see http://gcc.gnu.org/PR23561.
	 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
	 memory accessed by the string or memory builtin will fit
	 within the field.  */
      if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
	{
	  tree mem_expr = MEM_EXPR (mem);
	  HOST_WIDE_INT offset = -1, length = -1;
	  tree inner = exp;

	  /* Strip array refs and conversions to reach the innermost
	     COMPONENT_REF.  */
	  while (TREE_CODE (inner) == ARRAY_REF
		 || CONVERT_EXPR_P (inner)
		 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
		 || TREE_CODE (inner) == SAVE_EXPR)
	    inner = TREE_OPERAND (inner, 0);

	  gcc_assert (TREE_CODE (inner) == COMPONENT_REF);

	  if (MEM_OFFSET (mem)
	      && CONST_INT_P (MEM_OFFSET (mem)))
	    offset = INTVAL (MEM_OFFSET (mem));

	  if (offset >= 0 && len && host_integerp (len, 0))
	    length = tree_low_cst (len, 0);

	  /* Walk outward over the COMPONENT_REF chain; stop (keeping the
	     current MEM_EXPR) once the access provably fits in a field,
	     otherwise peel the reference off.  */
	  while (TREE_CODE (inner) == COMPONENT_REF)
	    {
	      tree field = TREE_OPERAND (inner, 1);
	      gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
	      gcc_assert (field == TREE_OPERAND (mem_expr, 1));

	      /* Bitfields are generally not byte-addressable.  */
	      gcc_assert (!DECL_BIT_FIELD (field)
			  || ((tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
			       % BITS_PER_UNIT) == 0
			      && host_integerp (DECL_SIZE (field), 0)
			      && (TREE_INT_CST_LOW (DECL_SIZE (field))
				  % BITS_PER_UNIT) == 0));

	      /* If we can prove that the memory starting at XEXP (mem, 0) and
		 ending at XEXP (mem, 0) + LENGTH will fit into this field, we
		 can keep the COMPONENT_REF in MEM_EXPR.  But be careful with
		 fields without DECL_SIZE_UNIT like flexible array members.  */
	      if (length >= 0
		  && DECL_SIZE_UNIT (field)
		  && host_integerp (DECL_SIZE_UNIT (field), 0))
		{
		  HOST_WIDE_INT size
		    = TREE_INT_CST_LOW (DECL_SIZE_UNIT (field));
		  if (offset <= size
		      && length <= size
		      && offset + length <= size)
		    break;
		}

	      if (offset >= 0
		  && host_integerp (DECL_FIELD_OFFSET (field), 0))
		offset += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
			  + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
			    / BITS_PER_UNIT;
	      else
		{
		  /* Offset no longer known; give up tracking.  */
		  offset = -1;
		  length = -1;
		}

	      mem_expr = TREE_OPERAND (mem_expr, 0);
	      inner = TREE_OPERAND (inner, 0);
	    }

	  if (mem_expr == NULL)
	    offset = -1;
	  if (mem_expr != MEM_EXPR (mem))
	    {
	      set_mem_expr (mem, mem_expr);
	      set_mem_offset (mem, offset >= 0 ? GEN_INT (offset) : NULL_RTX);
	    }
	}
      /* Stringops may alias anything and may touch more than one
	 element, so drop the alias set and size (see head comment).  */
      set_mem_alias_set (mem, 0);
      set_mem_size (mem, NULL_RTX);
    }

  return mem;
}
1246 \f
1247 /* Built-in functions to perform an untyped call and return. */
1248
/* For each register that may be used for calling a function, this
   gives a mode used to copy the register's value.  VOIDmode indicates
   the register is not used for calling a function.  If the machine
   has register windows, this gives only the outbound registers.
   INCOMING_REGNO gives the corresponding inbound register.
   Initialized lazily by apply_args_size.  */
static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];

/* For each register that may be used for returning values, this gives
   a mode used to copy the register's value.  VOIDmode indicates the
   register is not used for returning values.  If the machine has
   register windows, this gives only the outbound registers.
   INCOMING_REGNO gives the corresponding inbound register.
   Initialized lazily by apply_result_size.  */
static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];

/* For each register that may be used for calling a function, this
   gives the offset of that register into the block returned by
   __builtin_apply_args.  0 indicates that the register is not
   used for calling a function.  Initialized lazily by apply_args_size.  */
static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
1268
1269 /* Return the size required for the block returned by __builtin_apply_args,
1270 and initialize apply_args_mode. */
1271
static int
apply_args_size (void)
{
  static int size = -1;		/* Cached result; -1 means not yet computed.  */
  int align;
  unsigned int regno;
  enum machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
	size += GET_MODE_SIZE (Pmode);

      /* Reserve an aligned slot for every register that can carry an
	 argument, recording each register's offset and mode.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    mode = reg_raw_mode[regno];

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    apply_args_reg_offset[regno] = size;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  {
	    apply_args_mode[regno] = VOIDmode;
	    apply_args_reg_offset[regno] = 0;
	  }
    }
  return size;
}
1313
1314 /* Return the size required for the block returned by __builtin_apply,
1315 and initialize apply_result_mode. */
1316
static int
apply_result_size (void)
{
  static int size = -1;		/* Cached result; -1 means not yet computed.  */
  int align, regno;
  enum machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      /* Reserve an aligned slot for every register that can carry a
	 return value, recording each register's mode.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_VALUE_REGNO_P (regno))
	  {
	    mode = reg_raw_mode[regno];

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  apply_result_mode[regno] = VOIDmode;

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}
1353
1354 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1355 /* Create a vector describing the result block RESULT. If SAVEP is true,
1356 the result block is used to save the values; otherwise it is used to
1357 restore the values. */
1358
static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  enum machine_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  /* Build one SET per return register, pairing the register with its
     aligned slot in the RESULT block — register-to-memory when saving,
     memory-to-register when restoring.  The layout mirrors
     apply_result_size.  */
  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	/* When saving we address the outbound register; when restoring,
	   the corresponding inbound one.  */
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (VOIDmode, mem, reg)
			    : gen_rtx_SET (VOIDmode, reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
1383 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1384
1385 /* Save the state required to perform an untyped call with the same
1386 arguments as were passed to the current function. */
1387
static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  enum machine_mode mode;
  rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  The
     offsets here must match the layout computed by apply_args_size.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
#ifdef STACK_GROWS_DOWNWARD
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  tem
    = force_operand (plus_constant (tem, crtl->args.pretend_args_size),
		     NULL_RTX);
#endif
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
		      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}
1445
1446 /* __builtin_apply_args returns block of memory allocated on
1447 the stack into which is stored the arg pointer, structure
1448 value address, static chain, and all the registers that might
1449 possibly be used in performing a function call. The code is
1450 moved to the start of the function so the incoming values are
1451 saved. */
1452
static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;
    rtx seq;

    /* Generate the register-saving code into a detached sequence so it
       can be placed at function entry rather than at the call site.  */
    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
	&& REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}
1490
1491 /* Perform an untyped call and save the state required to perform an
1492 untyped return of whatever value was returned by the given function. */
1493
static rtx
expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
{
  /* FUNCTION is the function to call, ARGUMENTS the block produced by
     __builtin_apply_args, and ARGSIZE the number of bytes of stack
     arguments to copy.  Returns the address of a block holding the
     callee's return registers.  */
  int size, align, regno;
  enum machine_mode mode;
  rtx incoming_args, result, reg, dest, src, call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;
  rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);

  arguments = convert_memory_address (Pmode, arguments);

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
#ifndef STACK_GROWS_DOWNWARD
  incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
				       incoming_args, 0, OPTAB_LIB_WIDEN);
#endif

  /* Push a new argument block and copy the arguments.  Do not allow
     the (potential) memcpy call below to interfere with our stack
     manipulations.  */
  do_pending_stack_adjust ();
  NO_DEFER_POP;

  /* Save the stack with nonlocal if available.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
  else
#endif
    emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);

  /* Allocate a block of memory onto the stack and copy the memory
     arguments to the outgoing arguments address.  */
  allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);

  /* Set DRAP flag to true, even though allocate_dynamic_stack_space
     may have already set current_function_calls_alloca to true.
     current_function_calls_alloca won't be set if argsize is zero,
     so we have to guarantee need_drap is true here.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  dest = virtual_outgoing_args_rtx;
#ifndef STACK_GROWS_DOWNWARD
  if (CONST_INT_P (argsize))
    dest = plus_constant (dest, -INTVAL (argsize));
  else
    dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
#endif
  dest = gen_rtx_MEM (BLKmode, dest);
  set_mem_align (dest, PARM_BOUNDARY);
  src = gen_rtx_MEM (BLKmode, incoming_args);
  set_mem_align (src, PARM_BOUNDARY);
  emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);

  /* Refer to the argument block.  (The apply_args_size call ensures
     apply_args_mode has been initialized.)  */
  apply_args_size ();
  arguments = gen_rtx_MEM (BLKmode, arguments);
  set_mem_align (arguments, PARM_BOUNDARY);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, regno);
	emit_move_insn (reg, adjust_address (arguments, mode, size));
	use_reg (&call_fusage, reg);
	size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value, adjust_address (arguments, Pmode, size));
      emit_move_insn (struct_value, value);
      if (REG_P (struct_value))
	use_reg (&call_fusage, struct_value);
      size += GET_MODE_SIZE (Pmode);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (function, NULL, &call_fusage, 0, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
#ifdef HAVE_untyped_call
  if (HAVE_untyped_call)
    emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
				      result, result_vector (1, result)));
  else
#endif
#ifdef HAVE_call_value
  if (HAVE_call_value)
    {
      rtx valreg = 0;

      /* Locate the unique return register.  It is not possible to
	 express a call that sets more than one return register using
	 call_value; use untyped_call for that.  In fact, untyped_call
	 only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if ((mode = apply_result_mode[regno]) != VOIDmode)
	  {
	    gcc_assert (!valreg); /* HAVE_untyped_call required.  */

	    valreg = gen_rtx_REG (mode, regno);
	  }

      emit_call_insn (GEN_CALL_VALUE (valreg,
				      gen_rtx_MEM (FUNCTION_MODE, function),
				      const0_rtx, NULL_RTX, const0_rtx));

      emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
    }
  else
#endif
    gcc_unreachable ();

  /* Find the CALL insn we just emitted, and attach the register usage
     information.  */
  call_insn = last_call_insn ();
  add_function_usage_to (call_insn, call_fusage);

  /* Restore the stack.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
  else
#endif
    emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);

  OK_DEFER_POP;

  /* Return the address of the result block.  */
  result = copy_addr_to_reg (XEXP (result, 0));
  return convert_memory_address (ptr_mode, result);
}
1654
1655 /* Perform an untyped return. */
1656
static void
expand_builtin_return (rtx result)
{
  /* RESULT is the address of a block, laid out as by apply_result_size,
     holding the values to place in the return registers.  */
  int size, align, regno;
  enum machine_mode mode;
  rtx reg;
  rtx call_fusage = 0;

  result = convert_memory_address (Pmode, result);

  /* Ensure apply_result_mode has been initialized.  */
  apply_result_size ();
  result = gen_rtx_MEM (BLKmode, result);

#ifdef HAVE_untyped_return
  if (HAVE_untyped_return)
    {
      /* Let the target restore the registers and return in one insn.  */
      emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
      emit_barrier ();
      return;
    }
#endif

  /* Restore the return value and note that each value is used.  */
  size = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
	emit_move_insn (reg, adjust_address (result, mode, size));

	/* Collect the USEs in a separate sequence so they can all be
	   emitted immediately before the return below.  */
	push_to_sequence (call_fusage);
	emit_use (reg);
	call_fusage = get_insns ();
	end_sequence ();
	size += GET_MODE_SIZE (mode);
      }

  /* Put the USE insns before the return.  */
  emit_insn (call_fusage);

  /* Return whatever values was restored by jumping directly to the end
     of the function.  */
  expand_naked_return ();
}
1704
1705 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1706
1707 static enum type_class
1708 type_to_class (tree type)
1709 {
1710 switch (TREE_CODE (type))
1711 {
1712 case VOID_TYPE: return void_type_class;
1713 case INTEGER_TYPE: return integer_type_class;
1714 case ENUMERAL_TYPE: return enumeral_type_class;
1715 case BOOLEAN_TYPE: return boolean_type_class;
1716 case POINTER_TYPE: return pointer_type_class;
1717 case REFERENCE_TYPE: return reference_type_class;
1718 case OFFSET_TYPE: return offset_type_class;
1719 case REAL_TYPE: return real_type_class;
1720 case COMPLEX_TYPE: return complex_type_class;
1721 case FUNCTION_TYPE: return function_type_class;
1722 case METHOD_TYPE: return method_type_class;
1723 case RECORD_TYPE: return record_type_class;
1724 case UNION_TYPE:
1725 case QUAL_UNION_TYPE: return union_type_class;
1726 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1727 ? string_type_class : array_type_class);
1728 case LANG_TYPE: return lang_type_class;
1729 default: return no_type_class;
1730 }
1731 }
1732
1733 /* Expand a call EXP to __builtin_classify_type. */
1734
1735 static rtx
1736 expand_builtin_classify_type (tree exp)
1737 {
1738 if (call_expr_nargs (exp))
1739 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1740 return GEN_INT (no_type_class);
1741 }
1742
/* This helper macro, meant to be used in mathfn_built_in below,
   determines which among a set of three builtin math functions is
   appropriate for a given type mode.  The `F' and `L' cases are
   automatically generated from the `double' case.  On a match, it sets
   the local variables FCODE, FCODEF and FCODEL to the double, float
   and long double variants of the function, respectively.  */
#define CASE_MATHFN(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
  fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
  fcodel = BUILT_IN_MATHFN##L ; break;
/* Similar to above, but appends _R after any F/L suffix (for the
   reentrant variants such as lgamma_r).  */
#define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
  fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
  fcodel = BUILT_IN_MATHFN##L_R ; break;
1756
/* Return the mathematical function equivalent to FN but operating directly
   on TYPE, if available.  If IMPLICIT is true find the function in
   implicit_built_in_decls[], otherwise use built_in_decls[].  If we
   can't do the conversion, return zero.  */

static tree
mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit)
{
  /* IMPLICIT selects between decls the compiler may emit calls to on
     its own and decls available only for explicit user calls.  */
  tree const *const fn_arr
    = implicit ? implicit_built_in_decls : built_in_decls;
  enum built_in_function fcode, fcodef, fcodel;

  /* Map FN, which may be any of the float/double/long double variants,
     to the full triple of codes.  CASE_MATHFN fills in fcode, fcodef
     and fcodel; see the macro definitions above.  */
  switch (fn)
    {
      CASE_MATHFN (BUILT_IN_ACOS)
      CASE_MATHFN (BUILT_IN_ACOSH)
      CASE_MATHFN (BUILT_IN_ASIN)
      CASE_MATHFN (BUILT_IN_ASINH)
      CASE_MATHFN (BUILT_IN_ATAN)
      CASE_MATHFN (BUILT_IN_ATAN2)
      CASE_MATHFN (BUILT_IN_ATANH)
      CASE_MATHFN (BUILT_IN_CBRT)
      CASE_MATHFN (BUILT_IN_CEIL)
      CASE_MATHFN (BUILT_IN_CEXPI)
      CASE_MATHFN (BUILT_IN_COPYSIGN)
      CASE_MATHFN (BUILT_IN_COS)
      CASE_MATHFN (BUILT_IN_COSH)
      CASE_MATHFN (BUILT_IN_DREM)
      CASE_MATHFN (BUILT_IN_ERF)
      CASE_MATHFN (BUILT_IN_ERFC)
      CASE_MATHFN (BUILT_IN_EXP)
      CASE_MATHFN (BUILT_IN_EXP10)
      CASE_MATHFN (BUILT_IN_EXP2)
      CASE_MATHFN (BUILT_IN_EXPM1)
      CASE_MATHFN (BUILT_IN_FABS)
      CASE_MATHFN (BUILT_IN_FDIM)
      CASE_MATHFN (BUILT_IN_FLOOR)
      CASE_MATHFN (BUILT_IN_FMA)
      CASE_MATHFN (BUILT_IN_FMAX)
      CASE_MATHFN (BUILT_IN_FMIN)
      CASE_MATHFN (BUILT_IN_FMOD)
      CASE_MATHFN (BUILT_IN_FREXP)
      CASE_MATHFN (BUILT_IN_GAMMA)
      CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
      CASE_MATHFN (BUILT_IN_HUGE_VAL)
      CASE_MATHFN (BUILT_IN_HYPOT)
      CASE_MATHFN (BUILT_IN_ILOGB)
      CASE_MATHFN (BUILT_IN_INF)
      CASE_MATHFN (BUILT_IN_ISINF)
      CASE_MATHFN (BUILT_IN_J0)
      CASE_MATHFN (BUILT_IN_J1)
      CASE_MATHFN (BUILT_IN_JN)
      CASE_MATHFN (BUILT_IN_LCEIL)
      CASE_MATHFN (BUILT_IN_LDEXP)
      CASE_MATHFN (BUILT_IN_LFLOOR)
      CASE_MATHFN (BUILT_IN_LGAMMA)
      CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
      CASE_MATHFN (BUILT_IN_LLCEIL)
      CASE_MATHFN (BUILT_IN_LLFLOOR)
      CASE_MATHFN (BUILT_IN_LLRINT)
      CASE_MATHFN (BUILT_IN_LLROUND)
      CASE_MATHFN (BUILT_IN_LOG)
      CASE_MATHFN (BUILT_IN_LOG10)
      CASE_MATHFN (BUILT_IN_LOG1P)
      CASE_MATHFN (BUILT_IN_LOG2)
      CASE_MATHFN (BUILT_IN_LOGB)
      CASE_MATHFN (BUILT_IN_LRINT)
      CASE_MATHFN (BUILT_IN_LROUND)
      CASE_MATHFN (BUILT_IN_MODF)
      CASE_MATHFN (BUILT_IN_NAN)
      CASE_MATHFN (BUILT_IN_NANS)
      CASE_MATHFN (BUILT_IN_NEARBYINT)
      CASE_MATHFN (BUILT_IN_NEXTAFTER)
      CASE_MATHFN (BUILT_IN_NEXTTOWARD)
      CASE_MATHFN (BUILT_IN_POW)
      CASE_MATHFN (BUILT_IN_POWI)
      CASE_MATHFN (BUILT_IN_POW10)
      CASE_MATHFN (BUILT_IN_REMAINDER)
      CASE_MATHFN (BUILT_IN_REMQUO)
      CASE_MATHFN (BUILT_IN_RINT)
      CASE_MATHFN (BUILT_IN_ROUND)
      CASE_MATHFN (BUILT_IN_SCALB)
      CASE_MATHFN (BUILT_IN_SCALBLN)
      CASE_MATHFN (BUILT_IN_SCALBN)
      CASE_MATHFN (BUILT_IN_SIGNBIT)
      CASE_MATHFN (BUILT_IN_SIGNIFICAND)
      CASE_MATHFN (BUILT_IN_SIN)
      CASE_MATHFN (BUILT_IN_SINCOS)
      CASE_MATHFN (BUILT_IN_SINH)
      CASE_MATHFN (BUILT_IN_SQRT)
      CASE_MATHFN (BUILT_IN_TAN)
      CASE_MATHFN (BUILT_IN_TANH)
      CASE_MATHFN (BUILT_IN_TGAMMA)
      CASE_MATHFN (BUILT_IN_TRUNC)
      CASE_MATHFN (BUILT_IN_Y0)
      CASE_MATHFN (BUILT_IN_Y1)
      CASE_MATHFN (BUILT_IN_YN)

    default:
      return NULL_TREE;
    }

  /* Select the variant whose argument type matches TYPE.  */
  if (TYPE_MAIN_VARIANT (type) == double_type_node)
    return fn_arr[fcode];
  else if (TYPE_MAIN_VARIANT (type) == float_type_node)
    return fn_arr[fcodef];
  else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
    return fn_arr[fcodel];
  else
    return NULL_TREE;
}
1868
/* Like mathfn_built_in_1(), but always use the implicit array, i.e.
   only return decls the compiler is allowed to emit calls to on its
   own.  Returns NULL_TREE when no suitable variant exists for TYPE.  */

tree
mathfn_built_in (tree type, enum built_in_function fn)
{
  return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
}
1876
/* If errno must be maintained, expand the RTL to check if the result,
   TARGET, of a built-in function call, EXP, is NaN, and if so set
   errno to EDOM.  */

static void
expand_errno_check (tree exp, rtx target)
{
  rtx lab = gen_label_rtx ();

  /* Test the result; if it is NaN, set errno=EDOM because
     the argument was not in the domain.  A self-comparison with EQ
     holds for every value except NaN, so this jump to LAB skips the
     errno handling exactly when the result is not NaN.  */
  do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
			   NULL_RTX, NULL_RTX, lab);

#ifdef TARGET_EDOM
  /* If this built-in doesn't throw an exception, set errno directly.  */
  if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
    {
#ifdef GEN_ERRNO_RTX
      rtx errno_rtx = GEN_ERRNO_RTX;
#else
      rtx errno_rtx
	  = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
#endif
      emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
      emit_label (lab);
      return;
    }
#endif

  /* Make sure the library call isn't expanded as a tail call.  */
  CALL_EXPR_TAILCALL (exp) = 0;

  /* We can't set errno=EDOM directly; let the library call do it.
     Pop the arguments right away in case the call gets deleted.  */
  NO_DEFER_POP;
  expand_call (exp, target, 0);
  OK_DEFER_POP;
  emit_label (lab);
}
1917
1918 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1919 Return NULL_RTX if a normal call should be emitted rather than expanding
1920 the function in-line. EXP is the expression that is a call to the builtin
1921 function; if convenient, the result should be placed in TARGET.
1922 SUBTARGET may be used as the target for computing one of EXP's operands. */
1923
1924 static rtx
1925 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1926 {
1927 optab builtin_optab;
1928 rtx op0, insns, before_call;
1929 tree fndecl = get_callee_fndecl (exp);
1930 enum machine_mode mode;
1931 bool errno_set = false;
1932 tree arg;
1933
1934 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
1935 return NULL_RTX;
1936
1937 arg = CALL_EXPR_ARG (exp, 0);
1938
1939 switch (DECL_FUNCTION_CODE (fndecl))
1940 {
1941 CASE_FLT_FN (BUILT_IN_SQRT):
1942 errno_set = ! tree_expr_nonnegative_p (arg);
1943 builtin_optab = sqrt_optab;
1944 break;
1945 CASE_FLT_FN (BUILT_IN_EXP):
1946 errno_set = true; builtin_optab = exp_optab; break;
1947 CASE_FLT_FN (BUILT_IN_EXP10):
1948 CASE_FLT_FN (BUILT_IN_POW10):
1949 errno_set = true; builtin_optab = exp10_optab; break;
1950 CASE_FLT_FN (BUILT_IN_EXP2):
1951 errno_set = true; builtin_optab = exp2_optab; break;
1952 CASE_FLT_FN (BUILT_IN_EXPM1):
1953 errno_set = true; builtin_optab = expm1_optab; break;
1954 CASE_FLT_FN (BUILT_IN_LOGB):
1955 errno_set = true; builtin_optab = logb_optab; break;
1956 CASE_FLT_FN (BUILT_IN_LOG):
1957 errno_set = true; builtin_optab = log_optab; break;
1958 CASE_FLT_FN (BUILT_IN_LOG10):
1959 errno_set = true; builtin_optab = log10_optab; break;
1960 CASE_FLT_FN (BUILT_IN_LOG2):
1961 errno_set = true; builtin_optab = log2_optab; break;
1962 CASE_FLT_FN (BUILT_IN_LOG1P):
1963 errno_set = true; builtin_optab = log1p_optab; break;
1964 CASE_FLT_FN (BUILT_IN_ASIN):
1965 builtin_optab = asin_optab; break;
1966 CASE_FLT_FN (BUILT_IN_ACOS):
1967 builtin_optab = acos_optab; break;
1968 CASE_FLT_FN (BUILT_IN_TAN):
1969 builtin_optab = tan_optab; break;
1970 CASE_FLT_FN (BUILT_IN_ATAN):
1971 builtin_optab = atan_optab; break;
1972 CASE_FLT_FN (BUILT_IN_FLOOR):
1973 builtin_optab = floor_optab; break;
1974 CASE_FLT_FN (BUILT_IN_CEIL):
1975 builtin_optab = ceil_optab; break;
1976 CASE_FLT_FN (BUILT_IN_TRUNC):
1977 builtin_optab = btrunc_optab; break;
1978 CASE_FLT_FN (BUILT_IN_ROUND):
1979 builtin_optab = round_optab; break;
1980 CASE_FLT_FN (BUILT_IN_NEARBYINT):
1981 builtin_optab = nearbyint_optab;
1982 if (flag_trapping_math)
1983 break;
1984 /* Else fallthrough and expand as rint. */
1985 CASE_FLT_FN (BUILT_IN_RINT):
1986 builtin_optab = rint_optab; break;
1987 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
1988 builtin_optab = significand_optab; break;
1989 default:
1990 gcc_unreachable ();
1991 }
1992
1993 /* Make a suitable register to place result in. */
1994 mode = TYPE_MODE (TREE_TYPE (exp));
1995
1996 if (! flag_errno_math || ! HONOR_NANS (mode))
1997 errno_set = false;
1998
1999 /* Before working hard, check whether the instruction is available. */
2000 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
2001 {
2002 target = gen_reg_rtx (mode);
2003
2004 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2005 need to expand the argument again. This way, we will not perform
2006 side-effects more the once. */
2007 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2008
2009 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2010
2011 start_sequence ();
2012
2013 /* Compute into TARGET.
2014 Set TARGET to wherever the result comes back. */
2015 target = expand_unop (mode, builtin_optab, op0, target, 0);
2016
2017 if (target != 0)
2018 {
2019 if (errno_set)
2020 expand_errno_check (exp, target);
2021
2022 /* Output the entire sequence. */
2023 insns = get_insns ();
2024 end_sequence ();
2025 emit_insn (insns);
2026 return target;
2027 }
2028
2029 /* If we were unable to expand via the builtin, stop the sequence
2030 (without outputting the insns) and call to the library function
2031 with the stabilized argument list. */
2032 end_sequence ();
2033 }
2034
2035 before_call = get_last_insn ();
2036
2037 return expand_call (exp, target, target == const0_rtx);
2038 }
2039
/* Expand a call to the builtin binary math functions (pow and atan2).
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, op1, insns;
  int op1_type = REAL_TYPE;
  tree fndecl = get_callee_fndecl (exp);
  tree arg0, arg1;
  enum machine_mode mode;
  bool errno_set = true;

  /* Most of these builtins take two floating point arguments, but the
     ldexp family takes an integer second argument.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
    CASE_FLT_FN (BUILT_IN_LDEXP):
      op1_type = INTEGER_TYPE;
      /* Fall through to the shared break.  */
    default:
      break;
    }

  if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_POW):
      builtin_optab = pow_optab; break;
    CASE_FLT_FN (BUILT_IN_ATAN2):
      builtin_optab = atan2_optab; break;
    CASE_FLT_FN (BUILT_IN_SCALB):
      /* scalb is only expandable when FLT_RADIX is 2.  */
      if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
	return 0;
      builtin_optab = scalb_optab; break;
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
      if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
	return 0;
      /* Fall through...  */
    CASE_FLT_FN (BUILT_IN_LDEXP):
      builtin_optab = ldexp_optab; break;
    CASE_FLT_FN (BUILT_IN_FMOD):
      builtin_optab = fmod_optab; break;
    CASE_FLT_FN (BUILT_IN_REMAINDER):
    CASE_FLT_FN (BUILT_IN_DREM):
      builtin_optab = remainder_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
    return NULL_RTX;

  target = gen_reg_rtx (mode);

  if (! flag_errno_math || ! HONOR_NANS (mode))
    errno_set = false;

  /* Always stabilize the argument list, so side effects are not
     performed twice if we expand the argument again.  */
  CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
  CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);

  op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
  op1 = expand_normal (arg1);

  start_sequence ();

  /* Compute into TARGET.
     Set TARGET to wherever the result comes back.  */
  target = expand_binop (mode, builtin_optab, op0, op1,
			 target, 0, OPTAB_DIRECT);

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call to the library function
     with the stabilized argument list.  */
  if (target == 0)
    {
      end_sequence ();
      return expand_call (exp, target, target == const0_rtx);
    }

  if (errno_set)
    expand_errno_check (exp, target);

  /* Output the entire sequence.  */
  insns = get_insns ();
  end_sequence ();
  emit_insn (insns);

  return target;
}
2145
/* Expand a call to the builtin sin and cos math functions.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, insns;
  tree fndecl = get_callee_fndecl (exp);
  enum machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  /* Prefer the combined sincos expander for both sin and cos; the
     unneeded half of the result is simply discarded below.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_COS):
      builtin_optab = sincos_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Check if sincos insn is available, otherwise fallback
     to sin or cos insn.  */
  if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_FLT_FN (BUILT_IN_SIN):
	builtin_optab = sin_optab; break;
      CASE_FLT_FN (BUILT_IN_COS):
	builtin_optab = cos_optab; break;
      default:
	gcc_unreachable ();
      }

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
    {
      target = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      /* Compute into TARGET.
	 Set TARGET to wherever the result comes back.  */
      if (builtin_optab == sincos_optab)
	{
	  int result;

	  /* sincos produces two values; route TARGET to the one we
	     want and discard the other (the 0 operand).  */
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    CASE_FLT_FN (BUILT_IN_SIN):
	      result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_COS):
	      result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
	      break;
	    default:
	      gcc_unreachable ();
	    }
	  gcc_assert (result);
	}
      else
	{
	  target = expand_unop (mode, builtin_optab, op0, target, 0);
	}

      if (target != 0)
	{
	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return target;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call to the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  target = expand_call (exp, target, target == const0_rtx);

  return target;
}
2249
/* Expand a call to one of the builtin math functions that operate on
   floating point argument and output an integer result (ilogb, isinf,
   isnan, etc).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_interclass_mathfn (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab = 0;
  enum insn_code icode = CODE_FOR_nothing;
  rtx op0;
  tree fndecl = get_callee_fndecl (exp);
  enum machine_mode mode;
  bool errno_set = false;
  tree arg;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_ILOGB):
      errno_set = true; builtin_optab = ilogb_optab; break;
    CASE_FLT_FN (BUILT_IN_ISINF):
      builtin_optab = isinf_optab; break;
    case BUILT_IN_ISNORMAL:
    case BUILT_IN_ISFINITE:
    CASE_FLT_FN (BUILT_IN_FINITE):
      /* These builtins have no optabs (yet).  */
      break;
    default:
      gcc_unreachable ();
    }

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math && errno_set)
    return NULL_RTX;

  /* Optab mode depends on the mode of the input argument.  */
  mode = TYPE_MODE (TREE_TYPE (arg));

  if (builtin_optab)
    icode = optab_handler (builtin_optab, mode)->insn_code;

  /* Before working hard, check whether the instruction is available.  */
  if (icode != CODE_FOR_nothing)
    {
      /* Make a suitable register to place result in.  */
      if (!target
	  || GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
	target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

      gcc_assert (insn_data[icode].operand[0].predicate
		  (target, GET_MODE (target)));

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      if (mode != GET_MODE (op0))
	op0 = convert_to_mode (mode, op0, 0);

      /* Compute into TARGET.
	 Set TARGET to wherever the result comes back.  */
      emit_unop_insn (icode, target, op0, UNKNOWN);
      return target;
    }

  /* If there is no optab, try generic code: rewrite the classification
     as comparisons of fabs(x) against the extreme finite values of the
     type, then expand the resulting expression.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
      tree result;

    CASE_FLT_FN (BUILT_IN_ISINF):
      {
	/* isinf(x) -> isgreater(fabs(x),DBL_MAX).  */
	tree const isgr_fn = built_in_decls[BUILT_IN_ISGREATER];
	tree const type = TREE_TYPE (arg);
	REAL_VALUE_TYPE r;
	char buf[128];

	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
	real_from_string (&r, buf);
	result = build_call_expr (isgr_fn, 2,
				  fold_build1_loc (loc, ABS_EXPR, type, arg),
				  build_real (type, r));
	return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
      }
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_ISFINITE:
      {
	/* isfinite(x) -> islessequal(fabs(x),DBL_MAX).  */
	tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
	tree const type = TREE_TYPE (arg);
	REAL_VALUE_TYPE r;
	char buf[128];

	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
	real_from_string (&r, buf);
	result = build_call_expr (isle_fn, 2,
				  fold_build1_loc (loc, ABS_EXPR, type, arg),
				  build_real (type, r));
	return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
      }
    case BUILT_IN_ISNORMAL:
      {
	/* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
	   islessequal(fabs(x),DBL_MAX).  */
	tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
	tree const isge_fn = built_in_decls[BUILT_IN_ISGREATEREQUAL];
	tree const type = TREE_TYPE (arg);
	REAL_VALUE_TYPE rmax, rmin;
	char buf[128];

	/* rmin is the smallest normalized value, 2**(emin-1).  */
	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
	real_from_string (&rmax, buf);
	sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
	real_from_string (&rmin, buf);
	/* Save fabs(x) so both comparisons evaluate it only once.  */
	arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
	result = build_call_expr (isle_fn, 2, arg,
				  build_real (type, rmax));
	result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
			      build_call_expr (isge_fn, 2, arg,
					       build_real (type, rmin)));
	return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
      }
    default:
      break;
    }

  target = expand_call (exp, target, target == const0_rtx);

  return target;
}
2393
/* Expand a call to the builtin sincos math function.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function.  On success the (void) call expands to const0_rtx.  */

static rtx
expand_builtin_sincos (tree exp)
{
  rtx op0, op1, op2, target1, target2;
  enum machine_mode mode;
  tree arg, sinp, cosp;
  int result;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE,
			 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* sincos(arg, sinp, cosp): SINP and COSP are the output pointers.  */
  arg = CALL_EXPR_ARG (exp, 0);
  sinp = CALL_EXPR_ARG (exp, 1);
  cosp = CALL_EXPR_ARG (exp, 2);

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Check if sincos insn is available, otherwise emit the call.  */
  if (optab_handler (sincos_optab, mode)->insn_code == CODE_FOR_nothing)
    return NULL_RTX;

  target1 = gen_reg_rtx (mode);
  target2 = gen_reg_rtx (mode);

  op0 = expand_normal (arg);
  /* op1 and op2 are the memory locations *sinp and *cosp.  */
  op1 = expand_normal (build_fold_indirect_ref_loc (loc, sinp));
  op2 = expand_normal (build_fold_indirect_ref_loc (loc, cosp));

  /* Compute into target1 and target2.
     Set TARGET to wherever the result comes back.  */
  result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
  gcc_assert (result);

  /* Move target1 and target2 to the memory locations indicated
     by op1 and op2.  */
  emit_move_insn (op1, target1);
  emit_move_insn (op2, target2);

  return const0_rtx;
}
2442
/* Expand a call to the internal cexpi builtin to the sincos math function.
   EXP is the expression that is a call to the builtin function; if convenient,
   the result should be placed in TARGET.  SUBTARGET may be used as the target
   for computing one of EXP's operands.

   cexpi(x) == cos(x) + i*sin(x); three strategies are tried in order:
   the sincos optab, a libcall to sincos, and finally a libcall to cexp.  */

static rtx
expand_builtin_cexpi (tree exp, rtx target, rtx subtarget)
{
  tree fndecl = get_callee_fndecl (exp);
  tree arg, type;
  enum machine_mode mode;
  rtx op0, op1, op2;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Try expanding via a sincos optab, fall back to emitting a libcall
     to sincos or cexp.  We are sure we have sincos or cexp because cexpi
     is only generated from sincos, cexp or if we have either of them.  */
  if (optab_handler (sincos_optab, mode)->insn_code != CODE_FOR_nothing)
    {
      op1 = gen_reg_rtx (mode);
      op2 = gen_reg_rtx (mode);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      /* Compute into op1 and op2 (op1 = sin, op2 = cos).  */
      expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
    }
  else if (TARGET_HAS_SINCOS)
    {
      tree call, fn = NULL_TREE;
      tree top1, top2;
      rtx op1a, op2a;

      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	fn = built_in_decls[BUILT_IN_SINCOSF];
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	fn = built_in_decls[BUILT_IN_SINCOS];
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	fn = built_in_decls[BUILT_IN_SINCOSL];
      else
	gcc_unreachable ();

      /* Create two stack temporaries and pass their addresses to the
	 sincos libcall as trees.  */
      op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
      op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
      op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
      op2a = copy_to_mode_reg (Pmode, XEXP (op2, 0));
      top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
      top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);

      /* Make sure not to fold the sincos call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
				      call, 3, arg, top1, top2));
    }
  else
    {
      tree call, fn = NULL_TREE, narg;
      tree ctype = build_complex_type (type);

      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	fn = built_in_decls[BUILT_IN_CEXPF];
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	fn = built_in_decls[BUILT_IN_CEXP];
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	fn = built_in_decls[BUILT_IN_CEXPL];
      else
	gcc_unreachable ();

      /* If we don't have a decl for cexp create one.  This is the
	 friendliest fallback if the user calls __builtin_cexpi
	 without full target C99 function support.  */
      if (fn == NULL_TREE)
	{
	  tree fntype;
	  const char *name = NULL;

	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	    name = "cexpf";
	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	    name = "cexp";
	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	    name = "cexpl";

	  fntype = build_function_type_list (ctype, ctype, NULL_TREE);
	  fn = build_fn_decl (name, fntype);
	}

      /* cexpi(x) == cexp(0 + x*i), so build the complex argument.  */
      narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
			      build_real (type, dconst0), arg);

      /* Make sure not to fold the cexp call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      return expand_expr (build_call_nary (ctype, call, 1, narg),
			  target, VOIDmode, EXPAND_NORMAL);
    }

  /* Now build the proper return type: cos(x) + i*sin(x).  */
  return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
			      make_tree (TREE_TYPE (arg), op2),
			      make_tree (TREE_TYPE (arg), op1)),
		      target, VOIDmode, EXPAND_NORMAL);
}
2552
/* Expand a call to one of the builtin rounding functions gcc defines
   as an extension (lfloor and lceil).  As these are gcc extensions we
   do not need to worry about setting errno to EDOM.
   If expanding via optab fails, lower expression to (int)(floor(x)).
   EXP is the expression that is a call to the builtin function;
   if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0, insns, tmp;
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fallback_fn;
  tree fallback_fndecl;
  enum machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    gcc_unreachable ();

  arg = CALL_EXPR_ARG (exp, 0);

  /* Pick the conversion optab and the plain rounding function to fall
     back to if no direct insn exists.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_LCEIL):
    CASE_FLT_FN (BUILT_IN_LLCEIL):
      builtin_optab = lceil_optab;
      fallback_fn = BUILT_IN_CEIL;
      break;

    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      builtin_optab = lfloor_optab;
      fallback_fn = BUILT_IN_FLOOR;
      break;

    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  target = gen_reg_rtx (mode);

  /* Wrap the computation of the argument in a SAVE_EXPR, as we may
     need to expand the argument again.  This way, we will not perform
     side-effects more than once.  */
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

  op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

  start_sequence ();

  /* Compute into TARGET.  */
  if (expand_sfix_optab (target, op0, builtin_optab))
    {
      /* Output the entire sequence.  */
      insns = get_insns ();
      end_sequence ();
      emit_insn (insns);
      return target;
    }

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns).  */
  end_sequence ();

  /* Fall back to floating point rounding optab.  */
  fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);

  /* For non-C99 targets we may end up without a fallback fndecl here
     if the user called __builtin_lfloor directly.  In this case emit
     a call to the floor/ceil variants nevertheless.  This should result
     in the best user experience for not full C99 targets.  */
  if (fallback_fndecl == NULL_TREE)
    {
      tree fntype;
      const char *name = NULL;

      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_LCEIL:
	case BUILT_IN_LLCEIL:
	  name = "ceil";
	  break;
	case BUILT_IN_LCEILF:
	case BUILT_IN_LLCEILF:
	  name = "ceilf";
	  break;
	case BUILT_IN_LCEILL:
	case BUILT_IN_LLCEILL:
	  name = "ceill";
	  break;
	case BUILT_IN_LFLOOR:
	case BUILT_IN_LLFLOOR:
	  name = "floor";
	  break;
	case BUILT_IN_LFLOORF:
	case BUILT_IN_LLFLOORF:
	  name = "floorf";
	  break;
	case BUILT_IN_LFLOORL:
	case BUILT_IN_LLFLOORL:
	  name = "floorl";
	  break;
	default:
	  gcc_unreachable ();
	}

      fntype = build_function_type_list (TREE_TYPE (arg),
					 TREE_TYPE (arg), NULL_TREE);
      fallback_fndecl = build_fn_decl (name, fntype);
    }

  /* Lower to floor(x)/ceil(x) followed by a float-to-int conversion.  */
  exp = build_call_expr (fallback_fndecl, 1, arg);

  tmp = expand_normal (exp);

  /* Truncate the result of floating point optab to integer
     via expand_fix ().  */
  target = gen_reg_rtx (mode);
  expand_fix (target, tmp, 0);

  return target;
}
2680
/* Expand a call to one of the builtin math functions doing integer
   conversion (lrint).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn_2 (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0, insns;
  tree fndecl = get_callee_fndecl (exp);
  tree arg;
  enum machine_mode mode;

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math)
    return NULL_RTX;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    gcc_unreachable ();

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_LLRINT):
      builtin_optab = lrint_optab; break;
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_LLROUND):
      builtin_optab = lround_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  target = gen_reg_rtx (mode);

  /* Wrap the computation of the argument in a SAVE_EXPR, as we may
     need to expand the argument again.  This way, we will not perform
     side-effects more than once.  */
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

  op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

  start_sequence ();

  if (expand_sfix_optab (target, op0, builtin_optab))
    {
      /* Output the entire sequence.  */
      insns = get_insns ();
      end_sequence ();
      emit_insn (insns);
      return target;
    }

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call to the library function
     with the stabilized argument list.  */
  end_sequence ();

  target = expand_call (exp, target, target == const0_rtx);

  return target;
}
2749
2750 /* To evaluate powi(x,n), the floating point value x raised to the
2751 constant integer exponent n, we use a hybrid algorithm that
2752 combines the "window method" with look-up tables. For an
2753 introduction to exponentiation algorithms and "addition chains",
2754 see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
2755 "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
2756 3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
2757 Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998. */
2758
2759 /* Provide a default value for POWI_MAX_MULTS, the maximum number of
2760 multiplications to inline before calling the system library's pow
2761 function. powi(x,n) requires at worst 2*bits(n)-2 multiplications,
2762 so this default never requires calling pow, powf or powl. */
2763
2764 #ifndef POWI_MAX_MULTS
2765 #define POWI_MAX_MULTS (2*HOST_BITS_PER_WIDE_INT-2)
2766 #endif
2767
2768 /* The size of the "optimal power tree" lookup table. All
2769 exponents less than this value are simply looked up in the
2770 powi_table below. This threshold is also used to size the
2771 cache of pseudo registers that hold intermediate results. */
2772 #define POWI_TABLE_SIZE 256
2773
2774 /* The size, in bits of the window, used in the "window method"
2775 exponentiation algorithm. This is equivalent to a radix of
2776 (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method". */
2777 #define POWI_WINDOW_SIZE 3
2778
2779 /* The following table is an efficient representation of an
2780 "optimal power tree". For each value, i, the corresponding
2781    "optimal power tree".  For each value, i, the corresponding
2782 sequence for calculating pow(x,i) can be found by evaluating
2783 pow(x,j)*pow(x,i-j). An optimal power tree for the first
2784 100 integers is given in Knuth's "Seminumerical algorithms". */
2785
/* powi_table[i] == j encodes the split x**i = x**j * x**(i-j) used by
   powi_lookup_cost and expand_powi_1; entry 0 is unused.  */
static const unsigned char powi_table[POWI_TABLE_SIZE] =
  {
      0,   1,   1,   2,   2,   3,   3,   4,  /*   0 -   7 */
      4,   6,   5,   6,   6,  10,   7,   9,  /*   8 -  15 */
      8,  16,   9,  16,  10,  12,  11,  13,  /*  16 -  23 */
     12,  17,  13,  18,  14,  24,  15,  26,  /*  24 -  31 */
     16,  17,  17,  19,  18,  33,  19,  26,  /*  32 -  39 */
     20,  25,  21,  40,  22,  27,  23,  44,  /*  40 -  47 */
     24,  32,  25,  34,  26,  29,  27,  44,  /*  48 -  55 */
     28,  31,  29,  34,  30,  60,  31,  36,  /*  56 -  63 */
     32,  64,  33,  34,  34,  46,  35,  37,  /*  64 -  71 */
     36,  65,  37,  50,  38,  48,  39,  69,  /*  72 -  79 */
     40,  49,  41,  43,  42,  51,  43,  58,  /*  80 -  87 */
     44,  64,  45,  47,  46,  59,  47,  76,  /*  88 -  95 */
     48,  65,  49,  66,  50,  67,  51,  66,  /*  96 - 103 */
     52,  70,  53,  74,  54, 104,  55,  74,  /* 104 - 111 */
     56,  64,  57,  69,  58,  78,  59,  68,  /* 112 - 119 */
     60,  61,  61,  80,  62,  75,  63,  68,  /* 120 - 127 */
     64,  65,  65, 128,  66, 129,  67,  90,  /* 128 - 135 */
     68,  73,  69, 131,  70,  94,  71,  88,  /* 136 - 143 */
     72, 128,  73,  98,  74, 132,  75, 121,  /* 144 - 151 */
     76, 102,  77, 124,  78, 132,  79, 106,  /* 152 - 159 */
     80,  97,  81, 160,  82,  99,  83, 134,  /* 160 - 167 */
     84,  86,  85,  95,  86, 160,  87, 100,  /* 168 - 175 */
     88, 113,  89,  98,  90, 107,  91, 122,  /* 176 - 183 */
     92, 111,  93, 102,  94, 126,  95, 150,  /* 184 - 191 */
     96, 128,  97, 130,  98, 133,  99, 195,  /* 192 - 199 */
    100, 128, 101, 123, 102, 164, 103, 138,  /* 200 - 207 */
    104, 145, 105, 146, 106, 109, 107, 149,  /* 208 - 215 */
    108, 200, 109, 146, 110, 170, 111, 157,  /* 216 - 223 */
    112, 128, 113, 130, 114, 182, 115, 132,  /* 224 - 231 */
    116, 200, 117, 132, 118, 158, 119, 206,  /* 232 - 239 */
    120, 240, 121, 162, 122, 147, 123, 152,  /* 240 - 247 */
    124, 166, 125, 214, 126, 138, 127, 153,  /* 248 - 255 */
  };
2821
2822
2823 /* Return the number of multiplications required to calculate
2824 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2825 subroutine of powi_cost. CACHE is an array indicating
2826 which exponents have already been calculated. */
2827
2828 static int
2829 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
2830 {
2831 /* If we've already calculated this exponent, then this evaluation
2832 doesn't require any additional multiplications. */
2833 if (cache[n])
2834 return 0;
2835
2836 cache[n] = true;
2837 return powi_lookup_cost (n - powi_table[n], cache)
2838 + powi_lookup_cost (powi_table[n], cache) + 1;
2839 }
2840
2841 /* Return the number of multiplications required to calculate
2842 powi(x,n) for an arbitrary x, given the exponent N. This
2843 function needs to be kept in sync with expand_powi below. */
2844
2845 static int
2846 powi_cost (HOST_WIDE_INT n)
2847 {
2848 bool cache[POWI_TABLE_SIZE];
2849 unsigned HOST_WIDE_INT digit;
2850 unsigned HOST_WIDE_INT val;
2851 int result;
2852
2853 if (n == 0)
2854 return 0;
2855
2856 /* Ignore the reciprocal when calculating the cost. */
2857 val = (n < 0) ? -n : n;
2858
2859 /* Initialize the exponent cache. */
2860 memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
2861 cache[1] = true;
2862
2863 result = 0;
2864
2865 while (val >= POWI_TABLE_SIZE)
2866 {
2867 if (val & 1)
2868 {
2869 digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2870 result += powi_lookup_cost (digit, cache)
2871 + POWI_WINDOW_SIZE + 1;
2872 val >>= POWI_WINDOW_SIZE;
2873 }
2874 else
2875 {
2876 val >>= 1;
2877 result++;
2878 }
2879 }
2880
2881 return result + powi_lookup_cost (val, cache);
2882 }
2883
2884 /* Recursive subroutine of expand_powi. This function takes the array,
2885 CACHE, of already calculated exponents and an exponent N and returns
2886 an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE. */
2887
static rtx
expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
{
  unsigned HOST_WIDE_INT digit;
  rtx target, result;
  rtx op0, op1;

  /* Small exponents use the "optimal power tree": x**n is computed as
     x**(n - powi_table[n]) * x**powi_table[n], with results memoized
     in CACHE so each distinct exponent is emitted only once.  */
  if (n < POWI_TABLE_SIZE)
    {
      if (cache[n])
	return cache[n];

      /* Enter the register in the cache before recursing, so shared
	 subexpressions find it.  */
      target = gen_reg_rtx (mode);
      cache[n] = target;

      op0 = expand_powi_1 (mode, n - powi_table[n], cache);
      op1 = expand_powi_1 (mode, powi_table[n], cache);
    }
  else if (n & 1)
    {
      /* Large odd exponent: peel off the low POWI_WINDOW_SIZE bits
	 (the "window" digit) and recurse on the even remainder.  */
      target = gen_reg_rtx (mode);
      digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
      op0 = expand_powi_1 (mode, n - digit, cache);
      op1 = expand_powi_1 (mode, digit, cache);
    }
  else
    {
      /* Large even exponent: square x**(n/2).  */
      target = gen_reg_rtx (mode);
      op0 = expand_powi_1 (mode, n >> 1, cache);
      op1 = op0;
    }

  result = expand_mult (mode, op0, op1, target, 0);
  if (result != target)
    emit_move_insn (target, result);
  return target;
}
2925
2926 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
2927 floating point operand in mode MODE, and N is the exponent. This
2928 function needs to be kept in sync with powi_cost above. */
2929
2930 static rtx
2931 expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
2932 {
2933 unsigned HOST_WIDE_INT val;
2934 rtx cache[POWI_TABLE_SIZE];
2935 rtx result;
2936
2937 if (n == 0)
2938 return CONST1_RTX (mode);
2939
2940 val = (n < 0) ? -n : n;
2941
2942 memset (cache, 0, sizeof (cache));
2943 cache[1] = x;
2944
2945 result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
2946
2947 /* If the original exponent was negative, reciprocate the result. */
2948 if (n < 0)
2949 result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2950 result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2951
2952 return result;
2953 }
2954
2955 /* Expand a call to the pow built-in mathematical function. Return NULL_RTX if
2956 a normal call should be emitted rather than expanding the function
2957 in-line. EXP is the expression that is a call to the builtin
2958 function; if convenient, the result should be placed in TARGET. */
2959
static rtx
expand_builtin_pow (tree exp, rtx target, rtx subtarget)
{
  tree arg0, arg1;
  tree fn, narg0;
  tree type = TREE_TYPE (exp);
  REAL_VALUE_TYPE cint, c, c2;
  HOST_WIDE_INT n;
  rtx op, op2;
  enum machine_mode mode = TYPE_MODE (type);

  if (! validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);

  /* Only constant exponents get the special expansions below; anything
     else goes through the generic two-argument mathfn expander.  */
  if (TREE_CODE (arg1) != REAL_CST
      || TREE_OVERFLOW (arg1))
    return expand_builtin_mathfn_2 (exp, target, subtarget);

  /* Handle constant exponents.  */

  /* For integer valued exponents we can expand to an optimal multiplication
     sequence using expand_powi.  */
  c = TREE_REAL_CST (arg1);
  n = real_to_integer (&c);
  real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
  /* The round trip through real_from_integer verifies that the exponent
     is exactly integral and representable in a HOST_WIDE_INT.  */
  if (real_identical (&c, &cint)
      && ((n >= -1 && n <= 2)
	  || (flag_unsafe_math_optimizations
	      && optimize_insn_for_speed_p ()
	      && powi_cost (n) <= POWI_MAX_MULTS)))
    {
      op = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
      if (n != 1)
	{
	  op = force_reg (mode, op);
	  op = expand_powi (op, mode, n);
	}
      return op;
    }

  /* Stabilize ARG0 so the expansions below may evaluate it more than
     once without repeating side effects.  */
  narg0 = builtin_save_expr (arg0);

  /* If the exponent is not integer valued, check if it is half of an integer.
     In this case we can expand to sqrt (x) * x**(n/2).  */
  fn = mathfn_built_in (type, BUILT_IN_SQRT);
  if (fn != NULL_TREE)
    {
      /* 2*c integral means the exponent is n/2 for integral n.  */
      real_arithmetic (&c2, MULT_EXPR, &c, &dconst2);
      n = real_to_integer (&c2);
      real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
      if (real_identical (&c2, &cint)
	  && ((flag_unsafe_math_optimizations
	       && optimize_insn_for_speed_p ()
	       && powi_cost (n/2) <= POWI_MAX_MULTS)
	      || n == 1))
	{
	  tree call_expr = build_call_expr (fn, 1, narg0);
	  /* Use expand_expr in case the newly built call expression
	     was folded to a non-call.  */
	  op = expand_expr (call_expr, subtarget, mode, EXPAND_NORMAL);
	  if (n != 1)
	    {
	      op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
	      op2 = force_reg (mode, op2);
	      op2 = expand_powi (op2, mode, abs (n / 2));
	      op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
					0, OPTAB_LIB_WIDEN);
	      /* If the original exponent was negative, reciprocate the
		 result.  */
	      if (n < 0)
		op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
				   op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
	    }
	  return op;
	}
    }

  /* Try if the exponent is a third of an integer.  In this case
     we can expand to x**(n/3) * cbrt(x)**(n%3).  As cbrt (x) is
     different from pow (x, 1./3.) due to rounding and behavior
     with negative x we need to constrain this transformation to
     unsafe math and positive x or finite math.  */
  fn = mathfn_built_in (type, BUILT_IN_CBRT);
  if (fn != NULL_TREE
      && flag_unsafe_math_optimizations
      && (tree_expr_nonnegative_p (arg0)
	  || !HONOR_NANS (mode)))
    {
      REAL_VALUE_TYPE dconst3;
      real_from_integer (&dconst3, VOIDmode, 3, 0, 0);
      real_arithmetic (&c2, MULT_EXPR, &c, &dconst3);
      real_round (&c2, mode, &c2);
      n = real_to_integer (&c2);
      real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
      /* Check that N/3 converts back to exactly the original exponent,
	 i.e. the exponent really is a third of an integer.  */
      real_arithmetic (&c2, RDIV_EXPR, &cint, &dconst3);
      real_convert (&c2, mode, &c2);
      if (real_identical (&c2, &c)
	  && ((optimize_insn_for_speed_p ()
	       && powi_cost (n/3) <= POWI_MAX_MULTS)
	      || n == 1))
	{
	  tree call_expr = build_call_expr (fn, 1,narg0);
	  op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
	  /* Square the cbrt when |N| mod 3 == 2 (the cbrt factor is
	     cbrt(x)**(n%3)).  */
	  if (abs (n) % 3 == 2)
	    op = expand_simple_binop (mode, MULT, op, op, op,
				      0, OPTAB_LIB_WIDEN);
	  if (n != 1)
	    {
	      op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
	      op2 = force_reg (mode, op2);
	      op2 = expand_powi (op2, mode, abs (n / 3));
	      op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
					0, OPTAB_LIB_WIDEN);
	      /* If the original exponent was negative, reciprocate the
		 result.  */
	      if (n < 0)
		op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
				   op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
	    }
	  return op;
	}
    }

  /* Fall back to optab expansion.  */
  return expand_builtin_mathfn_2 (exp, target, subtarget);
}
3089
3090 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
3091 a normal call should be emitted rather than expanding the function
3092 in-line. EXP is the expression that is a call to the builtin
3093 function; if convenient, the result should be placed in TARGET. */
3094
static rtx
expand_builtin_powi (tree exp, rtx target, rtx subtarget)
{
  tree arg0, arg1;
  rtx op0, op1;
  enum machine_mode mode;
  enum machine_mode mode2;

  if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Handle constant power.  */

  if (TREE_CODE (arg1) == INTEGER_CST
      && !TREE_OVERFLOW (arg1))
    {
      HOST_WIDE_INT n = TREE_INT_CST_LOW (arg1);

      /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
	 Otherwise, check the number of multiplications required.
	 The HIGH-word test ensures the exponent fits in N (the
	 double-int value is small positive or small negative).  */
      if ((TREE_INT_CST_HIGH (arg1) == 0
	   || TREE_INT_CST_HIGH (arg1) == -1)
	  && ((n >= -1 && n <= 2)
	      || (optimize_insn_for_speed_p ()
		  && powi_cost (n) <= POWI_MAX_MULTS)))
	{
	  op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
	  op0 = force_reg (mode, op0);
	  return expand_powi (op0, mode, n);
	}
    }

  /* Emit a libcall to libgcc.  */

  /* Mode of the 2nd argument must match that of an int.  */
  mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);

  if (target == NULL_RTX)
    target = gen_reg_rtx (mode);

  /* Convert both operands to the modes the libcall expects.  */
  op0 = expand_expr (arg0, subtarget, mode, EXPAND_NORMAL);
  if (GET_MODE (op0) != mode)
    op0 = convert_to_mode (mode, op0, 0);
  op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
  if (GET_MODE (op1) != mode2)
    op1 = convert_to_mode (mode2, op1, 0);

  target = emit_library_call_value (optab_libfunc (powi_optab, mode),
				    target, LCT_CONST, mode, 2,
				    op0, mode, op1, mode2);

  return target;
}
3152
3153 /* Expand expression EXP which is a call to the strlen builtin. Return
3154 NULL_RTX if we failed the caller should emit a normal call, otherwise
3155 try to get the result in TARGET, if convenient. */
3156
static rtx
expand_builtin_strlen (tree exp, rtx target,
		       enum machine_mode target_mode)
{
  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      rtx pat;
      tree len;
      tree src = CALL_EXPR_ARG (exp, 0);
      rtx result, src_reg, char_rtx, before_strlen;
      enum machine_mode insn_mode = target_mode, char_mode;
      enum insn_code icode = CODE_FOR_nothing;
      int align;

      /* If the length can be computed at compile-time, return it.  */
      len = c_strlen (src, 0);
      if (len)
	return expand_expr (len, target, target_mode, EXPAND_NORMAL);

      /* If the length can be computed at compile-time and is constant
	 integer, but there are side-effects in src, evaluate
	 src for side-effects, then return len.
	 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
	 can be optimized into: i++; x = 3;  */
      len = c_strlen (src, 1);
      if (len && TREE_CODE (len) == INTEGER_CST)
	{
	  expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
	  return expand_expr (len, target, target_mode, EXPAND_NORMAL);
	}

      align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;

      /* If SRC is not a pointer type, don't do this operation inline.  */
      if (align == 0)
	return NULL_RTX;

      /* Bail out if we can't compute strlen in the right mode.
	 Walk through ever-wider modes looking for a strlen pattern.  */
      while (insn_mode != VOIDmode)
	{
	  icode = optab_handler (strlen_optab, insn_mode)->insn_code;
	  if (icode != CODE_FOR_nothing)
	    break;

	  insn_mode = GET_MODE_WIDER_MODE (insn_mode);
	}
      if (insn_mode == VOIDmode)
	return NULL_RTX;

      /* Make a place to write the result of the instruction.  */
      result = target;
      if (! (result != 0
	     && REG_P (result)
	     && GET_MODE (result) == insn_mode
	     && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	result = gen_reg_rtx (insn_mode);

      /* Make a place to hold the source address.  We will not expand
	 the actual source until we are sure that the expansion will
	 not fail -- there are trees that cannot be expanded twice.  */
      src_reg = gen_reg_rtx (Pmode);

      /* Mark the beginning of the strlen sequence so we can emit the
	 source operand later.  */
      before_strlen = get_last_insn ();

      /* Operand 2 of the strlen pattern is the search character
	 (always NUL here); coerce it into the predicate's mode.  */
      char_rtx = const0_rtx;
      char_mode = insn_data[(int) icode].operand[2].mode;
      if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
							    char_mode))
	char_rtx = copy_to_mode_reg (char_mode, char_rtx);

      pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
			     char_rtx, GEN_INT (align));
      if (! pat)
	return NULL_RTX;
      emit_insn (pat);

      /* Now that we are assured of success, expand the source.  */
      start_sequence ();
      pat = expand_expr (src, src_reg, ptr_mode, EXPAND_NORMAL);
      if (pat != src_reg)
	emit_move_insn (src_reg, pat);
      pat = get_insns ();
      end_sequence ();

      /* Insert the source-address computation ahead of the strlen insn
	 emitted above.  */
      if (before_strlen)
	emit_insn_after (pat, before_strlen);
      else
	emit_insn_before (pat, get_insns ());

      /* Return the value in the proper mode for this function.  */
      if (GET_MODE (result) == target_mode)
	target = result;
      else if (target != 0)
	convert_move (target, result, 0);
      else
	target = convert_to_mode (target_mode, result, 0);

      return target;
    }
}
3261
3262 /* Expand a call to the strstr builtin. Return NULL_RTX if we failed the
3263 caller should emit a normal call, otherwise try to get the result
3264 in TARGET, if convenient (and in mode MODE if that's convenient). */
3265
3266 static rtx
3267 expand_builtin_strstr (tree exp, rtx target, enum machine_mode mode)
3268 {
3269 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3270 {
3271 tree type = TREE_TYPE (exp);
3272 tree result = fold_builtin_strstr (EXPR_LOCATION (exp),
3273 CALL_EXPR_ARG (exp, 0),
3274 CALL_EXPR_ARG (exp, 1), type);
3275 if (result)
3276 return expand_expr (result, target, mode, EXPAND_NORMAL);
3277 }
3278 return NULL_RTX;
3279 }
3280
3281 /* Expand a call to the strchr builtin. Return NULL_RTX if we failed the
3282 caller should emit a normal call, otherwise try to get the result
3283 in TARGET, if convenient (and in mode MODE if that's convenient). */
3284
3285 static rtx
3286 expand_builtin_strchr (tree exp, rtx target, enum machine_mode mode)
3287 {
3288 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3289 {
3290 tree type = TREE_TYPE (exp);
3291 tree result = fold_builtin_strchr (EXPR_LOCATION (exp),
3292 CALL_EXPR_ARG (exp, 0),
3293 CALL_EXPR_ARG (exp, 1), type);
3294 if (result)
3295 return expand_expr (result, target, mode, EXPAND_NORMAL);
3296
3297 /* FIXME: Should use strchrM optab so that ports can optimize this. */
3298 }
3299 return NULL_RTX;
3300 }
3301
3302 /* Expand a call to the strrchr builtin. Return NULL_RTX if we failed the
3303 caller should emit a normal call, otherwise try to get the result
3304 in TARGET, if convenient (and in mode MODE if that's convenient). */
3305
3306 static rtx
3307 expand_builtin_strrchr (tree exp, rtx target, enum machine_mode mode)
3308 {
3309 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3310 {
3311 tree type = TREE_TYPE (exp);
3312 tree result = fold_builtin_strrchr (EXPR_LOCATION (exp),
3313 CALL_EXPR_ARG (exp, 0),
3314 CALL_EXPR_ARG (exp, 1), type);
3315 if (result)
3316 return expand_expr (result, target, mode, EXPAND_NORMAL);
3317 }
3318 return NULL_RTX;
3319 }
3320
3321 /* Expand a call to the strpbrk builtin. Return NULL_RTX if we failed the
3322 caller should emit a normal call, otherwise try to get the result
3323 in TARGET, if convenient (and in mode MODE if that's convenient). */
3324
3325 static rtx
3326 expand_builtin_strpbrk (tree exp, rtx target, enum machine_mode mode)
3327 {
3328 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3329 {
3330 tree type = TREE_TYPE (exp);
3331 tree result = fold_builtin_strpbrk (EXPR_LOCATION (exp),
3332 CALL_EXPR_ARG (exp, 0),
3333 CALL_EXPR_ARG (exp, 1), type);
3334 if (result)
3335 return expand_expr (result, target, mode, EXPAND_NORMAL);
3336 }
3337 return NULL_RTX;
3338 }
3339
3340 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3341 bytes from constant string DATA + OFFSET and return it as target
3342 constant. */
3343
3344 static rtx
3345 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3346 enum machine_mode mode)
3347 {
3348 const char *str = (const char *) data;
3349
3350 gcc_assert (offset >= 0
3351 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3352 <= strlen (str) + 1));
3353
3354 return c_readstr (str + offset, mode);
3355 }
3356
3357 /* Expand a call EXP to the memcpy builtin.
3358 Return NULL_RTX if we failed, the caller should emit a normal call,
3359 otherwise try to get the result in TARGET, if convenient (and in
3360 mode MODE if that's convenient). */
3361
static rtx
expand_builtin_memcpy (tree exp, rtx target, enum machine_mode mode)
{
  tree fndecl = get_callee_fndecl (exp);

  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      tree len = CALL_EXPR_ARG (exp, 2);
      const char *src_str;
      unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
      unsigned int dest_align
	= get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
      rtx dest_mem, src_mem, dest_addr, len_rtx;
      tree result = fold_builtin_memory_op (EXPR_LOCATION (exp),
					    dest, src, len,
					    TREE_TYPE (TREE_TYPE (fndecl)),
					    false, /*endp=*/0);
      HOST_WIDE_INT expected_size = -1;
      unsigned int expected_align = 0;
      tree_ann_common_t ann;

      /* If the call folded to a tree, expand that instead, evaluating
	 any side effects chained in COMPOUND_EXPRs first.  */
      if (result)
	{
	  while (TREE_CODE (result) == COMPOUND_EXPR)
	    {
	      expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
			   EXPAND_NORMAL);
	      result = TREE_OPERAND (result, 1);
	    }
	  return expand_expr (result, target, mode, EXPAND_NORMAL);
	}

      /* If DEST is not a pointer type, call the normal function.  */
      if (dest_align == 0)
	return NULL_RTX;

      /* If either SRC is not a pointer type, don't do this
	 operation in-line.  */
      if (src_align == 0)
	return NULL_RTX;

      /* Use profiling hints, when available, for the expected
	 alignment and size of the copy.  */
      ann = tree_common_ann (exp);
      if (ann)
	stringop_block_profile (ann->stmt, &expected_align, &expected_size);

      if (expected_align < dest_align)
	expected_align = dest_align;
      dest_mem = get_memory_rtx (dest, len);
      set_mem_align (dest_mem, dest_align);
      len_rtx = expand_normal (len);
      src_str = c_getstr (src);

      /* If SRC is a string constant and block move would be done
	 by pieces, we can avoid loading the string from memory
	 and only store the computed constants.  */
      if (src_str
	  && CONST_INT_P (len_rtx)
	  && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
	  && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
				  CONST_CAST (char *, src_str),
				  dest_align, false))
	{
	  dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
				      builtin_memcpy_read_str,
				      CONST_CAST (char *, src_str),
				      dest_align, false, 0);
	  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}

      src_mem = get_memory_rtx (src, len);
      set_mem_align (src_mem, src_align);

      /* Copy word part most expediently.  */
      dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
					 CALL_EXPR_TAILCALL (exp)
					 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
					 expected_align, expected_size);

      /* The block-move expander may not hand back the destination
	 address; reconstruct it from DEST_MEM in that case.  */
      if (dest_addr == 0)
	{
	  dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
	  dest_addr = convert_memory_address (ptr_mode, dest_addr);
	}
      return dest_addr;
    }
}
3455
3456 /* Expand a call EXP to the mempcpy builtin.
3457 Return NULL_RTX if we failed; the caller should emit a normal call,
3458 otherwise try to get the result in TARGET, if convenient (and in
3459 mode MODE if that's convenient). If ENDP is 0 return the
3460 destination pointer, if ENDP is 1 return the end pointer ala
3461 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3462 stpcpy. */
3463
3464 static rtx
3465 expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3466 {
3467 if (!validate_arglist (exp,
3468 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3469 return NULL_RTX;
3470 else
3471 {
3472 tree dest = CALL_EXPR_ARG (exp, 0);
3473 tree src = CALL_EXPR_ARG (exp, 1);
3474 tree len = CALL_EXPR_ARG (exp, 2);
3475 return expand_builtin_mempcpy_args (dest, src, len,
3476 TREE_TYPE (exp),
3477 target, mode, /*endp=*/ 1);
3478 }
3479 }
3480
3481 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3482 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3483 so that this can also be called without constructing an actual CALL_EXPR.
3484 TYPE is the return type of the call. The other arguments and return value
3485 are the same as for expand_builtin_mempcpy. */
3486
static rtx
expand_builtin_mempcpy_args (tree dest, tree src, tree len, tree type,
			     rtx target, enum machine_mode mode, int endp)
{
  /* If return value is ignored, transform mempcpy into memcpy.  */
  if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_MEMCPY])
    {
      tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
      tree result = build_call_expr (fn, 3, dest, src, len);

      /* Evaluate side effects chained in COMPOUND_EXPRs before
	 expanding the final value.  */
      while (TREE_CODE (result) == COMPOUND_EXPR)
	{
	  expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
		       EXPAND_NORMAL);
	  result = TREE_OPERAND (result, 1);
	}
      return expand_expr (result, target, mode, EXPAND_NORMAL);
    }
  else
    {
      const char *src_str;
      unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
      unsigned int dest_align
	= get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
      rtx dest_mem, src_mem, len_rtx;
      tree result = fold_builtin_memory_op (UNKNOWN_LOCATION,
					    dest, src, len, type, false, endp);

      /* If the call folded to a tree, expand that instead.  */
      if (result)
	{
	  while (TREE_CODE (result) == COMPOUND_EXPR)
	    {
	      expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
			   EXPAND_NORMAL);
	      result = TREE_OPERAND (result, 1);
	    }
	  return expand_expr (result, target, mode, EXPAND_NORMAL);
	}

      /* If either SRC or DEST is not a pointer type, don't do this
	 operation in-line.  */
      if (dest_align == 0 || src_align == 0)
	return NULL_RTX;

      /* If LEN is not constant, call the normal function.  */
      if (! host_integerp (len, 1))
	return NULL_RTX;

      len_rtx = expand_normal (len);
      src_str = c_getstr (src);

      /* If SRC is a string constant and block move would be done
	 by pieces, we can avoid loading the string from memory
	 and only store the computed constants.  */
      if (src_str
	  && CONST_INT_P (len_rtx)
	  && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
	  && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
				  CONST_CAST (char *, src_str),
				  dest_align, false))
	{
	  dest_mem = get_memory_rtx (dest, len);
	  set_mem_align (dest_mem, dest_align);
	  dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
				      builtin_memcpy_read_str,
				      CONST_CAST (char *, src_str),
				      dest_align, false, endp);
	  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}

      /* Otherwise fall back to a piecewise move for small constant
	 lengths.  */
      if (CONST_INT_P (len_rtx)
	  && can_move_by_pieces (INTVAL (len_rtx),
				 MIN (dest_align, src_align)))
	{
	  dest_mem = get_memory_rtx (dest, len);
	  set_mem_align (dest_mem, dest_align);
	  src_mem = get_memory_rtx (src, len);
	  set_mem_align (src_mem, src_align);
	  dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
				     MIN (dest_align, src_align), endp);
	  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}

      return NULL_RTX;
    }
}
3577
3578 /* Expand expression EXP, which is a call to the memmove builtin. Return
3579 NULL_RTX if we failed; the caller should emit a normal call. */
3580
3581 static rtx
3582 expand_builtin_memmove (tree exp, rtx target, enum machine_mode mode, int ignore)
3583 {
3584 if (!validate_arglist (exp,
3585 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3586 return NULL_RTX;
3587 else
3588 {
3589 tree dest = CALL_EXPR_ARG (exp, 0);
3590 tree src = CALL_EXPR_ARG (exp, 1);
3591 tree len = CALL_EXPR_ARG (exp, 2);
3592 return expand_builtin_memmove_args (dest, src, len, TREE_TYPE (exp),
3593 target, mode, ignore);
3594 }
3595 }
3596
3597 /* Helper function to do the actual work for expand_builtin_memmove. The
3598 arguments to the builtin_memmove call DEST, SRC, and LEN are broken out
3599 so that this can also be called without constructing an actual CALL_EXPR.
3600 TYPE is the return type of the call. The other arguments and return value
3601 are the same as for expand_builtin_memmove. */
3602
3603 static rtx
3604 expand_builtin_memmove_args (tree dest, tree src, tree len,
3605 tree type, rtx target, enum machine_mode mode,
3606 int ignore)
3607 {
3608 tree result = fold_builtin_memory_op (UNKNOWN_LOCATION,
3609 dest, src, len, type, ignore, /*endp=*/3);
3610
3611 if (result)
3612 {
3613 STRIP_TYPE_NOPS (result);
3614 while (TREE_CODE (result) == COMPOUND_EXPR)
3615 {
3616 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3617 EXPAND_NORMAL);
3618 result = TREE_OPERAND (result, 1);
3619 }
3620 return expand_expr (result, target, mode, EXPAND_NORMAL);
3621 }
3622
3623 /* Otherwise, call the normal function. */
3624 return NULL_RTX;
3625 }
3626
3627 /* Expand expression EXP, which is a call to the bcopy builtin. Return
3628 NULL_RTX if we failed the caller should emit a normal call. */
3629
3630 static rtx
3631 expand_builtin_bcopy (tree exp, int ignore)
3632 {
3633 tree type = TREE_TYPE (exp);
3634 tree src, dest, size;
3635 location_t loc = EXPR_LOCATION (exp);
3636
3637 if (!validate_arglist (exp,
3638 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3639 return NULL_RTX;
3640
3641 src = CALL_EXPR_ARG (exp, 0);
3642 dest = CALL_EXPR_ARG (exp, 1);
3643 size = CALL_EXPR_ARG (exp, 2);
3644
3645 /* Transform bcopy(ptr x, ptr y, int z) to memmove(ptr y, ptr x, size_t z).
3646 This is done this way so that if it isn't expanded inline, we fall
3647 back to calling bcopy instead of memmove. */
3648 return expand_builtin_memmove_args (dest, src,
3649 fold_convert_loc (loc, sizetype, size),
3650 type, const0_rtx, VOIDmode,
3651 ignore);
3652 }
3653
3654 #ifndef HAVE_movstr
3655 # define HAVE_movstr 0
3656 # define CODE_FOR_movstr CODE_FOR_nothing
3657 #endif
3658
3659 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3660 we failed, the caller should emit a normal call, otherwise try to
3661 get the result in TARGET, if convenient. If ENDP is 0 return the
3662 destination pointer, if ENDP is 1 return the end pointer ala
3663 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3664 stpcpy. */
3665
static rtx
expand_movstr (tree dest, tree src, rtx target, int endp)
{
  rtx end;
  rtx dest_mem;
  rtx src_mem;
  rtx insn;
  const struct insn_data * data;

  if (!HAVE_movstr)
    return NULL_RTX;

  dest_mem = get_memory_rtx (dest, NULL);
  src_mem = get_memory_rtx (src, NULL);
  if (!endp)
    {
      /* strcpy-style return value: the destination pointer itself.
	 Capture it in TARGET before the pattern runs.  */
      target = force_reg (Pmode, XEXP (dest_mem, 0));
      dest_mem = replace_equiv_address (dest_mem, target);
      end = gen_reg_rtx (Pmode);
    }
  else
    {
      /* mempcpy/stpcpy-style return value: reuse TARGET for the end
	 pointer when the caller supplied a usable one.  */
      if (target == 0 || target == const0_rtx)
	{
	  end = gen_reg_rtx (Pmode);
	  if (target == 0)
	    target = end;
	}
      else
	end = target;
    }

  data = insn_data + CODE_FOR_movstr;

  /* Coerce END into the mode the movstr pattern declares for its
     first operand, if it declares one.  */
  if (data->operand[0].mode != VOIDmode)
    end = gen_lowpart (data->operand[0].mode, end);

  insn = data->genfun (end, dest_mem, src_mem);

  gcc_assert (insn);

  emit_insn (insn);

  /* movstr is supposed to set end to the address of the NUL
     terminator.  If the caller requested a mempcpy-like return value,
     adjust it.  */
  if (endp == 1 && target != const0_rtx)
    {
      rtx tem = plus_constant (gen_lowpart (GET_MODE (target), end), 1);
      emit_move_insn (target, force_operand (tem, NULL_RTX));
    }

  return target;
}
3720
3721 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3722 NULL_RTX if we failed the caller should emit a normal call, otherwise
3723 try to get the result in TARGET, if convenient (and in mode MODE if that's
3724 convenient). */
3725
3726 static rtx
3727 expand_builtin_strcpy (tree fndecl, tree exp, rtx target, enum machine_mode mode)
3728 {
3729 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3730 {
3731 tree dest = CALL_EXPR_ARG (exp, 0);
3732 tree src = CALL_EXPR_ARG (exp, 1);
3733 return expand_builtin_strcpy_args (fndecl, dest, src, target, mode);
3734 }
3735 return NULL_RTX;
3736 }
3737
3738 /* Helper function to do the actual work for expand_builtin_strcpy. The
3739 arguments to the builtin_strcpy call DEST and SRC are broken out
3740 so that this can also be called without constructing an actual CALL_EXPR.
3741 The other arguments and return value are the same as for
3742 expand_builtin_strcpy. */
3743
3744 static rtx
3745 expand_builtin_strcpy_args (tree fndecl, tree dest, tree src,
3746 rtx target, enum machine_mode mode)
3747 {
3748 tree result = fold_builtin_strcpy (UNKNOWN_LOCATION,
3749 fndecl, dest, src, 0);
3750 if (result)
3751 return expand_expr (result, target, mode, EXPAND_NORMAL);
3752 return expand_movstr (dest, src, target, /*endp=*/0);
3753
3754 }
3755
/* Expand a call EXP to the stpcpy builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
{
  tree dst, src;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  /* If return value is ignored, transform stpcpy into strcpy.  */
  if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_STRCPY])
    {
      tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
      tree result = build_call_expr (fn, 2, dst, src);

      /* The call may come back wrapped in COMPOUND_EXPRs; expand the
	 side-effect operands for effect only, then expand the final
	 value.  */
      STRIP_NOPS (result);
      while (TREE_CODE (result) == COMPOUND_EXPR)
	{
	  expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
		       EXPAND_NORMAL);
	  result = TREE_OPERAND (result, 1);
	}
      return expand_expr (result, target, mode, EXPAND_NORMAL);
    }
  else
    {
      tree len, lenp1;
      rtx ret;

      /* Ensure we get an actual string whose length can be evaluated at
	 compile-time, not an expression containing a string.  This is
	 because the latter will potentially produce pessimized code
	 when used to produce the return value.  */
      if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
	return expand_movstr (dst, src, target, /*endp=*/2);

      /* Copy strlen (SRC) + 1 bytes and have mempcpy return the end
	 pointer minus one, which is exactly stpcpy's result.  */
      lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
      ret = expand_builtin_mempcpy_args (dst, src, lenp1, TREE_TYPE (exp),
					 target, mode, /*endp=*/2);

      if (ret)
	return ret;

      if (TREE_CODE (len) == INTEGER_CST)
	{
	  rtx len_rtx = expand_normal (len);

	  if (CONST_INT_P (len_rtx))
	    {
	      /* mempcpy expansion failed; expand as a plain strcpy and
		 compute the result as DST + strlen (SRC) by hand.  */
	      ret = expand_builtin_strcpy_args (get_callee_fndecl (exp),
						dst, src, target, mode);

	      if (ret)
		{
		  if (! target)
		    {
		      if (mode != VOIDmode)
			target = gen_reg_rtx (mode);
		      else
			target = gen_reg_rtx (GET_MODE (ret));
		    }
		  if (GET_MODE (target) != GET_MODE (ret))
		    ret = gen_lowpart (GET_MODE (target), ret);

		  ret = plus_constant (ret, INTVAL (len_rtx));
		  ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
		  gcc_assert (ret);

		  return target;
		}
	    }
	}

      /* Last resort: the movstr pattern with an end-minus-one
	 return value.  */
      return expand_movstr (dst, src, target, /*endp=*/2);
    }
}
3840
3841 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3842 bytes from constant string DATA + OFFSET and return it as target
3843 constant. */
3844
3845 rtx
3846 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3847 enum machine_mode mode)
3848 {
3849 const char *str = (const char *) data;
3850
3851 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3852 return const0_rtx;
3853
3854 return c_readstr (str + offset, mode);
3855 }
3856
/* Expand expression EXP, which is a call to the strncpy builtin.  Return
   NULL_RTX if we failed; the caller should then emit a normal call.  */

static rtx
expand_builtin_strncpy (tree exp, rtx target, enum machine_mode mode)
{
  tree fndecl = get_callee_fndecl (exp);
  location_t loc = EXPR_LOCATION (exp);

  if (validate_arglist (exp,
			POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      tree len = CALL_EXPR_ARG (exp, 2);
      /* SLEN is the compile-time length of SRC, if known.  */
      tree slen = c_strlen (src, 1);
      tree result = fold_builtin_strncpy (EXPR_LOCATION (exp),
					  fndecl, dest, src, len, slen);

      if (result)
	{
	  /* Strip COMPOUND_EXPR wrappers, expanding their left-hand
	     operands for side effects only.  */
	  while (TREE_CODE (result) == COMPOUND_EXPR)
	    {
	      expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
			   EXPAND_NORMAL);
	      result = TREE_OPERAND (result, 1);
	    }
	  return expand_expr (result, target, mode, EXPAND_NORMAL);
	}

      /* We must be passed a constant len and src parameter.  */
      if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
	return NULL_RTX;

      /* Include the terminating NUL in the source length.  */
      slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

      /* We're required to pad with trailing zeros if the requested
	 len is greater than strlen(s2)+1.  In that case try to
	 use store_by_pieces, if it fails, punt.  */
      if (tree_int_cst_lt (slen, len))
	{
	  unsigned int dest_align
	    = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
	  const char *p = c_getstr (src);
	  rtx dest_mem;

	  if (!p || dest_align == 0 || !host_integerp (len, 1)
	      || !can_store_by_pieces (tree_low_cst (len, 1),
				       builtin_strncpy_read_str,
				       CONST_CAST (char *, p),
				       dest_align, false))
	    return NULL_RTX;

	  dest_mem = get_memory_rtx (dest, len);
	  store_by_pieces (dest_mem, tree_low_cst (len, 1),
			   builtin_strncpy_read_str,
			   CONST_CAST (char *, p), dest_align, false, 0);
	  /* strncpy returns DEST; hand it back in PTR_MODE.  */
	  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}
    }
  return NULL_RTX;
}
3921
3922 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3923 bytes from constant string DATA + OFFSET and return it as target
3924 constant. */
3925
3926 rtx
3927 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3928 enum machine_mode mode)
3929 {
3930 const char *c = (const char *) data;
3931 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3932
3933 memset (p, *c, GET_MODE_SIZE (mode));
3934
3935 return c_readstr (p, mode);
3936 }
3937
3938 /* Callback routine for store_by_pieces. Return the RTL of a register
3939 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3940 char value given in the RTL register data. For example, if mode is
3941 4 bytes wide, return the RTL for 0x01010101*data. */
3942
3943 static rtx
3944 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3945 enum machine_mode mode)
3946 {
3947 rtx target, coeff;
3948 size_t size;
3949 char *p;
3950
3951 size = GET_MODE_SIZE (mode);
3952 if (size == 1)
3953 return (rtx) data;
3954
3955 p = XALLOCAVEC (char, size);
3956 memset (p, 1, size);
3957 coeff = c_readstr (p, mode);
3958
3959 target = convert_to_mode (mode, (rtx) data, 1);
3960 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3961 return force_reg (mode, target);
3962 }
3963
3964 /* Expand expression EXP, which is a call to the memset builtin. Return
3965 NULL_RTX if we failed the caller should emit a normal call, otherwise
3966 try to get the result in TARGET, if convenient (and in mode MODE if that's
3967 convenient). */
3968
3969 static rtx
3970 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3971 {
3972 if (!validate_arglist (exp,
3973 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3974 return NULL_RTX;
3975 else
3976 {
3977 tree dest = CALL_EXPR_ARG (exp, 0);
3978 tree val = CALL_EXPR_ARG (exp, 1);
3979 tree len = CALL_EXPR_ARG (exp, 2);
3980 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3981 }
3982 }
3983
/* Helper function to do the actual work for expand_builtin_memset.  The
   arguments to the builtin_memset call DEST, VAL, and LEN are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_memset.  */

static rtx
expand_builtin_memset_args (tree dest, tree val, tree len,
			    rtx target, enum machine_mode mode, tree orig_exp)
{
  tree fndecl, fn;
  enum built_in_function fcode;
  char c;
  unsigned int dest_align;
  rtx dest_mem, dest_addr, len_rtx;
  HOST_WIDE_INT expected_size = -1;
  unsigned int expected_align = 0;
  tree_ann_common_t ann;

  dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);

  /* If DEST is not a pointer type, don't do this operation in-line.  */
  if (dest_align == 0)
    return NULL_RTX;

  /* Use profile feedback, when available, to refine the alignment and
     size estimates used by the block-operation expanders.  */
  ann = tree_common_ann (orig_exp);
  if (ann)
    stringop_block_profile (ann->stmt, &expected_align, &expected_size);

  if (expected_align < dest_align)
    expected_align = dest_align;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      /* Evaluate and ignore VAL in case it has side-effects.  */
      expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
      return expand_expr (dest, target, mode, EXPAND_NORMAL);
    }

  /* Stabilize the arguments in case we fail.  */
  dest = builtin_save_expr (dest);
  val = builtin_save_expr (val);
  len = builtin_save_expr (len);

  len_rtx = expand_normal (len);
  dest_mem = get_memory_rtx (dest, len);

  /* Case 1: the fill value is not a compile-time constant.  */
  if (TREE_CODE (val) != INTEGER_CST)
    {
      rtx val_rtx;

      val_rtx = expand_normal (val);
      val_rtx = convert_to_mode (TYPE_MODE (unsigned_char_type_node),
				 val_rtx, 0);

      /* Assume that we can memset by pieces if we can store
       * the coefficients by pieces (in the required modes).
       * We can't pass builtin_memset_gen_str as that emits RTL.  */
      c = 1;
      if (host_integerp (len, 1)
	  && can_store_by_pieces (tree_low_cst (len, 1),
				  builtin_memset_read_str, &c, dest_align,
				  true))
	{
	  val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
			       val_rtx);
	  store_by_pieces (dest_mem, tree_low_cst (len, 1),
			   builtin_memset_gen_str, val_rtx, dest_align,
			   true, 0);
	}
      else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
					dest_align, expected_align,
					expected_size))
	goto do_libcall;

      /* memset returns DEST; hand it back in PTR_MODE.  */
      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  /* Case 2: constant fill value.  Convert it to a host char, or punt
     to a library call if that is not possible.  */
  if (target_char_cast (val, &c))
    goto do_libcall;

  if (c)
    {
      /* Nonzero constant byte: try store_by_pieces first, then the
	 target's setmem pattern.  */
      if (host_integerp (len, 1)
	  && can_store_by_pieces (tree_low_cst (len, 1),
				  builtin_memset_read_str, &c, dest_align,
				  true))
	store_by_pieces (dest_mem, tree_low_cst (len, 1),
			 builtin_memset_read_str, &c, dest_align, true, 0);
      else if (!set_storage_via_setmem (dest_mem, len_rtx, GEN_INT (c),
					dest_align, expected_align,
					expected_size))
	goto do_libcall;

      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  /* Case 3: zero fill — use the generic block-clear expander.  */
  set_mem_align (dest_mem, dest_align);
  dest_addr = clear_storage_hints (dest_mem, len_rtx,
				   CALL_EXPR_TAILCALL (orig_exp)
				   ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
				   expected_align, expected_size);

  if (dest_addr == 0)
    {
      dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_addr = convert_memory_address (ptr_mode, dest_addr);
    }

  return dest_addr;

 do_libcall:
  /* Inline expansion failed: emit a call to memset or bzero, whichever
     ORIG_EXP originally was, preserving its tail-call flag.  */
  fndecl = get_callee_fndecl (orig_exp);
  fcode = DECL_FUNCTION_CODE (fndecl);
  if (fcode == BUILT_IN_MEMSET)
    fn = build_call_expr (fndecl, 3, dest, val, len);
  else if (fcode == BUILT_IN_BZERO)
    fn = build_call_expr (fndecl, 2, dest, len);
  else
    gcc_unreachable ();
  if (TREE_CODE (fn) == CALL_EXPR)
    CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
  return expand_call (fn, target, target == const0_rtx);
}
4113
4114 /* Expand expression EXP, which is a call to the bzero builtin. Return
4115 NULL_RTX if we failed the caller should emit a normal call. */
4116
4117 static rtx
4118 expand_builtin_bzero (tree exp)
4119 {
4120 tree dest, size;
4121 location_t loc = EXPR_LOCATION (exp);
4122
4123 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4124 return NULL_RTX;
4125
4126 dest = CALL_EXPR_ARG (exp, 0);
4127 size = CALL_EXPR_ARG (exp, 1);
4128
4129 /* New argument list transforming bzero(ptr x, int y) to
4130 memset(ptr x, int 0, size_t y). This is done this way
4131 so that if it isn't expanded inline, we fallback to
4132 calling bzero instead of memset. */
4133
4134 return expand_builtin_memset_args (dest, integer_zero_node,
4135 fold_convert_loc (loc, sizetype, size),
4136 const0_rtx, VOIDmode, exp);
4137 }
4138
4139 /* Expand a call to the memchr builtin. Return NULL_RTX if we failed the
4140 caller should emit a normal call, otherwise try to get the result
4141 in TARGET, if convenient (and in mode MODE if that's convenient). */
4142
4143 static rtx
4144 expand_builtin_memchr (tree exp, rtx target, enum machine_mode mode)
4145 {
4146 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE,
4147 INTEGER_TYPE, VOID_TYPE))
4148 {
4149 tree type = TREE_TYPE (exp);
4150 tree result = fold_builtin_memchr (EXPR_LOCATION (exp),
4151 CALL_EXPR_ARG (exp, 0),
4152 CALL_EXPR_ARG (exp, 1),
4153 CALL_EXPR_ARG (exp, 2), type);
4154 if (result)
4155 return expand_expr (result, target, mode, EXPAND_NORMAL);
4156 }
4157 return NULL_RTX;
4158 }
4159
/* Expand expression EXP, which is a call to the memcmp built-in function.
   Return NULL_RTX if we failed and the
   caller should emit a normal call, otherwise try to get the result in
   TARGET, if convenient (and in mode MODE, if that's convenient).  */

static rtx
expand_builtin_memcmp (tree exp, rtx target, enum machine_mode mode)
{
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      /* First give the folder a chance to collapse the comparison.  */
      tree result = fold_builtin_memcmp (loc,
					 CALL_EXPR_ARG (exp, 0),
					 CALL_EXPR_ARG (exp, 1),
					 CALL_EXPR_ARG (exp, 2));
      if (result)
	return expand_expr (result, target, mode, EXPAND_NORMAL);
    }

#if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
  {
    rtx arg1_rtx, arg2_rtx, arg3_rtx;
    rtx result;
    rtx insn;
    tree arg1 = CALL_EXPR_ARG (exp, 0);
    tree arg2 = CALL_EXPR_ARG (exp, 1);
    tree len = CALL_EXPR_ARG (exp, 2);

    int arg1_align
      = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
    int arg2_align
      = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
    enum machine_mode insn_mode;

    /* Pick the comparison pattern the target provides, preferring
       cmpmem to cmpstrn; with neither available, punt.  */
#ifdef HAVE_cmpmemsi
    if (HAVE_cmpmemsi)
      insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
    else
#endif
#ifdef HAVE_cmpstrnsi
    if (HAVE_cmpstrnsi)
      insn_mode = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
    else
#endif
      return NULL_RTX;

    /* If we don't have POINTER_TYPE, call the function.  */
    if (arg1_align == 0 || arg2_align == 0)
      return NULL_RTX;

    /* Make a place to write the result of the instruction.  */
    result = target;
    if (! (result != 0
	   && REG_P (result) && GET_MODE (result) == insn_mode
	   && REGNO (result) >= FIRST_PSEUDO_REGISTER))
      result = gen_reg_rtx (insn_mode);

    arg1_rtx = get_memory_rtx (arg1, len);
    arg2_rtx = get_memory_rtx (arg2, len);
    arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));

    /* Set MEM_SIZE as appropriate.  */
    if (CONST_INT_P (arg3_rtx))
      {
	set_mem_size (arg1_rtx, arg3_rtx);
	set_mem_size (arg2_rtx, arg3_rtx);
      }

#ifdef HAVE_cmpmemsi
    if (HAVE_cmpmemsi)
      insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
			   GEN_INT (MIN (arg1_align, arg2_align)));
    else
#endif
#ifdef HAVE_cmpstrnsi
    if (HAVE_cmpstrnsi)
      insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
			    GEN_INT (MIN (arg1_align, arg2_align)));
    else
#endif
      gcc_unreachable ();

    if (insn)
      emit_insn (insn);
    else
      /* The pattern declined at expansion time; call memcmp through a
	 library call, reusing the operands prepared above.  */
      emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
			       TYPE_MODE (integer_type_node), 3,
			       XEXP (arg1_rtx, 0), Pmode,
			       XEXP (arg2_rtx, 0), Pmode,
			       convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
						TYPE_UNSIGNED (sizetype)),
			       TYPE_MODE (sizetype));

    /* Return the value in the proper mode for this function.  */
    mode = TYPE_MODE (TREE_TYPE (exp));
    if (GET_MODE (result) == mode)
      return result;
    else if (target != 0)
      {
	convert_move (target, result, 0);
	return target;
      }
    else
      return convert_to_mode (mode, result, 0);
  }
#endif

  return NULL_RTX;
}
4273
/* Expand expression EXP, which is a call to the strcmp builtin.  Return NULL_RTX
   if we failed; the caller should emit a normal call, otherwise try to get
   the result in TARGET, if convenient.  */

static rtx
expand_builtin_strcmp (tree exp, rtx target, enum machine_mode mode)
{
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      /* Let the folder try to collapse the comparison first.  */
      tree result = fold_builtin_strcmp (loc,
					 CALL_EXPR_ARG (exp, 0),
					 CALL_EXPR_ARG (exp, 1));
      if (result)
	return expand_expr (result, target, mode, EXPAND_NORMAL);
    }

#if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
  if (cmpstr_optab[SImode] != CODE_FOR_nothing
      || cmpstrn_optab[SImode] != CODE_FOR_nothing)
    {
      rtx arg1_rtx, arg2_rtx;
      rtx result, insn = NULL_RTX;
      tree fndecl, fn;
      tree arg1 = CALL_EXPR_ARG (exp, 0);
      tree arg2 = CALL_EXPR_ARG (exp, 1);

      int arg1_align
	= get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
      int arg2_align
	= get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;

      /* If we don't have POINTER_TYPE, call the function.  */
      if (arg1_align == 0 || arg2_align == 0)
	return NULL_RTX;

      /* Stabilize the arguments in case gen_cmpstr(n)si fail.  */
      arg1 = builtin_save_expr (arg1);
      arg2 = builtin_save_expr (arg2);

      arg1_rtx = get_memory_rtx (arg1, NULL);
      arg2_rtx = get_memory_rtx (arg2, NULL);

#ifdef HAVE_cmpstrsi
      /* Try to call cmpstrsi.  */
      if (HAVE_cmpstrsi)
	{
	  enum machine_mode insn_mode
	    = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;

	  /* Make a place to write the result of the instruction.  */
	  result = target;
	  if (! (result != 0
		 && REG_P (result) && GET_MODE (result) == insn_mode
		 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	    result = gen_reg_rtx (insn_mode);

	  insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
			       GEN_INT (MIN (arg1_align, arg2_align)));
	}
#endif
#ifdef HAVE_cmpstrnsi
      /* Try to determine at least one length and call cmpstrnsi.  */
      if (!insn && HAVE_cmpstrnsi)
	{
	  tree len;
	  rtx arg3_rtx;

	  enum machine_mode insn_mode
	    = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
	  /* Lengths include the terminating NUL, hence the +1 below.  */
	  tree len1 = c_strlen (arg1, 1);
	  tree len2 = c_strlen (arg2, 1);

	  if (len1)
	    len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
	  if (len2)
	    len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);

	  /* If we don't have a constant length for the first, use the length
	     of the second, if we know it.  We don't require a constant for
	     this case; some cost analysis could be done if both are available
	     but neither is constant.  For now, assume they're equally cheap,
	     unless one has side effects.  If both strings have constant lengths,
	     use the smaller.  */

	  if (!len1)
	    len = len2;
	  else if (!len2)
	    len = len1;
	  else if (TREE_SIDE_EFFECTS (len1))
	    len = len2;
	  else if (TREE_SIDE_EFFECTS (len2))
	    len = len1;
	  else if (TREE_CODE (len1) != INTEGER_CST)
	    len = len2;
	  else if (TREE_CODE (len2) != INTEGER_CST)
	    len = len1;
	  else if (tree_int_cst_lt (len1, len2))
	    len = len1;
	  else
	    len = len2;

	  /* If both arguments have side effects, we cannot optimize.  */
	  if (!len || TREE_SIDE_EFFECTS (len))
	    goto do_libcall;

	  arg3_rtx = expand_normal (len);

	  /* Make a place to write the result of the instruction.  */
	  result = target;
	  if (! (result != 0
		 && REG_P (result) && GET_MODE (result) == insn_mode
		 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	    result = gen_reg_rtx (insn_mode);

	  insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
				GEN_INT (MIN (arg1_align, arg2_align)));
	}
#endif

      if (insn)
	{
	  emit_insn (insn);

	  /* Return the value in the proper mode for this function.  */
	  mode = TYPE_MODE (TREE_TYPE (exp));
	  if (GET_MODE (result) == mode)
	    return result;
	  if (target == 0)
	    return convert_to_mode (mode, result, 0);
	  convert_move (target, result, 0);
	  return target;
	}

      /* Expand the library call ourselves using a stabilized argument
	 list to avoid re-evaluating the function's arguments twice.  */
#ifdef HAVE_cmpstrnsi
    do_libcall:
#endif
      fndecl = get_callee_fndecl (exp);
      fn = build_call_expr (fndecl, 2, arg1, arg2);
      if (TREE_CODE (fn) == CALL_EXPR)
	CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_call (fn, target, target == const0_rtx);
    }
#endif
  return NULL_RTX;
}
4425
/* Expand expression EXP, which is a call to the strncmp builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call, otherwise try to get
   the result in TARGET, if convenient.  */

static rtx
expand_builtin_strncmp (tree exp, rtx target, enum machine_mode mode)
{
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      /* Let the folder try to collapse the comparison first.  */
      tree result = fold_builtin_strncmp (loc,
					  CALL_EXPR_ARG (exp, 0),
					  CALL_EXPR_ARG (exp, 1),
					  CALL_EXPR_ARG (exp, 2));
      if (result)
	return expand_expr (result, target, mode, EXPAND_NORMAL);
    }

  /* If c_strlen can determine an expression for one of the string
     lengths, and it doesn't have side effects, then emit cmpstrnsi
     using length MIN(strlen(string)+1, arg3).  */
#ifdef HAVE_cmpstrnsi
  if (HAVE_cmpstrnsi)
    {
      tree len, len1, len2;
      rtx arg1_rtx, arg2_rtx, arg3_rtx;
      rtx result, insn;
      tree fndecl, fn;
      tree arg1 = CALL_EXPR_ARG (exp, 0);
      tree arg2 = CALL_EXPR_ARG (exp, 1);
      tree arg3 = CALL_EXPR_ARG (exp, 2);

      int arg1_align
	= get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
      int arg2_align
	= get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
      enum machine_mode insn_mode
	= insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;

      len1 = c_strlen (arg1, 1);
      len2 = c_strlen (arg2, 1);

      /* Lengths include the terminating NUL.  */
      if (len1)
	len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
      if (len2)
	len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);

      /* If we don't have a constant length for the first, use the length
	 of the second, if we know it.  We don't require a constant for
	 this case; some cost analysis could be done if both are available
	 but neither is constant.  For now, assume they're equally cheap,
	 unless one has side effects.  If both strings have constant lengths,
	 use the smaller.  */

      if (!len1)
	len = len2;
      else if (!len2)
	len = len1;
      else if (TREE_SIDE_EFFECTS (len1))
	len = len2;
      else if (TREE_SIDE_EFFECTS (len2))
	len = len1;
      else if (TREE_CODE (len1) != INTEGER_CST)
	len = len2;
      else if (TREE_CODE (len2) != INTEGER_CST)
	len = len1;
      else if (tree_int_cst_lt (len1, len2))
	len = len1;
      else
	len = len2;

      /* If both arguments have side effects, we cannot optimize.  */
      if (!len || TREE_SIDE_EFFECTS (len))
	return NULL_RTX;

      /* The actual new length parameter is MIN(len,arg3).  */
      len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
			     fold_convert_loc (loc, TREE_TYPE (len), arg3));

      /* If we don't have POINTER_TYPE, call the function.  */
      if (arg1_align == 0 || arg2_align == 0)
	return NULL_RTX;

      /* Make a place to write the result of the instruction.  */
      result = target;
      if (! (result != 0
	     && REG_P (result) && GET_MODE (result) == insn_mode
	     && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	result = gen_reg_rtx (insn_mode);

      /* Stabilize the arguments in case gen_cmpstrnsi fails.  */
      arg1 = builtin_save_expr (arg1);
      arg2 = builtin_save_expr (arg2);
      len = builtin_save_expr (len);

      arg1_rtx = get_memory_rtx (arg1, len);
      arg2_rtx = get_memory_rtx (arg2, len);
      arg3_rtx = expand_normal (len);
      insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
			    GEN_INT (MIN (arg1_align, arg2_align)));
      if (insn)
	{
	  emit_insn (insn);

	  /* Return the value in the proper mode for this function.  */
	  mode = TYPE_MODE (TREE_TYPE (exp));
	  if (GET_MODE (result) == mode)
	    return result;
	  if (target == 0)
	    return convert_to_mode (mode, result, 0);
	  convert_move (target, result, 0);
	  return target;
	}

      /* Expand the library call ourselves using a stabilized argument
	 list to avoid re-evaluating the function's arguments twice.  */
      fndecl = get_callee_fndecl (exp);
      fn = build_call_expr (fndecl, 3, arg1, arg2, len);
      if (TREE_CODE (fn) == CALL_EXPR)
	CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_call (fn, target, target == const0_rtx);
    }
#endif
  return NULL_RTX;
}
4555
/* Expand expression EXP, which is a call to the strcat builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strcat (tree fndecl, tree exp, rtx target, enum machine_mode mode)
{
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      tree dst = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      const char *p = c_getstr (src);

      /* If the string length is zero, return the dst parameter.  */
      if (p && *p == '\0')
	return expand_expr (dst, target, mode, EXPAND_NORMAL);

      if (optimize_insn_for_speed_p ())
	{
	  /* See if we can store by pieces into (dst + strlen(dst)).  */
	  tree newsrc, newdst,
	    strlen_fn = implicit_built_in_decls[BUILT_IN_STRLEN];
	  rtx insns;

	  /* Stabilize the argument list.  */
	  newsrc = builtin_save_expr (src);
	  dst = builtin_save_expr (dst);

	  /* Emit into a private sequence so the whole attempt can be
	     discarded if the strcpy expansion fails.  */
	  start_sequence ();

	  /* Create strlen (dst).  */
	  newdst = build_call_expr (strlen_fn, 1, dst);
	  /* Create (dst p+ strlen (dst)).  */

	  newdst = fold_build2_loc (loc, POINTER_PLUS_EXPR,
				    TREE_TYPE (dst), dst, newdst);
	  newdst = builtin_save_expr (newdst);

	  if (!expand_builtin_strcpy_args (fndecl, newdst, newsrc, target, mode))
	    {
	      end_sequence (); /* Stop sequence.  */
	      return NULL_RTX;
	    }

	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);

	  /* strcat returns its first argument.  */
	  return expand_expr (dst, target, mode, EXPAND_NORMAL);
	}

      return NULL_RTX;
    }
}
4615
4616 /* Expand expression EXP, which is a call to the strncat builtin.
4617 Return NULL_RTX if we failed the caller should emit a normal call,
4618 otherwise try to get the result in TARGET, if convenient. */
4619
4620 static rtx
4621 expand_builtin_strncat (tree exp, rtx target, enum machine_mode mode)
4622 {
4623 if (validate_arglist (exp,
4624 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4625 {
4626 tree result = fold_builtin_strncat (EXPR_LOCATION (exp),
4627 CALL_EXPR_ARG (exp, 0),
4628 CALL_EXPR_ARG (exp, 1),
4629 CALL_EXPR_ARG (exp, 2));
4630 if (result)
4631 return expand_expr (result, target, mode, EXPAND_NORMAL);
4632 }
4633 return NULL_RTX;
4634 }
4635
4636 /* Expand expression EXP, which is a call to the strspn builtin.
4637 Return NULL_RTX if we failed the caller should emit a normal call,
4638 otherwise try to get the result in TARGET, if convenient. */
4639
4640 static rtx
4641 expand_builtin_strspn (tree exp, rtx target, enum machine_mode mode)
4642 {
4643 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4644 {
4645 tree result = fold_builtin_strspn (EXPR_LOCATION (exp),
4646 CALL_EXPR_ARG (exp, 0),
4647 CALL_EXPR_ARG (exp, 1));
4648 if (result)
4649 return expand_expr (result, target, mode, EXPAND_NORMAL);
4650 }
4651 return NULL_RTX;
4652 }
4653
4654 /* Expand expression EXP, which is a call to the strcspn builtin.
4655 Return NULL_RTX if we failed the caller should emit a normal call,
4656 otherwise try to get the result in TARGET, if convenient. */
4657
4658 static rtx
4659 expand_builtin_strcspn (tree exp, rtx target, enum machine_mode mode)
4660 {
4661 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4662 {
4663 tree result = fold_builtin_strcspn (EXPR_LOCATION (exp),
4664 CALL_EXPR_ARG (exp, 0),
4665 CALL_EXPR_ARG (exp, 1));
4666 if (result)
4667 return expand_expr (result, target, mode, EXPAND_NORMAL);
4668 }
4669 return NULL_RTX;
4670 }
4671
/* Expand a call to __builtin_saveregs, generating the result in TARGET,
   if that's convenient.  */

rtx
expand_builtin_saveregs (void)
{
  rtx val, seq;

  /* Don't do __builtin_saveregs more than once in a function.
     Save the result of the first call and reuse it.  */
  if (saveregs_value != 0)
    return saveregs_value;

  /* When this function is called, it means that registers must be
     saved on entry to this function.  So we migrate the call to the
     first insn of this function.  */

  /* Collect the target-specific save code in a detached sequence so it
     can be re-emitted at the function entry below.  */
  start_sequence ();

  /* Do whatever the machine needs done in this case.  */
  val = targetm.calls.expand_builtin_saveregs ();

  seq = get_insns ();
  end_sequence ();

  saveregs_value = val;

  /* Put the insns after the NOTE that starts the function.  If this
     is inside a start_sequence, make the outer-level insn chain current, so
     the code is placed at the start of the function.  */
  push_topmost_sequence ();
  emit_insn_after (seq, entry_of_function ());
  pop_topmost_sequence ();

  return val;
}
4708
/* __builtin_args_info (N) returns word N of the arg space info
   for the current function.  The number and meanings of words
   is controlled by the definition of CUMULATIVE_ARGS.  */

static rtx
expand_builtin_args_info (tree exp)
{
  /* View the target's CUMULATIVE_ARGS record for the current function
     as a flat array of ints and return the requested element.  */
  int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
  int *word_ptr = (int *) &crtl->args.info;

  /* The int-array view above is only valid if the struct size is an
     exact multiple of sizeof (int).  */
  gcc_assert (sizeof (CUMULATIVE_ARGS) % sizeof (int) == 0);

  if (call_expr_nargs (exp) != 0)
    {
      /* The index must be a compile-time integer constant.  */
      if (!host_integerp (CALL_EXPR_ARG (exp, 0), 0))
	error ("argument of %<__builtin_args_info%> must be constant");
      else
	{
	  HOST_WIDE_INT wordnum = tree_low_cst (CALL_EXPR_ARG (exp, 0), 0);

	  if (wordnum < 0 || wordnum >= nwords)
	    error ("argument of %<__builtin_args_info%> out of range");
	  else
	    return GEN_INT (word_ptr[wordnum]);
	}
    }
  else
    error ("missing argument in %<__builtin_args_info%>");

  /* Every error path falls through to a zero result.  */
  return const0_rtx;
}
4740
4741 /* Expand a call to __builtin_next_arg. */
4742
4743 static rtx
4744 expand_builtin_next_arg (void)
4745 {
4746 /* Checking arguments is already done in fold_builtin_next_arg
4747 that must be called before this function. */
4748 return expand_binop (ptr_mode, add_optab,
4749 crtl->args.internal_arg_pointer,
4750 crtl->args.arg_offset_rtx,
4751 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4752 }
4753
/* Make it easier for the backends by protecting the valist argument
   from multiple evaluations.  LOC is the location for any new trees.
   NEEDS_LVALUE is nonzero when the caller needs to store through the
   returned expression (e.g. va_start/va_copy destinations).  */

static tree
stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
{
  tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));

  /* Callers are expected to have validated the va_list type already.  */
  gcc_assert (vatype != NULL_TREE);

  if (TREE_CODE (vatype) == ARRAY_TYPE)
    {
      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);

      /* For this case, the backends will be expecting a pointer to
	 vatype, but it's possible we've actually been given an array
	 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
	 So fix it.  */
      if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
	{
	  tree p1 = build_pointer_type (TREE_TYPE (vatype));
	  valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
	}
    }
  else
    {
      tree pt;

      if (! needs_lvalue)
	{
	  if (! TREE_SIDE_EFFECTS (valist))
	    return valist;

	  pt = build_pointer_type (vatype);
	  valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
	  /* Mark the ADDR_EXPR volatile-ish so the save_expr below
	     actually captures it.  */
	  TREE_SIDE_EFFECTS (valist) = 1;
	}

      /* Evaluate the address once, then re-dereference it, so the
	 result can be both read and written without re-evaluating
	 the original expression.  */
      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);
      valist = build_fold_indirect_ref_loc (loc, valist);
    }

  return valist;
}
4800
/* The "standard" definition of va_list is void*.  Targets with a
   richer va_list presumably override this hook -- see the other
   std_*_va_list functions below.  */

tree
std_build_builtin_va_list (void)
{
  return ptr_type_node;
}
4808
/* The "standard" abi va_list is va_list_type_node.  FNDECL is unused
   here; the parameter exists to match the hook signature — presumably
   some targets pick the va_list type per-function (TODO confirm).  */

tree
std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
{
  return va_list_type_node;
}
4816
/* The "standard" type of va_list is va_list_type_node.  Return
   va_list_type_node when TYPE is (or decays to) the canonical va_list
   type, and NULL_TREE otherwise.  */

tree
std_canonical_va_list_type (tree type)
{
  tree wtype, htype;

  /* Strip one level of indirection: TYPE may arrive as an
     INDIRECT_REF of the va_list, or as a pointer-to-pointer.  */
  if (INDIRECT_REF_P (type))
    type = TREE_TYPE (type);
  else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE(type)))
    type = TREE_TYPE (type);
  wtype = va_list_type_node;
  htype = type;
  /* Treat structure va_list types.  */
  if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
    htype = TREE_TYPE (htype);
  else if (TREE_CODE (wtype) == ARRAY_TYPE)
    {
      /* If va_list is an array type, the argument may have decayed
	 to a pointer type, e.g. by being passed to another function.
	 In that case, unwrap both types so that we can compare the
	 underlying records.  */
      if (TREE_CODE (htype) == ARRAY_TYPE
	  || POINTER_TYPE_P (htype))
	{
	  wtype = TREE_TYPE (wtype);
	  htype = TREE_TYPE (htype);
	}
    }
  /* Compare main variants so qualifiers don't affect the result.  */
  if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
    return va_list_type_node;

  return NULL_TREE;
}
4851
/* The "standard" implementation of va_start: just assign `nextarg' to
   the variable.  */

void
std_expand_builtin_va_start (tree valist, rtx nextarg)
{
  /* Expand VALIST as a store destination, then copy NEXTARG into it,
     converting modes if they differ.  */
  rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
  convert_move (va_r, nextarg, 0);
}
4861
/* Expand EXP, a call to __builtin_va_start.  Always returns const0_rtx:
   va_start has no useful value, only the side effect of initializing
   the va_list.  */

static rtx
expand_builtin_va_start (tree exp)
{
  rtx nextarg;
  tree valist;
  location_t loc = EXPR_LOCATION (exp);

  if (call_expr_nargs (exp) < 2)
    {
      error_at (loc, "too few arguments to function %<va_start%>");
      return const0_rtx;
    }

  /* fold_builtin_next_arg diagnoses bad second arguments; a nonzero
     return means an error was reported, so emit nothing.  */
  if (fold_builtin_next_arg (exp, true))
    return const0_rtx;

  nextarg = expand_builtin_next_arg ();
  /* Protect the va_list operand from multiple evaluation; 1 = we need
     an lvalue since va_start writes into it.  */
  valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);

  /* Prefer the target's expander; fall back to the standard one.  */
  if (targetm.expand_builtin_va_start)
    targetm.expand_builtin_va_start (valist, nextarg);
  else
    std_expand_builtin_va_start (valist, nextarg);

  return const0_rtx;
}
4890
/* The "standard" implementation of va_arg: read the value from the
   current (padded) address and increment by the (padded) size.  Emits
   setup statements into PRE_P (and shares values with POST_P) and
   returns a tree for the fetched value.  */

tree
std_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
			  gimple_seq *post_p)
{
  tree addr, t, type_size, rounded_size, valist_tmp;
  unsigned HOST_WIDE_INT align, boundary;
  bool indirect;

#ifdef ARGS_GROW_DOWNWARD
  /* All of the alignment and movement below is for args-grow-up machines.
     As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
     implement their own specialized gimplify_va_arg_expr routines.  */
  gcc_unreachable ();
#endif

  /* Types passed by reference: fetch a pointer and dereference it at
     the end instead of fetching the value itself.  */
  indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
  if (indirect)
    type = build_pointer_type (type);

  align = PARM_BOUNDARY / BITS_PER_UNIT;
  boundary = FUNCTION_ARG_BOUNDARY (TYPE_MODE (type), type);

  /* When we align parameter on stack for caller, if the parameter
     alignment is beyond MAX_SUPPORTED_STACK_ALIGNMENT, it will be
     aligned at MAX_SUPPORTED_STACK_ALIGNMENT.  We will match callee
     here with caller.  */
  if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
    boundary = MAX_SUPPORTED_STACK_ALIGNMENT;

  boundary /= BITS_PER_UNIT;

  /* Hoist the valist value into a temporary for the moment.  */
  valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);

  /* va_list pointer is aligned to PARM_BOUNDARY.  If argument actually
     requires greater alignment, we must perform dynamic alignment.  */
  if (boundary > align
      && !integer_zerop (TYPE_SIZE (type)))
    {
      /* valist_tmp = valist_tmp + (boundary - 1), then ...  */
      t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
		  fold_build2 (POINTER_PLUS_EXPR,
			       TREE_TYPE (valist),
			       valist_tmp, size_int (boundary - 1)));
      gimplify_and_add (t, pre_p);

      /* ... valist_tmp &= -boundary, rounding up to the boundary.  */
      t = fold_convert (sizetype, valist_tmp);
      t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
		  fold_convert (TREE_TYPE (valist),
				fold_build2 (BIT_AND_EXPR, sizetype, t,
					     size_int (-boundary))));
      gimplify_and_add (t, pre_p);
    }
  else
    boundary = align;

  /* If the actual alignment is less than the alignment of the type,
     adjust the type accordingly so that we don't assume strict alignment
     when dereferencing the pointer.  */
  boundary *= BITS_PER_UNIT;
  if (boundary < TYPE_ALIGN (type))
    {
      type = build_variant_type_copy (type);
      TYPE_ALIGN (type) = boundary;
    }

  /* Compute the rounded size of the type.  */
  type_size = size_in_bytes (type);
  rounded_size = round_up (type_size, align);

  /* Reduce rounded_size so it's sharable with the postqueue.  */
  gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);

  /* Get AP.  */
  addr = valist_tmp;
  if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
    {
      /* Small args are padded downward.  */
      t = fold_build2_loc (input_location, GT_EXPR, sizetype,
			   rounded_size, size_int (align));
      t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
		       size_binop (MINUS_EXPR, rounded_size, type_size));
      addr = fold_build2 (POINTER_PLUS_EXPR,
			  TREE_TYPE (addr), addr, t);
    }

  /* Compute new value for AP.  */
  t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist), valist_tmp, rounded_size);
  t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
  gimplify_and_add (t, pre_p);

  addr = fold_convert (build_pointer_type (type), addr);

  /* For by-reference types, ADDR holds a pointer-to-pointer; strip the
     extra level before the final dereference.  */
  if (indirect)
    addr = build_va_arg_indirect_ref (addr);

  return build_va_arg_indirect_ref (addr);
}
4991
4992 /* Build an indirect-ref expression over the given TREE, which represents a
4993 piece of a va_arg() expansion. */
4994 tree
4995 build_va_arg_indirect_ref (tree addr)
4996 {
4997 addr = build_fold_indirect_ref_loc (EXPR_LOCATION (addr), addr);
4998
4999 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
5000 mf_mark (addr);
5001
5002 return addr;
5003 }
5004
5005 /* Return a dummy expression of type TYPE in order to keep going after an
5006 error. */
5007
5008 static tree
5009 dummy_object (tree type)
5010 {
5011 tree t = build_int_cst (build_pointer_type (type), 0);
5012 return build1 (INDIRECT_REF, type, t);
5013 }
5014
/* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
   builtin function, but a very special sort of operator.  Replaces
   *EXPR_P with the gimplified fetch (or a dummy/trap on error) and
   returns the usual gimplify status.  */

enum gimplify_status
gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree promoted_type, have_va_type;
  tree valist = TREE_OPERAND (*expr_p, 0);
  tree type = TREE_TYPE (*expr_p);
  tree t;
  location_t loc = EXPR_LOCATION (*expr_p);

  /* Verify that valist is of the proper type.  */
  have_va_type = TREE_TYPE (valist);
  if (have_va_type == error_mark_node)
    return GS_ERROR;
  have_va_type = targetm.canonical_va_list_type (have_va_type);

  if (have_va_type == NULL_TREE)
    {
      error_at (loc, "first argument to %<va_arg%> not of type %<va_list%>");
      return GS_ERROR;
    }

  /* Generate a diagnostic for requesting data of a type that cannot
     be passed through `...' due to type promotion at the call site.  */
  if ((promoted_type = lang_hooks.types.type_promotes_to (type))
      != type)
    {
      /* Emit the help note only once per compilation.  */
      static bool gave_help;
      bool warned;

      /* Unfortunately, this is merely undefined, rather than a constraint
	 violation, so we cannot make this an error.  If this call is never
	 executed, the program is still strictly conforming.  */
      warned = warning_at (loc, 0,
			   "%qT is promoted to %qT when passed through %<...%>",
			   type, promoted_type);
      if (!gave_help && warned)
	{
	  gave_help = true;
	  inform (loc, "(so you should pass %qT not %qT to %<va_arg%>)",
		  promoted_type, type);
	}

      /* We can, however, treat "undefined" any way we please.
	 Call abort to encourage the user to fix the program.  */
      if (warned)
	inform (loc, "if this code is reached, the program will abort");
      /* Before the abort, allow the evaluation of the va_list
	 expression to exit or longjmp.  */
      gimplify_and_add (valist, pre_p);
      t = build_call_expr_loc (loc,
			       implicit_built_in_decls[BUILT_IN_TRAP], 0);
      gimplify_and_add (t, pre_p);

      /* This is dead code, but go ahead and finish so that the
	 mode of the result comes out right.  */
      *expr_p = dummy_object (type);
      return GS_ALL_DONE;
    }
  else
    {
      /* Make it easier for the backends by protecting the valist argument
	 from multiple evaluations.  */
      if (TREE_CODE (have_va_type) == ARRAY_TYPE)
	{
	  /* For this case, the backends will be expecting a pointer to
	     TREE_TYPE (abi), but it's possible we've
	     actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
	     So fix it.  */
	  if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
	    {
	      tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
	      valist = fold_convert_loc (loc, p1,
					 build_fold_addr_expr_loc (loc, valist));
	    }

	  gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
	}
      else
	gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);

      if (!targetm.gimplify_va_arg_expr)
	/* FIXME: Once most targets are converted we should merely
	   assert this is non-null.  */
	return GS_ALL_DONE;

      *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
      return GS_OK;
    }
}
5107
5108 /* Expand EXP, a call to __builtin_va_end. */
5109
5110 static rtx
5111 expand_builtin_va_end (tree exp)
5112 {
5113 tree valist = CALL_EXPR_ARG (exp, 0);
5114
5115 /* Evaluate for side effects, if needed. I hate macros that don't
5116 do that. */
5117 if (TREE_SIDE_EFFECTS (valist))
5118 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
5119
5120 return const0_rtx;
5121 }
5122
/* Expand EXP, a call to __builtin_va_copy.  We do this as a
   builtin rather than just as an assignment in stdarg.h because of the
   nastiness of array-type va_list types.  */

static rtx
expand_builtin_va_copy (tree exp)
{
  tree dst, src, t;
  location_t loc = EXPR_LOCATION (exp);

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  /* Destination must be an lvalue (1); source need not be (0).  */
  dst = stabilize_va_list_loc (loc, dst, 1);
  src = stabilize_va_list_loc (loc, src, 0);

  gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);

  if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
    {
      /* Scalar (e.g. pointer or record) va_list: a plain assignment.  */
      t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }
  else
    {
      rtx dstb, srcb, size;

      /* Array-type va_list: copy the whole array with a block move.
	 Evaluate to pointers.  */
      dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
      srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
      size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
			  NULL_RTX, VOIDmode, EXPAND_NORMAL);

      dstb = convert_memory_address (Pmode, dstb);
      srcb = convert_memory_address (Pmode, srcb);

      /* "Dereference" to BLKmode memories.  */
      dstb = gen_rtx_MEM (BLKmode, dstb);
      set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
      set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
      srcb = gen_rtx_MEM (BLKmode, srcb);
      set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
      set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));

      /* Copy.  */
      emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
    }

  return const0_rtx;
}
5174
/* Expand a call to one of the builtin functions __builtin_frame_address or
   __builtin_return_address.  The two share this expander; FNDECL's
   DECL_FUNCTION_CODE distinguishes them for diagnostics and the final
   result handling.  */

static rtx
expand_builtin_frame_address (tree fndecl, tree exp)
{
  /* The argument must be a nonnegative integer constant.
     It counts the number of frames to scan up the stack.
     The value is the return address saved in that frame.  */
  if (call_expr_nargs (exp) == 0)
    /* Warning about missing arg was already issued.  */
    return const0_rtx;
  else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
    {
      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
	error ("invalid argument to %<__builtin_frame_address%>");
      else
	error ("invalid argument to %<__builtin_return_address%>");
      return const0_rtx;
    }
  else
    {
      rtx tem
	= expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
				      tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));

      /* Some ports cannot access arbitrary stack frames.  */
      if (tem == NULL)
	{
	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
	    warning (0, "unsupported argument to %<__builtin_frame_address%>");
	  else
	    warning (0, "unsupported argument to %<__builtin_return_address%>");
	  return const0_rtx;
	}

      /* For __builtin_frame_address, return what we've got.  */
      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
	return tem;

      /* For __builtin_return_address, copy complex values into a
	 register so callers get something usable.  */
      if (!REG_P (tem)
	  && ! CONSTANT_P (tem))
	tem = copy_to_mode_reg (Pmode, tem);
      return tem;
    }
}
5221
5222 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if
5223 we failed and the caller should emit a normal call, otherwise try to get
5224 the result in TARGET, if convenient. */
5225
5226 static rtx
5227 expand_builtin_alloca (tree exp, rtx target)
5228 {
5229 rtx op0;
5230 rtx result;
5231
5232 /* Emit normal call if marked not-inlineable. */
5233 if (CALL_CANNOT_INLINE_P (exp))
5234 return NULL_RTX;
5235
5236 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5237 return NULL_RTX;
5238
5239 /* Compute the argument. */
5240 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5241
5242 /* Allocate the desired space. */
5243 result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
5244 result = convert_memory_address (ptr_mode, result);
5245
5246 return result;
5247 }
5248
5249 /* Expand a call to a bswap builtin with argument ARG0. MODE
5250 is the mode to expand with. */
5251
5252 static rtx
5253 expand_builtin_bswap (tree exp, rtx target, rtx subtarget)
5254 {
5255 enum machine_mode mode;
5256 tree arg;
5257 rtx op0;
5258
5259 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5260 return NULL_RTX;
5261
5262 arg = CALL_EXPR_ARG (exp, 0);
5263 mode = TYPE_MODE (TREE_TYPE (arg));
5264 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5265
5266 target = expand_unop (mode, bswap_optab, op0, target, 1);
5267
5268 gcc_assert (target);
5269
5270 return convert_to_mode (mode, target, 0);
5271 }
5272
5273 /* Expand a call to a unary builtin in EXP.
5274 Return NULL_RTX if a normal call should be emitted rather than expanding the
5275 function in-line. If convenient, the result should be placed in TARGET.
5276 SUBTARGET may be used as the target for computing one of EXP's operands. */
5277
5278 static rtx
5279 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
5280 rtx subtarget, optab op_optab)
5281 {
5282 rtx op0;
5283
5284 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5285 return NULL_RTX;
5286
5287 /* Compute the argument. */
5288 op0 = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
5289 VOIDmode, EXPAND_NORMAL);
5290 /* Compute op, into TARGET if possible.
5291 Set TARGET to wherever the result comes back. */
5292 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5293 op_optab, op0, target, 1);
5294 gcc_assert (target);
5295
5296 return convert_to_mode (target_mode, target, 0);
5297 }
5298
5299 /* If the string passed to fputs is a constant and is one character
5300 long, we attempt to transform this call into __builtin_fputc(). */
5301
5302 static rtx
5303 expand_builtin_fputs (tree exp, rtx target, bool unlocked)
5304 {
5305 /* Verify the arguments in the original call. */
5306 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5307 {
5308 tree result = fold_builtin_fputs (EXPR_LOCATION (exp),
5309 CALL_EXPR_ARG (exp, 0),
5310 CALL_EXPR_ARG (exp, 1),
5311 (target == const0_rtx),
5312 unlocked, NULL_TREE);
5313 if (result)
5314 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
5315 }
5316 return NULL_RTX;
5317 }
5318
5319 /* Expand a call to __builtin_expect. We just return our argument
5320 as the builtin_expect semantic should've been already executed by
5321 tree branch prediction pass. */
5322
5323 static rtx
5324 expand_builtin_expect (tree exp, rtx target)
5325 {
5326 tree arg, c;
5327
5328 if (call_expr_nargs (exp) < 2)
5329 return const0_rtx;
5330 arg = CALL_EXPR_ARG (exp, 0);
5331 c = CALL_EXPR_ARG (exp, 1);
5332
5333 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5334 /* When guessing was done, the hints should be already stripped away. */
5335 gcc_assert (!flag_guess_branch_prob
5336 || optimize == 0 || errorcount || sorrycount);
5337 return target;
5338 }
5339
/* Expand a call to __builtin_trap: emit the target's trap instruction
   if it has one, otherwise call abort through a libcall, and follow
   either with a barrier since control does not continue.  */
void
expand_builtin_trap (void)
{
#ifdef HAVE_trap
  if (HAVE_trap)
    emit_insn (gen_trap ());
  else
#endif
    emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
  emit_barrier ();
}
5351
/* Expand a call to __builtin_unreachable.  We do nothing except emit
   a barrier saying that control flow will not pass here.

   It is the responsibility of the program being compiled to ensure
   that control flow does never reach __builtin_unreachable.  */
static void
expand_builtin_unreachable (void)
{
  /* The barrier marks the insn stream as not falling through here.  */
  emit_barrier ();
}
5362
/* Expand EXP, a call to fabs, fabsf or fabsl.
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function inline.  If convenient, the result should be placed
   in TARGET.  SUBTARGET may be used as the target for computing
   the operand.  */

static rtx
expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
{
  enum machine_mode mode;
  tree arg;
  rtx op0;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  /* Wrap the argument in a SAVE_EXPR and store it back into the
     CALL_EXPR — presumably so the safe_from_p check below examines
     the same tree that is expanded (TODO confirm).  */
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));
  op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
  return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
}
5385
5386 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5387 Return NULL is a normal call should be emitted rather than expanding the
5388 function inline. If convenient, the result should be placed in TARGET.
5389 SUBTARGET may be used as the target for computing the operand. */
5390
5391 static rtx
5392 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5393 {
5394 rtx op0, op1;
5395 tree arg;
5396
5397 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5398 return NULL_RTX;
5399
5400 arg = CALL_EXPR_ARG (exp, 0);
5401 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5402
5403 arg = CALL_EXPR_ARG (exp, 1);
5404 op1 = expand_normal (arg);
5405
5406 return expand_copysign (op0, op1, target);
5407 }
5408
5409 /* Create a new constant string literal and return a char* pointer to it.
5410 The STRING_CST value is the LEN characters at STR. */
5411 tree
5412 build_string_literal (int len, const char *str)
5413 {
5414 tree t, elem, index, type;
5415
5416 t = build_string (len, str);
5417 elem = build_type_variant (char_type_node, 1, 0);
5418 index = build_index_type (size_int (len - 1));
5419 type = build_array_type (elem, index);
5420 TREE_TYPE (t) = type;
5421 TREE_CONSTANT (t) = 1;
5422 TREE_READONLY (t) = 1;
5423 TREE_STATIC (t) = 1;
5424
5425 type = build_pointer_type (elem);
5426 t = build1 (ADDR_EXPR, type,
5427 build4 (ARRAY_REF, elem,
5428 t, integer_zero_node, NULL_TREE, NULL_TREE));
5429 return t;
5430 }
5431
/* Expand EXP, a call to printf or printf_unlocked.
   Return NULL_RTX if a normal call should be emitted rather than transforming
   the function inline.  If convenient, the result should be placed in
   TARGET with mode MODE.  UNLOCKED indicates this is a printf_unlocked
   call.  */
static rtx
expand_builtin_printf (tree exp, rtx target, enum machine_mode mode,
		       bool unlocked)
{
  /* If we're using an unlocked function, assume the other unlocked
     functions exist explicitly.  */
  tree const fn_putchar = unlocked ? built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED]
    : implicit_built_in_decls[BUILT_IN_PUTCHAR];
  tree const fn_puts = unlocked ? built_in_decls[BUILT_IN_PUTS_UNLOCKED]
    : implicit_built_in_decls[BUILT_IN_PUTS];
  const char *fmt_str;
  tree fn = 0;
  tree fmt, arg;
  int nargs = call_expr_nargs (exp);

  /* If the return value is used, don't do the transformation
     (puts/putchar do not return the character count printf would).  */
  if (target != const0_rtx)
    return NULL_RTX;

  /* Verify the required arguments in the original call.  */
  if (nargs == 0)
    return NULL_RTX;
  fmt = CALL_EXPR_ARG (exp, 0);
  if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
    return NULL_RTX;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return NULL_RTX;

  /* Initialize the target-charset versions of '%', 's', etc.  */
  if (!init_target_chars ())
    return NULL_RTX;

  /* If the format specifier was "%s\n", call __builtin_puts(arg).  */
  if (strcmp (fmt_str, target_percent_s_newline) == 0)
    {
      if ((nargs != 2)
	  || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 1))))
	return NULL_RTX;
      if (fn_puts)
	fn = build_call_expr (fn_puts, 1, CALL_EXPR_ARG (exp, 1));
    }
  /* If the format specifier was "%c", call __builtin_putchar(arg).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if ((nargs != 2)
	  || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1))) != INTEGER_TYPE)
	return NULL_RTX;
      if (fn_putchar)
	fn = build_call_expr (fn_putchar, 1, CALL_EXPR_ARG (exp, 1));
    }
  else
    {
      /* We can't handle anything else with % args or %% ... yet.  */
      if (strchr (fmt_str, target_percent))
	return NULL_RTX;

      if (nargs > 1)
	return NULL_RTX;

      /* If the format specifier was "", printf does nothing.  */
      if (fmt_str[0] == '\0')
	return const0_rtx;
      /* If the format specifier has length of 1, call putchar.  */
      if (fmt_str[1] == '\0')
	{
	  /* Given printf("c"), (where c is any one character,)
	     convert "c"[0] to an int and pass that to the replacement
	     function.  */
	  arg = build_int_cst (NULL_TREE, fmt_str[0]);
	  if (fn_putchar)
	    fn = build_call_expr (fn_putchar, 1, arg);
	}
      else
	{
	  /* If the format specifier was "string\n", call puts("string").  */
	  size_t len = strlen (fmt_str);
	  if ((unsigned char)fmt_str[len - 1] == target_newline)
	    {
	      /* Create a NUL-terminated string that's one char shorter
		 than the original, stripping off the trailing '\n'.  */
	      char *newstr = XALLOCAVEC (char, len);
	      memcpy (newstr, fmt_str, len - 1);
	      newstr[len - 1] = 0;
	      /* LEN here includes the terminating NUL just stored.  */
	      arg = build_string_literal (len, newstr);
	      if (fn_puts)
		fn = build_call_expr (fn_puts, 1, arg);
	    }
	  else
	    /* We'd like to arrange to call fputs(string,stdout) here,
	       but we need stdout and don't have a way to get it yet.  */
	    return NULL_RTX;
	}
    }

  if (!fn)
    return NULL_RTX;
  /* Carry over the tail-call flag to the replacement call.  */
  if (TREE_CODE (fn) == CALL_EXPR)
    CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
  return expand_expr (fn, target, mode, EXPAND_NORMAL);
}
5539
/* Expand EXP, a call to fprintf or fprintf_unlocked.
   Return NULL_RTX if a normal call should be emitted rather than transforming
   the function inline.  If convenient, the result should be placed in
   TARGET with mode MODE.  UNLOCKED indicates this is a fprintf_unlocked
   call.  */
static rtx
expand_builtin_fprintf (tree exp, rtx target, enum machine_mode mode,
			bool unlocked)
{
  /* If we're using an unlocked function, assume the other unlocked
     functions exist explicitly.  */
  tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
    : implicit_built_in_decls[BUILT_IN_FPUTC];
  tree const fn_fputs = unlocked ? built_in_decls[BUILT_IN_FPUTS_UNLOCKED]
    : implicit_built_in_decls[BUILT_IN_FPUTS];
  const char *fmt_str;
  tree fn = 0;
  tree fmt, fp, arg;
  int nargs = call_expr_nargs (exp);

  /* If the return value is used, don't do the transformation
     (fputs/fputc do not return the character count fprintf would).  */
  if (target != const0_rtx)
    return NULL_RTX;

  /* Verify the required arguments in the original call:
     arg 0 is the FILE*, arg 1 the format string.  */
  if (nargs < 2)
    return NULL_RTX;
  fp = CALL_EXPR_ARG (exp, 0);
  if (! POINTER_TYPE_P (TREE_TYPE (fp)))
    return NULL_RTX;
  fmt = CALL_EXPR_ARG (exp, 1);
  if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
    return NULL_RTX;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return NULL_RTX;

  /* Initialize the target-charset versions of '%', 's', etc.  */
  if (!init_target_chars ())
    return NULL_RTX;

  /* If the format specifier was "%s", call __builtin_fputs(arg,fp).  */
  if (strcmp (fmt_str, target_percent_s) == 0)
    {
      if ((nargs != 3)
	  || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 2))))
	return NULL_RTX;
      arg = CALL_EXPR_ARG (exp, 2);
      if (fn_fputs)
	fn = build_call_expr (fn_fputs, 2, arg, fp);
    }
  /* If the format specifier was "%c", call __builtin_fputc(arg,fp).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if ((nargs != 3)
	  || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2))) != INTEGER_TYPE)
	return NULL_RTX;
      arg = CALL_EXPR_ARG (exp, 2);
      if (fn_fputc)
	fn = build_call_expr (fn_fputc, 2, arg, fp);
    }
  else
    {
      /* We can't handle anything else with % args or %% ... yet.  */
      if (strchr (fmt_str, target_percent))
	return NULL_RTX;

      if (nargs > 2)
	return NULL_RTX;

      /* If the format specifier was "", fprintf does nothing.  */
      if (fmt_str[0] == '\0')
	{
	  /* Evaluate and ignore FILE* argument for side-effects.  */
	  expand_expr (fp, const0_rtx, VOIDmode, EXPAND_NORMAL);
	  return const0_rtx;
	}

      /* When "string" doesn't contain %, replace all cases of
	 fprintf(stream,string) with fputs(string,stream).  The fputs
	 builtin will take care of special cases like length == 1.  */
      if (fn_fputs)
	fn = build_call_expr (fn_fputs, 2, fmt, fp);
    }

  if (!fn)
    return NULL_RTX;
  /* Carry over the tail-call flag to the replacement call.  */
  if (TREE_CODE (fn) == CALL_EXPR)
    CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
  return expand_expr (fn, target, mode, EXPAND_NORMAL);
}
5632
5633 /* Expand a call EXP to sprintf. Return NULL_RTX if
5634 a normal call should be emitted rather than expanding the function
5635 inline. If convenient, the result should be placed in TARGET with
5636 mode MODE. */
5637
5638 static rtx
5639 expand_builtin_sprintf (tree exp, rtx target, enum machine_mode mode)
5640 {
5641 tree dest, fmt;
5642 const char *fmt_str;
5643 int nargs = call_expr_nargs (exp);
5644
5645 /* Verify the required arguments in the original call. */
5646 if (nargs < 2)
5647 return NULL_RTX;
5648 dest = CALL_EXPR_ARG (exp, 0);
5649 if (! POINTER_TYPE_P (TREE_TYPE (dest)))
5650 return NULL_RTX;
5651 fmt = CALL_EXPR_ARG (exp, 0);
5652 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5653 return NULL_RTX;
5654
5655 /* Check whether the format is a literal string constant. */
5656 fmt_str = c_getstr (fmt);
5657 if (fmt_str == NULL)
5658 return NULL_RTX;
5659
5660 if (!init_target_chars ())
5661 return NULL_RTX;
5662
5663 /* If the format doesn't contain % args or %%, use strcpy. */
5664 if (strchr (fmt_str, target_percent) == 0)
5665 {
5666 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5667 tree exp;
5668
5669 if ((nargs > 2) || ! fn)
5670 return NULL_RTX;
5671 expand_expr (build_call_expr (fn, 2, dest, fmt),
5672 const0_rtx, VOIDmode, EXPAND_NORMAL);
5673 if (target == const0_rtx)
5674 return const0_rtx;
5675 exp = build_int_cst (NULL_TREE, strlen (fmt_str));
5676 return expand_expr (exp, target, mode, EXPAND_NORMAL);
5677 }
5678 /* If the format is "%s", use strcpy if the result isn't used. */
5679 else if (strcmp (fmt_str, target_percent_s) == 0)
5680 {
5681 tree fn, arg, len;
5682 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5683
5684 if (! fn)
5685 return NULL_RTX;
5686 if (nargs != 3)
5687 return NULL_RTX;
5688 arg = CALL_EXPR_ARG (exp, 2);
5689 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
5690 return NULL_RTX;
5691
5692 if (target != const0_rtx)
5693 {
5694 len = c_strlen (arg, 1);
5695 if (! len || TREE_CODE (len) != INTEGER_CST)
5696 return NULL_RTX;
5697 }
5698 else
5699 len = NULL_TREE;
5700
5701 expand_expr (build_call_expr (fn, 2, dest, arg),
5702 const0_rtx, VOIDmode, EXPAND_NORMAL);
5703
5704 if (target == const0_rtx)
5705 return const0_rtx;
5706 return expand_expr (len, target, mode, EXPAND_NORMAL);
5707 }
5708
5709 return NULL_RTX;
5710 }
5711
5712 /* Expand a call to either the entry or exit function profiler. */
5713
5714 static rtx
5715 expand_builtin_profile_func (bool exitp)
5716 {
5717 rtx this_rtx, which;
5718
5719 this_rtx = DECL_RTL (current_function_decl);
5720 gcc_assert (MEM_P (this_rtx));
5721 this_rtx = XEXP (this_rtx, 0);
5722
5723 if (exitp)
5724 which = profile_function_exit_libfunc;
5725 else
5726 which = profile_function_entry_libfunc;
5727
5728 emit_library_call (which, LCT_NORMAL, VOIDmode, 2, this_rtx, Pmode,
5729 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
5730 0),
5731 Pmode);
5732
5733 return const0_rtx;
5734 }
5735
/* Expand a call to __builtin___clear_cache.  Three cases, selected at
   preprocessing time:
     - no "clear_cache" insn but libgcc's __clear_cache does real work:
       return NULL_RTX so a normal library call is emitted;
     - no "clear_cache" insn and __clear_cache is a no-op: emit nothing;
     - the target provides a "clear_cache" insn: emit it inline.  */

static rtx
expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
{
#ifndef HAVE_clear_cache
#ifdef CLEAR_INSN_CACHE
  /* There is no "clear_cache" insn, and __clear_cache() in libgcc
     does something.  Just do the default expansion to a call to
     __clear_cache().  */
  return NULL_RTX;
#else
  /* There is no "clear_cache" insn, and __clear_cache() in libgcc
     does nothing.  There is no need to call it.  Do nothing.  */
  return const0_rtx;
#endif /* CLEAR_INSN_CACHE */
#else
  /* We have a "clear_cache" insn, and it will handle everything.  */
  tree begin, end;
  rtx begin_rtx, end_rtx;
  enum insn_code icode;

  /* We must not expand to a library call.  If we did, any
     fallback library function in libgcc that might contain a call to
     __builtin___clear_cache() would recurse infinitely.  */
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    {
      error ("both arguments to %<__builtin___clear_cache%> must be pointers");
      return const0_rtx;
    }

  if (HAVE_clear_cache)
    {
      icode = CODE_FOR_clear_cache;

      /* Expand each address, extend/truncate it to Pmode, and force it
	 into a register if the insn's operand predicate rejects it.  */
      begin = CALL_EXPR_ARG (exp, 0);
      begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
      begin_rtx = convert_memory_address (Pmode, begin_rtx);
      if (!insn_data[icode].operand[0].predicate (begin_rtx, Pmode))
	begin_rtx = copy_to_mode_reg (Pmode, begin_rtx);

      end = CALL_EXPR_ARG (exp, 1);
      end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
      end_rtx = convert_memory_address (Pmode, end_rtx);
      if (!insn_data[icode].operand[1].predicate (end_rtx, Pmode))
	end_rtx = copy_to_mode_reg (Pmode, end_rtx);

      emit_insn (gen_clear_cache (begin_rtx, end_rtx));
    }
  return const0_rtx;
#endif /* HAVE_clear_cache */
}
5788
5789 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5790
5791 static rtx
5792 round_trampoline_addr (rtx tramp)
5793 {
5794 rtx temp, addend, mask;
5795
5796 /* If we don't need too much alignment, we'll have been guaranteed
5797 proper alignment by get_trampoline_type. */
5798 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5799 return tramp;
5800
5801 /* Round address up to desired boundary. */
5802 temp = gen_reg_rtx (Pmode);
5803 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5804 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5805
5806 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5807 temp, 0, OPTAB_LIB_WIDEN);
5808 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5809 temp, 0, OPTAB_LIB_WIDEN);
5810
5811 return tramp;
5812 }
5813
/* Expand a call to __builtin_init_trampoline.  EXP carries three
   pointer arguments: the trampoline storage, the nested function, and
   the static chain value.  Emits the insns that initialize the
   trampoline and returns const0_rtx, or NULL_RTX if the argument list
   is malformed so that a normal call should be emitted instead.  */

static rtx
expand_builtin_init_trampoline (tree exp)
{
  tree t_tramp, t_func, t_chain;
  rtx r_tramp, r_func, r_chain;
#ifdef TRAMPOLINE_TEMPLATE
  rtx blktramp;
#endif

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
			 POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_tramp = CALL_EXPR_ARG (exp, 0);
  t_func = CALL_EXPR_ARG (exp, 1);
  t_chain = CALL_EXPR_ARG (exp, 2);

  r_tramp = expand_normal (t_tramp);
  r_func = expand_normal (t_func);
  r_chain = expand_normal (t_chain);

  /* Generate insns to initialize the trampoline.  */
  r_tramp = round_trampoline_addr (r_tramp);
#ifdef TRAMPOLINE_TEMPLATE
  /* Copy the target's canned trampoline code template into place;
     the target macro below presumably then patches in the function
     and chain values.  */
  blktramp = gen_rtx_MEM (BLKmode, r_tramp);
  set_mem_align (blktramp, TRAMPOLINE_ALIGNMENT);
  emit_block_move (blktramp, assemble_trampoline_template (),
		   GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
#endif
  /* Note for the rest of the compiler that a trampoline exists.  */
  trampolines_created = 1;
  INITIALIZE_TRAMPOLINE (r_tramp, r_func, r_chain);

  return const0_rtx;
}
5848
/* Expand a call to __builtin_adjust_trampoline.  Rounds the given
   trampoline address up to TRAMPOLINE_ALIGNMENT and applies the
   target-specific TRAMPOLINE_ADJUST_ADDRESS hook, if any.  Returns
   the adjusted address, or NULL_RTX if the argument is not a single
   pointer (so a normal call is emitted).  */

static rtx
expand_builtin_adjust_trampoline (tree exp)
{
  rtx tramp;

  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
  tramp = round_trampoline_addr (tramp);
#ifdef TRAMPOLINE_ADJUST_ADDRESS
  /* Some targets encode extra information in the address actually
     used to call a trampoline; let the target adjust it.  */
  TRAMPOLINE_ADJUST_ADDRESS (tramp);
#endif

  return tramp;
}
5865
/* Expand the call EXP to the built-in signbit, signbitf or signbitl
   function.  The function first checks whether the back end provides
   an insn to implement signbit for the respective mode.  If not, it
   checks whether the floating point format of the value is such that
   the sign bit can be extracted.  If that is not the case, the
   function returns NULL_RTX to indicate that a normal call should be
   emitted rather than expanding the function in-line.  EXP is the
   expression that is a call to the builtin function; if convenient,
   the result should be placed in TARGET.  */
static rtx
expand_builtin_signbit (tree exp, rtx target)
{
  const struct real_format *fmt;
  enum machine_mode fmode, imode, rmode;
  HOST_WIDE_INT hi, lo;
  tree arg;
  int word, bitpos;
  enum insn_code icode;
  rtx temp;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  /* FMODE is the float argument's mode, RMODE the integer result's.  */
  fmode = TYPE_MODE (TREE_TYPE (arg));
  rmode = TYPE_MODE (TREE_TYPE (exp));
  fmt = REAL_MODE_FORMAT (fmode);

  arg = builtin_save_expr (arg);

  /* Expand the argument yielding a RTX expression. */
  temp = expand_normal (arg);

  /* Check if the back end provides an insn that handles signbit for the
     argument's mode. */
  icode = signbit_optab->handlers [(int) fmode].insn_code;
  if (icode != CODE_FOR_nothing)
    {
      target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
      emit_unop_insn (icode, target, temp, UNKNOWN);
      return target;
    }

  /* For floating point formats without a sign bit, implement signbit
     as "ARG < 0.0". */
  bitpos = fmt->signbit_ro;
  if (bitpos < 0)
    {
      /* But we can't do this if the format supports signed zero. */
      if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
	return NULL_RTX;

      arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
			     build_real (TREE_TYPE (arg), dconst0));
      return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
    }

  if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
    {
      /* The value fits in one word: reinterpret it in the
	 corresponding integer mode.  */
      imode = int_mode_for_mode (fmode);
      if (imode == BLKmode)
	return NULL_RTX;
      temp = gen_lowpart (imode, temp);
    }
  else
    {
      /* Multi-word value: extract just the word holding the sign bit,
	 then work with the bit position within that word.  */
      imode = word_mode;
      /* Handle targets with different FP word orders. */
      if (FLOAT_WORDS_BIG_ENDIAN)
	word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
      else
	word = bitpos / BITS_PER_WORD;
      temp = operand_subword_force (temp, word, fmode);
      bitpos = bitpos % BITS_PER_WORD;
    }

  /* Force the intermediate word_mode (or narrower) result into a
     register.  This avoids attempting to create paradoxical SUBREGs
     of floating point modes below.  */
  temp = force_reg (imode, temp);

  /* If the bitpos is within the "result mode" lowpart, the operation
     can be implement with a single bitwise AND.  Otherwise, we need
     a right shift and an AND.  */

  if (bitpos < GET_MODE_BITSIZE (rmode))
    {
      /* HI/LO are the high and low halves of the double-word mask
	 constant with only the sign bit set.  */
      if (bitpos < HOST_BITS_PER_WIDE_INT)
	{
	  hi = 0;
	  lo = (HOST_WIDE_INT) 1 << bitpos;
	}
      else
	{
	  hi = (HOST_WIDE_INT) 1 << (bitpos - HOST_BITS_PER_WIDE_INT);
	  lo = 0;
	}

      if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
	temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp,
			   immed_double_const (lo, hi, rmode),
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }
  else
    {
      /* Perform a logical right shift to place the signbit in the least
	 significant bit, then truncate the result to the desired mode
	 and mask just this bit. */
      temp = expand_shift (RSHIFT_EXPR, imode, temp,
			   build_int_cst (NULL_TREE, bitpos), NULL_RTX, 1);
      temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp, const1_rtx,
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }

  return temp;
}
5985
5986 /* Expand fork or exec calls. TARGET is the desired target of the
5987 call. EXP is the call. FN is the
5988 identificator of the actual function. IGNORE is nonzero if the
5989 value is to be ignored. */
5990
5991 static rtx
5992 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5993 {
5994 tree id, decl;
5995 tree call;
5996
5997 /* If we are not profiling, just call the function. */
5998 if (!profile_arc_flag)
5999 return NULL_RTX;
6000
6001 /* Otherwise call the wrapper. This should be equivalent for the rest of
6002 compiler, so the code does not diverge, and the wrapper may run the
6003 code necessary for keeping the profiling sane. */
6004
6005 switch (DECL_FUNCTION_CODE (fn))
6006 {
6007 case BUILT_IN_FORK:
6008 id = get_identifier ("__gcov_fork");
6009 break;
6010
6011 case BUILT_IN_EXECL:
6012 id = get_identifier ("__gcov_execl");
6013 break;
6014
6015 case BUILT_IN_EXECV:
6016 id = get_identifier ("__gcov_execv");
6017 break;
6018
6019 case BUILT_IN_EXECLP:
6020 id = get_identifier ("__gcov_execlp");
6021 break;
6022
6023 case BUILT_IN_EXECLE:
6024 id = get_identifier ("__gcov_execle");
6025 break;
6026
6027 case BUILT_IN_EXECVP:
6028 id = get_identifier ("__gcov_execvp");
6029 break;
6030
6031 case BUILT_IN_EXECVE:
6032 id = get_identifier ("__gcov_execve");
6033 break;
6034
6035 default:
6036 gcc_unreachable ();
6037 }
6038
6039 decl = build_decl (DECL_SOURCE_LOCATION (fn),
6040 FUNCTION_DECL, id, TREE_TYPE (fn));
6041 DECL_EXTERNAL (decl) = 1;
6042 TREE_PUBLIC (decl) = 1;
6043 DECL_ARTIFICIAL (decl) = 1;
6044 TREE_NOTHROW (decl) = 1;
6045 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
6046 DECL_VISIBILITY_SPECIFIED (decl) = 1;
6047 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
6048 return expand_call (call, target, ignore);
6049 }
6050
6051
6052 \f
6053 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
6054 the pointer in these functions is void*, the tree optimizers may remove
6055 casts. The mode computed in expand_builtin isn't reliable either, due
6056 to __sync_bool_compare_and_swap.
6057
6058 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
6059 group of builtins. This gives us log2 of the mode size. */
6060
6061 static inline enum machine_mode
6062 get_builtin_sync_mode (int fcode_diff)
6063 {
6064 /* The size is not negotiable, so ask not to get BLKmode in return
6065 if the target indicates that a smaller size would be better. */
6066 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
6067 }
6068
6069 /* Expand the memory expression LOC and return the appropriate memory operand
6070 for the builtin_sync operations. */
6071
6072 static rtx
6073 get_builtin_sync_mem (tree loc, enum machine_mode mode)
6074 {
6075 rtx addr, mem;
6076
6077 addr = expand_expr (loc, NULL_RTX, Pmode, EXPAND_SUM);
6078
6079 /* Note that we explicitly do not want any alias information for this
6080 memory, so that we kill all other live memories. Otherwise we don't
6081 satisfy the full barrier semantics of the intrinsic. */
6082 mem = validize_mem (gen_rtx_MEM (mode, addr));
6083
6084 set_mem_align (mem, get_pointer_alignment (loc, BIGGEST_ALIGNMENT));
6085 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
6086 MEM_VOLATILE_P (mem) = 1;
6087
6088 return mem;
6089 }
6090
/* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
   EXP is the CALL_EXPR.  CODE is the rtx code
   that corresponds to the arithmetic or logical operation from the name;
   an exception here is that NOT actually means NAND.  TARGET is an optional
   place for us to store the results; AFTER is true if this is the
   fetch_and_xxx form.  IGNORE is true if we don't actually care about
   the result of the operation at all.  */

static rtx
expand_builtin_sync_operation (enum machine_mode mode, tree exp,
			       enum rtx_code code, bool after,
			       rtx target, bool ignore)
{
  rtx val, mem;
  enum machine_mode old_mode;
  location_t loc = EXPR_LOCATION (exp);

  /* The NAND builtins changed meaning in GCC 4.4; under -Wsync-nand,
     tell the user once per kind (fetch-and-nand / nand-and-fetch).  */
  if (code == NOT && warn_sync_nand)
    {
      tree fndecl = get_callee_fndecl (exp);
      enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

      /* Function-static flags so each note is emitted at most once
	 per compilation.  */
      static bool warned_f_a_n, warned_n_a_f;

      switch (fcode)
	{
	case BUILT_IN_FETCH_AND_NAND_1:
	case BUILT_IN_FETCH_AND_NAND_2:
	case BUILT_IN_FETCH_AND_NAND_4:
	case BUILT_IN_FETCH_AND_NAND_8:
	case BUILT_IN_FETCH_AND_NAND_16:

	  if (warned_f_a_n)
	    break;

	  fndecl = implicit_built_in_decls[BUILT_IN_FETCH_AND_NAND_N];
	  inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
	  warned_f_a_n = true;
	  break;

	case BUILT_IN_NAND_AND_FETCH_1:
	case BUILT_IN_NAND_AND_FETCH_2:
	case BUILT_IN_NAND_AND_FETCH_4:
	case BUILT_IN_NAND_AND_FETCH_8:
	case BUILT_IN_NAND_AND_FETCH_16:

	  if (warned_n_a_f)
	    break;

	  fndecl = implicit_built_in_decls[BUILT_IN_NAND_AND_FETCH_N];
	  inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
	  warned_n_a_f = true;
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
  /* If VAL is promoted to a wider mode, convert it back to MODE.  Take care
     of CONST_INTs, where we know the old_mode only from the call argument.  */
  old_mode = GET_MODE (val);
  if (old_mode == VOIDmode)
    old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
  val = convert_modes (mode, old_mode, val, 1);

  /* When the result is unused we can emit the cheaper plain operation;
     otherwise emit the fetch variant (before/after per AFTER).  */
  if (ignore)
    return expand_sync_operation (mem, val, code);
  else
    return expand_sync_fetch_operation (mem, val, code, after, target);
}
6166
6167 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
6168 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
6169 true if this is the boolean form. TARGET is a place for us to store the
6170 results; this is NOT optional if IS_BOOL is true. */
6171
6172 static rtx
6173 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
6174 bool is_bool, rtx target)
6175 {
6176 rtx old_val, new_val, mem;
6177 enum machine_mode old_mode;
6178
6179 /* Expand the operands. */
6180 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6181
6182
6183 old_val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX,
6184 mode, EXPAND_NORMAL);
6185 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6186 of CONST_INTs, where we know the old_mode only from the call argument. */
6187 old_mode = GET_MODE (old_val);
6188 if (old_mode == VOIDmode)
6189 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
6190 old_val = convert_modes (mode, old_mode, old_val, 1);
6191
6192 new_val = expand_expr (CALL_EXPR_ARG (exp, 2), NULL_RTX,
6193 mode, EXPAND_NORMAL);
6194 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6195 of CONST_INTs, where we know the old_mode only from the call argument. */
6196 old_mode = GET_MODE (new_val);
6197 if (old_mode == VOIDmode)
6198 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2)));
6199 new_val = convert_modes (mode, old_mode, new_val, 1);
6200
6201 if (is_bool)
6202 return expand_bool_compare_and_swap (mem, old_val, new_val, target);
6203 else
6204 return expand_val_compare_and_swap (mem, old_val, new_val, target);
6205 }
6206
6207 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
6208 general form is actually an atomic exchange, and some targets only
6209 support a reduced form with the second argument being a constant 1.
6210 EXP is the CALL_EXPR; TARGET is an optional place for us to store
6211 the results. */
6212
6213 static rtx
6214 expand_builtin_lock_test_and_set (enum machine_mode mode, tree exp,
6215 rtx target)
6216 {
6217 rtx val, mem;
6218 enum machine_mode old_mode;
6219
6220 /* Expand the operands. */
6221 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6222 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
6223 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6224 of CONST_INTs, where we know the old_mode only from the call argument. */
6225 old_mode = GET_MODE (val);
6226 if (old_mode == VOIDmode)
6227 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
6228 val = convert_modes (mode, old_mode, val, 1);
6229
6230 return expand_sync_lock_test_and_set (mem, val, target);
6231 }
6232
/* Expand the __sync_synchronize intrinsic.  Emits, in order of
   preference: the target's memory_barrier insn, a call to the
   synchronize libfunc, or a volatile asm with a "memory" clobber.  */

static void
expand_builtin_synchronize (void)
{
  tree x;

#ifdef HAVE_memory_barrier
  if (HAVE_memory_barrier)
    {
      emit_insn (gen_memory_barrier ());
      return;
    }
#endif

  if (synchronize_libfunc != NULL_RTX)
    {
      emit_library_call (synchronize_libfunc, LCT_NORMAL, VOIDmode, 0);
      return;
    }

  /* If no explicit memory barrier instruction is available, create an
     empty asm stmt with a memory clobber.  */
  x = build4 (ASM_EXPR, void_type_node, build_string (0, ""), NULL, NULL,
	      tree_cons (NULL, build_string (6, "memory"), NULL));
  ASM_VOLATILE_P (x) = 1;
  expand_asm_expr (x);
}
6261
6262 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
6263
6264 static void
6265 expand_builtin_lock_release (enum machine_mode mode, tree exp)
6266 {
6267 enum insn_code icode;
6268 rtx mem, insn;
6269 rtx val = const0_rtx;
6270
6271 /* Expand the operands. */
6272 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6273
6274 /* If there is an explicit operation in the md file, use it. */
6275 icode = sync_lock_release[mode];
6276 if (icode != CODE_FOR_nothing)
6277 {
6278 if (!insn_data[icode].operand[1].predicate (val, mode))
6279 val = force_reg (mode, val);
6280
6281 insn = GEN_FCN (icode) (mem, val);
6282 if (insn)
6283 {
6284 emit_insn (insn);
6285 return;
6286 }
6287 }
6288
6289 /* Otherwise we can implement this operation by emitting a barrier
6290 followed by a store of zero. */
6291 expand_builtin_synchronize ();
6292 emit_move_insn (mem, val);
6293 }
6294 \f
6295 /* Expand an expression EXP that calls a built-in function,
6296 with result going to TARGET if that's convenient
6297 (and in mode MODE if that's convenient).
6298 SUBTARGET may be used as the target for computing one of EXP's operands.
6299 IGNORE is nonzero if the value is to be ignored. */
6300
6301 rtx
6302 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
6303 int ignore)
6304 {
6305 tree fndecl = get_callee_fndecl (exp);
6306 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6307 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
6308
6309 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6310 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6311
6312 /* When not optimizing, generate calls to library functions for a certain
6313 set of builtins. */
6314 if (!optimize
6315 && !called_as_built_in (fndecl)
6316 && DECL_ASSEMBLER_NAME_SET_P (fndecl)
6317 && fcode != BUILT_IN_ALLOCA
6318 && fcode != BUILT_IN_FREE)
6319 return expand_call (exp, target, ignore);
6320
6321 /* The built-in function expanders test for target == const0_rtx
6322 to determine whether the function's result will be ignored. */
6323 if (ignore)
6324 target = const0_rtx;
6325
6326 /* If the result of a pure or const built-in function is ignored, and
6327 none of its arguments are volatile, we can avoid expanding the
6328 built-in call and just evaluate the arguments for side-effects. */
6329 if (target == const0_rtx
6330 && (DECL_PURE_P (fndecl) || TREE_READONLY (fndecl)))
6331 {
6332 bool volatilep = false;
6333 tree arg;
6334 call_expr_arg_iterator iter;
6335
6336 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6337 if (TREE_THIS_VOLATILE (arg))
6338 {
6339 volatilep = true;
6340 break;
6341 }
6342
6343 if (! volatilep)
6344 {
6345 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6346 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
6347 return const0_rtx;
6348 }
6349 }
6350
6351 switch (fcode)
6352 {
6353 CASE_FLT_FN (BUILT_IN_FABS):
6354 target = expand_builtin_fabs (exp, target, subtarget);
6355 if (target)
6356 return target;
6357 break;
6358
6359 CASE_FLT_FN (BUILT_IN_COPYSIGN):
6360 target = expand_builtin_copysign (exp, target, subtarget);
6361 if (target)
6362 return target;
6363 break;
6364
6365 /* Just do a normal library call if we were unable to fold
6366 the values. */
6367 CASE_FLT_FN (BUILT_IN_CABS):
6368 break;
6369
6370 CASE_FLT_FN (BUILT_IN_EXP):
6371 CASE_FLT_FN (BUILT_IN_EXP10):
6372 CASE_FLT_FN (BUILT_IN_POW10):
6373 CASE_FLT_FN (BUILT_IN_EXP2):
6374 CASE_FLT_FN (BUILT_IN_EXPM1):
6375 CASE_FLT_FN (BUILT_IN_LOGB):
6376 CASE_FLT_FN (BUILT_IN_LOG):
6377 CASE_FLT_FN (BUILT_IN_LOG10):
6378 CASE_FLT_FN (BUILT_IN_LOG2):
6379 CASE_FLT_FN (BUILT_IN_LOG1P):
6380 CASE_FLT_FN (BUILT_IN_TAN):
6381 CASE_FLT_FN (BUILT_IN_ASIN):
6382 CASE_FLT_FN (BUILT_IN_ACOS):
6383 CASE_FLT_FN (BUILT_IN_ATAN):
6384 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
6385 /* Treat these like sqrt only if unsafe math optimizations are allowed,
6386 because of possible accuracy problems. */
6387 if (! flag_unsafe_math_optimizations)
6388 break;
6389 CASE_FLT_FN (BUILT_IN_SQRT):
6390 CASE_FLT_FN (BUILT_IN_FLOOR):
6391 CASE_FLT_FN (BUILT_IN_CEIL):
6392 CASE_FLT_FN (BUILT_IN_TRUNC):
6393 CASE_FLT_FN (BUILT_IN_ROUND):
6394 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6395 CASE_FLT_FN (BUILT_IN_RINT):
6396 target = expand_builtin_mathfn (exp, target, subtarget);
6397 if (target)
6398 return target;
6399 break;
6400
6401 CASE_FLT_FN (BUILT_IN_ILOGB):
6402 if (! flag_unsafe_math_optimizations)
6403 break;
6404 CASE_FLT_FN (BUILT_IN_ISINF):
6405 CASE_FLT_FN (BUILT_IN_FINITE):
6406 case BUILT_IN_ISFINITE:
6407 case BUILT_IN_ISNORMAL:
6408 target = expand_builtin_interclass_mathfn (exp, target, subtarget);
6409 if (target)
6410 return target;
6411 break;
6412
6413 CASE_FLT_FN (BUILT_IN_LCEIL):
6414 CASE_FLT_FN (BUILT_IN_LLCEIL):
6415 CASE_FLT_FN (BUILT_IN_LFLOOR):
6416 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6417 target = expand_builtin_int_roundingfn (exp, target);
6418 if (target)
6419 return target;
6420 break;
6421
6422 CASE_FLT_FN (BUILT_IN_LRINT):
6423 CASE_FLT_FN (BUILT_IN_LLRINT):
6424 CASE_FLT_FN (BUILT_IN_LROUND):
6425 CASE_FLT_FN (BUILT_IN_LLROUND):
6426 target = expand_builtin_int_roundingfn_2 (exp, target);
6427 if (target)
6428 return target;
6429 break;
6430
6431 CASE_FLT_FN (BUILT_IN_POW):
6432 target = expand_builtin_pow (exp, target, subtarget);
6433 if (target)
6434 return target;
6435 break;
6436
6437 CASE_FLT_FN (BUILT_IN_POWI):
6438 target = expand_builtin_powi (exp, target, subtarget);
6439 if (target)
6440 return target;
6441 break;
6442
6443 CASE_FLT_FN (BUILT_IN_ATAN2):
6444 CASE_FLT_FN (BUILT_IN_LDEXP):
6445 CASE_FLT_FN (BUILT_IN_SCALB):
6446 CASE_FLT_FN (BUILT_IN_SCALBN):
6447 CASE_FLT_FN (BUILT_IN_SCALBLN):
6448 if (! flag_unsafe_math_optimizations)
6449 break;
6450
6451 CASE_FLT_FN (BUILT_IN_FMOD):
6452 CASE_FLT_FN (BUILT_IN_REMAINDER):
6453 CASE_FLT_FN (BUILT_IN_DREM):
6454 target = expand_builtin_mathfn_2 (exp, target, subtarget);
6455 if (target)
6456 return target;
6457 break;
6458
6459 CASE_FLT_FN (BUILT_IN_CEXPI):
6460 target = expand_builtin_cexpi (exp, target, subtarget);
6461 gcc_assert (target);
6462 return target;
6463
6464 CASE_FLT_FN (BUILT_IN_SIN):
6465 CASE_FLT_FN (BUILT_IN_COS):
6466 if (! flag_unsafe_math_optimizations)
6467 break;
6468 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6469 if (target)
6470 return target;
6471 break;
6472
6473 CASE_FLT_FN (BUILT_IN_SINCOS):
6474 if (! flag_unsafe_math_optimizations)
6475 break;
6476 target = expand_builtin_sincos (exp);
6477 if (target)
6478 return target;
6479 break;
6480
6481 case BUILT_IN_APPLY_ARGS:
6482 return expand_builtin_apply_args ();
6483
6484 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6485 FUNCTION with a copy of the parameters described by
6486 ARGUMENTS, and ARGSIZE. It returns a block of memory
6487 allocated on the stack into which is stored all the registers
6488 that might possibly be used for returning the result of a
6489 function. ARGUMENTS is the value returned by
6490 __builtin_apply_args. ARGSIZE is the number of bytes of
6491 arguments that must be copied. ??? How should this value be
6492 computed? We'll also need a safe worst case value for varargs
6493 functions. */
6494 case BUILT_IN_APPLY:
6495 if (!validate_arglist (exp, POINTER_TYPE,
6496 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6497 && !validate_arglist (exp, REFERENCE_TYPE,
6498 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6499 return const0_rtx;
6500 else
6501 {
6502 rtx ops[3];
6503
6504 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6505 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6506 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6507
6508 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6509 }
6510
6511 /* __builtin_return (RESULT) causes the function to return the
6512 value described by RESULT. RESULT is address of the block of
6513 memory returned by __builtin_apply. */
6514 case BUILT_IN_RETURN:
6515 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6516 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6517 return const0_rtx;
6518
6519 case BUILT_IN_SAVEREGS:
6520 return expand_builtin_saveregs ();
6521
6522 case BUILT_IN_ARGS_INFO:
6523 return expand_builtin_args_info (exp);
6524
6525 case BUILT_IN_VA_ARG_PACK:
6526 /* All valid uses of __builtin_va_arg_pack () are removed during
6527 inlining. */
6528 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6529 return const0_rtx;
6530
6531 case BUILT_IN_VA_ARG_PACK_LEN:
6532 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6533 inlining. */
6534 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6535 return const0_rtx;
6536
6537 /* Return the address of the first anonymous stack arg. */
6538 case BUILT_IN_NEXT_ARG:
6539 if (fold_builtin_next_arg (exp, false))
6540 return const0_rtx;
6541 return expand_builtin_next_arg ();
6542
6543 case BUILT_IN_CLEAR_CACHE:
6544 target = expand_builtin___clear_cache (exp);
6545 if (target)
6546 return target;
6547 break;
6548
6549 case BUILT_IN_CLASSIFY_TYPE:
6550 return expand_builtin_classify_type (exp);
6551
6552 case BUILT_IN_CONSTANT_P:
6553 return const0_rtx;
6554
6555 case BUILT_IN_FRAME_ADDRESS:
6556 case BUILT_IN_RETURN_ADDRESS:
6557 return expand_builtin_frame_address (fndecl, exp);
6558
6559 /* Returns the address of the area where the structure is returned.
6560 0 otherwise. */
6561 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6562 if (call_expr_nargs (exp) != 0
6563 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6564 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6565 return const0_rtx;
6566 else
6567 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6568
6569 case BUILT_IN_ALLOCA:
6570 target = expand_builtin_alloca (exp, target);
6571 if (target)
6572 return target;
6573 break;
6574
6575 case BUILT_IN_STACK_SAVE:
6576 return expand_stack_save ();
6577
6578 case BUILT_IN_STACK_RESTORE:
6579 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6580 return const0_rtx;
6581
6582 case BUILT_IN_BSWAP32:
6583 case BUILT_IN_BSWAP64:
6584 target = expand_builtin_bswap (exp, target, subtarget);
6585
6586 if (target)
6587 return target;
6588 break;
6589
6590 CASE_INT_FN (BUILT_IN_FFS):
6591 case BUILT_IN_FFSIMAX:
6592 target = expand_builtin_unop (target_mode, exp, target,
6593 subtarget, ffs_optab);
6594 if (target)
6595 return target;
6596 break;
6597
6598 CASE_INT_FN (BUILT_IN_CLZ):
6599 case BUILT_IN_CLZIMAX:
6600 target = expand_builtin_unop (target_mode, exp, target,
6601 subtarget, clz_optab);
6602 if (target)
6603 return target;
6604 break;
6605
6606 CASE_INT_FN (BUILT_IN_CTZ):
6607 case BUILT_IN_CTZIMAX:
6608 target = expand_builtin_unop (target_mode, exp, target,
6609 subtarget, ctz_optab);
6610 if (target)
6611 return target;
6612 break;
6613
6614 CASE_INT_FN (BUILT_IN_POPCOUNT):
6615 case BUILT_IN_POPCOUNTIMAX:
6616 target = expand_builtin_unop (target_mode, exp, target,
6617 subtarget, popcount_optab);
6618 if (target)
6619 return target;
6620 break;
6621
6622 CASE_INT_FN (BUILT_IN_PARITY):
6623 case BUILT_IN_PARITYIMAX:
6624 target = expand_builtin_unop (target_mode, exp, target,
6625 subtarget, parity_optab);
6626 if (target)
6627 return target;
6628 break;
6629
6630 case BUILT_IN_STRLEN:
6631 target = expand_builtin_strlen (exp, target, target_mode);
6632 if (target)
6633 return target;
6634 break;
6635
6636 case BUILT_IN_STRCPY:
6637 target = expand_builtin_strcpy (fndecl, exp, target, mode);
6638 if (target)
6639 return target;
6640 break;
6641
6642 case BUILT_IN_STRNCPY:
6643 target = expand_builtin_strncpy (exp, target, mode);
6644 if (target)
6645 return target;
6646 break;
6647
6648 case BUILT_IN_STPCPY:
6649 target = expand_builtin_stpcpy (exp, target, mode);
6650 if (target)
6651 return target;
6652 break;
6653
6654 case BUILT_IN_STRCAT:
6655 target = expand_builtin_strcat (fndecl, exp, target, mode);
6656 if (target)
6657 return target;
6658 break;
6659
6660 case BUILT_IN_STRNCAT:
6661 target = expand_builtin_strncat (exp, target, mode);
6662 if (target)
6663 return target;
6664 break;
6665
6666 case BUILT_IN_STRSPN:
6667 target = expand_builtin_strspn (exp, target, mode);
6668 if (target)
6669 return target;
6670 break;
6671
6672 case BUILT_IN_STRCSPN:
6673 target = expand_builtin_strcspn (exp, target, mode);
6674 if (target)
6675 return target;
6676 break;
6677
6678 case BUILT_IN_STRSTR:
6679 target = expand_builtin_strstr (exp, target, mode);
6680 if (target)
6681 return target;
6682 break;
6683
6684 case BUILT_IN_STRPBRK:
6685 target = expand_builtin_strpbrk (exp, target, mode);
6686 if (target)
6687 return target;
6688 break;
6689
6690 case BUILT_IN_INDEX:
6691 case BUILT_IN_STRCHR:
6692 target = expand_builtin_strchr (exp, target, mode);
6693 if (target)
6694 return target;
6695 break;
6696
6697 case BUILT_IN_RINDEX:
6698 case BUILT_IN_STRRCHR:
6699 target = expand_builtin_strrchr (exp, target, mode);
6700 if (target)
6701 return target;
6702 break;
6703
6704 case BUILT_IN_MEMCPY:
6705 target = expand_builtin_memcpy (exp, target, mode);
6706 if (target)
6707 return target;
6708 break;
6709
6710 case BUILT_IN_MEMPCPY:
6711 target = expand_builtin_mempcpy (exp, target, mode);
6712 if (target)
6713 return target;
6714 break;
6715
6716 case BUILT_IN_MEMMOVE:
6717 target = expand_builtin_memmove (exp, target, mode, ignore);
6718 if (target)
6719 return target;
6720 break;
6721
6722 case BUILT_IN_BCOPY:
6723 target = expand_builtin_bcopy (exp, ignore);
6724 if (target)
6725 return target;
6726 break;
6727
6728 case BUILT_IN_MEMSET:
6729 target = expand_builtin_memset (exp, target, mode);
6730 if (target)
6731 return target;
6732 break;
6733
6734 case BUILT_IN_BZERO:
6735 target = expand_builtin_bzero (exp);
6736 if (target)
6737 return target;
6738 break;
6739
6740 case BUILT_IN_STRCMP:
6741 target = expand_builtin_strcmp (exp, target, mode);
6742 if (target)
6743 return target;
6744 break;
6745
6746 case BUILT_IN_STRNCMP:
6747 target = expand_builtin_strncmp (exp, target, mode);
6748 if (target)
6749 return target;
6750 break;
6751
6752 case BUILT_IN_MEMCHR:
6753 target = expand_builtin_memchr (exp, target, mode);
6754 if (target)
6755 return target;
6756 break;
6757
6758 case BUILT_IN_BCMP:
6759 case BUILT_IN_MEMCMP:
6760 target = expand_builtin_memcmp (exp, target, mode);
6761 if (target)
6762 return target;
6763 break;
6764
6765 case BUILT_IN_SETJMP:
6766 /* This should have been lowered to the builtins below. */
6767 gcc_unreachable ();
6768
6769 case BUILT_IN_SETJMP_SETUP:
6770 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6771 and the receiver label. */
6772 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6773 {
6774 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6775 VOIDmode, EXPAND_NORMAL);
6776 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6777 rtx label_r = label_rtx (label);
6778
6779 /* This is copied from the handling of non-local gotos. */
6780 expand_builtin_setjmp_setup (buf_addr, label_r);
6781 nonlocal_goto_handler_labels
6782 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6783 nonlocal_goto_handler_labels);
6784 /* ??? Do not let expand_label treat us as such since we would
6785 not want to be both on the list of non-local labels and on
6786 the list of forced labels. */
6787 FORCED_LABEL (label) = 0;
6788 return const0_rtx;
6789 }
6790 break;
6791
6792 case BUILT_IN_SETJMP_DISPATCHER:
6793 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6794 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6795 {
6796 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6797 rtx label_r = label_rtx (label);
6798
6799 /* Remove the dispatcher label from the list of non-local labels
6800 since the receiver labels have been added to it above. */
6801 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6802 return const0_rtx;
6803 }
6804 break;
6805
6806 case BUILT_IN_SETJMP_RECEIVER:
6807 /* __builtin_setjmp_receiver is passed the receiver label. */
6808 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6809 {
6810 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6811 rtx label_r = label_rtx (label);
6812
6813 expand_builtin_setjmp_receiver (label_r);
6814 return const0_rtx;
6815 }
6816 break;
6817
6818 /* __builtin_longjmp is passed a pointer to an array of five words.
6819 It's similar to the C library longjmp function but works with
6820 __builtin_setjmp above. */
6821 case BUILT_IN_LONGJMP:
6822 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6823 {
6824 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6825 VOIDmode, EXPAND_NORMAL);
6826 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6827
6828 if (value != const1_rtx)
6829 {
6830 error ("%<__builtin_longjmp%> second argument must be 1");
6831 return const0_rtx;
6832 }
6833
6834 expand_builtin_longjmp (buf_addr, value);
6835 return const0_rtx;
6836 }
6837 break;
6838
6839 case BUILT_IN_NONLOCAL_GOTO:
6840 target = expand_builtin_nonlocal_goto (exp);
6841 if (target)
6842 return target;
6843 break;
6844
6845 /* This updates the setjmp buffer that is its argument with the value
6846 of the current stack pointer. */
6847 case BUILT_IN_UPDATE_SETJMP_BUF:
6848 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6849 {
6850 rtx buf_addr
6851 = expand_normal (CALL_EXPR_ARG (exp, 0));
6852
6853 expand_builtin_update_setjmp_buf (buf_addr);
6854 return const0_rtx;
6855 }
6856 break;
6857
6858 case BUILT_IN_TRAP:
6859 expand_builtin_trap ();
6860 return const0_rtx;
6861
6862 case BUILT_IN_UNREACHABLE:
6863 expand_builtin_unreachable ();
6864 return const0_rtx;
6865
6866 case BUILT_IN_PRINTF:
6867 target = expand_builtin_printf (exp, target, mode, false);
6868 if (target)
6869 return target;
6870 break;
6871
6872 case BUILT_IN_PRINTF_UNLOCKED:
6873 target = expand_builtin_printf (exp, target, mode, true);
6874 if (target)
6875 return target;
6876 break;
6877
6878 case BUILT_IN_FPUTS:
6879 target = expand_builtin_fputs (exp, target, false);
6880 if (target)
6881 return target;
6882 break;
6883 case BUILT_IN_FPUTS_UNLOCKED:
6884 target = expand_builtin_fputs (exp, target, true);
6885 if (target)
6886 return target;
6887 break;
6888
6889 case BUILT_IN_FPRINTF:
6890 target = expand_builtin_fprintf (exp, target, mode, false);
6891 if (target)
6892 return target;
6893 break;
6894
6895 case BUILT_IN_FPRINTF_UNLOCKED:
6896 target = expand_builtin_fprintf (exp, target, mode, true);
6897 if (target)
6898 return target;
6899 break;
6900
6901 case BUILT_IN_SPRINTF:
6902 target = expand_builtin_sprintf (exp, target, mode);
6903 if (target)
6904 return target;
6905 break;
6906
6907 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6908 case BUILT_IN_SIGNBITD32:
6909 case BUILT_IN_SIGNBITD64:
6910 case BUILT_IN_SIGNBITD128:
6911 target = expand_builtin_signbit (exp, target);
6912 if (target)
6913 return target;
6914 break;
6915
6916 /* Various hooks for the DWARF 2 __throw routine. */
6917 case BUILT_IN_UNWIND_INIT:
6918 expand_builtin_unwind_init ();
6919 return const0_rtx;
6920 case BUILT_IN_DWARF_CFA:
6921 return virtual_cfa_rtx;
6922 #ifdef DWARF2_UNWIND_INFO
6923 case BUILT_IN_DWARF_SP_COLUMN:
6924 return expand_builtin_dwarf_sp_column ();
6925 case BUILT_IN_INIT_DWARF_REG_SIZES:
6926 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6927 return const0_rtx;
6928 #endif
6929 case BUILT_IN_FROB_RETURN_ADDR:
6930 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6931 case BUILT_IN_EXTRACT_RETURN_ADDR:
6932 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6933 case BUILT_IN_EH_RETURN:
6934 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6935 CALL_EXPR_ARG (exp, 1));
6936 return const0_rtx;
6937 #ifdef EH_RETURN_DATA_REGNO
6938 case BUILT_IN_EH_RETURN_DATA_REGNO:
6939 return expand_builtin_eh_return_data_regno (exp);
6940 #endif
6941 case BUILT_IN_EXTEND_POINTER:
6942 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6943
6944 case BUILT_IN_VA_START:
6945 return expand_builtin_va_start (exp);
6946 case BUILT_IN_VA_END:
6947 return expand_builtin_va_end (exp);
6948 case BUILT_IN_VA_COPY:
6949 return expand_builtin_va_copy (exp);
6950 case BUILT_IN_EXPECT:
6951 return expand_builtin_expect (exp, target);
6952 case BUILT_IN_PREFETCH:
6953 expand_builtin_prefetch (exp);
6954 return const0_rtx;
6955
6956 case BUILT_IN_PROFILE_FUNC_ENTER:
6957 return expand_builtin_profile_func (false);
6958 case BUILT_IN_PROFILE_FUNC_EXIT:
6959 return expand_builtin_profile_func (true);
6960
6961 case BUILT_IN_INIT_TRAMPOLINE:
6962 return expand_builtin_init_trampoline (exp);
6963 case BUILT_IN_ADJUST_TRAMPOLINE:
6964 return expand_builtin_adjust_trampoline (exp);
6965
6966 case BUILT_IN_FORK:
6967 case BUILT_IN_EXECL:
6968 case BUILT_IN_EXECV:
6969 case BUILT_IN_EXECLP:
6970 case BUILT_IN_EXECLE:
6971 case BUILT_IN_EXECVP:
6972 case BUILT_IN_EXECVE:
6973 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6974 if (target)
6975 return target;
6976 break;
6977
6978 case BUILT_IN_FETCH_AND_ADD_1:
6979 case BUILT_IN_FETCH_AND_ADD_2:
6980 case BUILT_IN_FETCH_AND_ADD_4:
6981 case BUILT_IN_FETCH_AND_ADD_8:
6982 case BUILT_IN_FETCH_AND_ADD_16:
6983 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_ADD_1);
6984 target = expand_builtin_sync_operation (mode, exp, PLUS,
6985 false, target, ignore);
6986 if (target)
6987 return target;
6988 break;
6989
6990 case BUILT_IN_FETCH_AND_SUB_1:
6991 case BUILT_IN_FETCH_AND_SUB_2:
6992 case BUILT_IN_FETCH_AND_SUB_4:
6993 case BUILT_IN_FETCH_AND_SUB_8:
6994 case BUILT_IN_FETCH_AND_SUB_16:
6995 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_SUB_1);
6996 target = expand_builtin_sync_operation (mode, exp, MINUS,
6997 false, target, ignore);
6998 if (target)
6999 return target;
7000 break;
7001
7002 case BUILT_IN_FETCH_AND_OR_1:
7003 case BUILT_IN_FETCH_AND_OR_2:
7004 case BUILT_IN_FETCH_AND_OR_4:
7005 case BUILT_IN_FETCH_AND_OR_8:
7006 case BUILT_IN_FETCH_AND_OR_16:
7007 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_OR_1);
7008 target = expand_builtin_sync_operation (mode, exp, IOR,
7009 false, target, ignore);
7010 if (target)
7011 return target;
7012 break;
7013
7014 case BUILT_IN_FETCH_AND_AND_1:
7015 case BUILT_IN_FETCH_AND_AND_2:
7016 case BUILT_IN_FETCH_AND_AND_4:
7017 case BUILT_IN_FETCH_AND_AND_8:
7018 case BUILT_IN_FETCH_AND_AND_16:
7019 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_AND_1);
7020 target = expand_builtin_sync_operation (mode, exp, AND,
7021 false, target, ignore);
7022 if (target)
7023 return target;
7024 break;
7025
7026 case BUILT_IN_FETCH_AND_XOR_1:
7027 case BUILT_IN_FETCH_AND_XOR_2:
7028 case BUILT_IN_FETCH_AND_XOR_4:
7029 case BUILT_IN_FETCH_AND_XOR_8:
7030 case BUILT_IN_FETCH_AND_XOR_16:
7031 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_XOR_1);
7032 target = expand_builtin_sync_operation (mode, exp, XOR,
7033 false, target, ignore);
7034 if (target)
7035 return target;
7036 break;
7037
7038 case BUILT_IN_FETCH_AND_NAND_1:
7039 case BUILT_IN_FETCH_AND_NAND_2:
7040 case BUILT_IN_FETCH_AND_NAND_4:
7041 case BUILT_IN_FETCH_AND_NAND_8:
7042 case BUILT_IN_FETCH_AND_NAND_16:
7043 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_NAND_1);
7044 target = expand_builtin_sync_operation (mode, exp, NOT,
7045 false, target, ignore);
7046 if (target)
7047 return target;
7048 break;
7049
7050 case BUILT_IN_ADD_AND_FETCH_1:
7051 case BUILT_IN_ADD_AND_FETCH_2:
7052 case BUILT_IN_ADD_AND_FETCH_4:
7053 case BUILT_IN_ADD_AND_FETCH_8:
7054 case BUILT_IN_ADD_AND_FETCH_16:
7055 mode = get_builtin_sync_mode (fcode - BUILT_IN_ADD_AND_FETCH_1);
7056 target = expand_builtin_sync_operation (mode, exp, PLUS,
7057 true, target, ignore);
7058 if (target)
7059 return target;
7060 break;
7061
7062 case BUILT_IN_SUB_AND_FETCH_1:
7063 case BUILT_IN_SUB_AND_FETCH_2:
7064 case BUILT_IN_SUB_AND_FETCH_4:
7065 case BUILT_IN_SUB_AND_FETCH_8:
7066 case BUILT_IN_SUB_AND_FETCH_16:
7067 mode = get_builtin_sync_mode (fcode - BUILT_IN_SUB_AND_FETCH_1);
7068 target = expand_builtin_sync_operation (mode, exp, MINUS,
7069 true, target, ignore);
7070 if (target)
7071 return target;
7072 break;
7073
7074 case BUILT_IN_OR_AND_FETCH_1:
7075 case BUILT_IN_OR_AND_FETCH_2:
7076 case BUILT_IN_OR_AND_FETCH_4:
7077 case BUILT_IN_OR_AND_FETCH_8:
7078 case BUILT_IN_OR_AND_FETCH_16:
7079 mode = get_builtin_sync_mode (fcode - BUILT_IN_OR_AND_FETCH_1);
7080 target = expand_builtin_sync_operation (mode, exp, IOR,
7081 true, target, ignore);
7082 if (target)
7083 return target;
7084 break;
7085
7086 case BUILT_IN_AND_AND_FETCH_1:
7087 case BUILT_IN_AND_AND_FETCH_2:
7088 case BUILT_IN_AND_AND_FETCH_4:
7089 case BUILT_IN_AND_AND_FETCH_8:
7090 case BUILT_IN_AND_AND_FETCH_16:
7091 mode = get_builtin_sync_mode (fcode - BUILT_IN_AND_AND_FETCH_1);
7092 target = expand_builtin_sync_operation (mode, exp, AND,
7093 true, target, ignore);
7094 if (target)
7095 return target;
7096 break;
7097
7098 case BUILT_IN_XOR_AND_FETCH_1:
7099 case BUILT_IN_XOR_AND_FETCH_2:
7100 case BUILT_IN_XOR_AND_FETCH_4:
7101 case BUILT_IN_XOR_AND_FETCH_8:
7102 case BUILT_IN_XOR_AND_FETCH_16:
7103 mode = get_builtin_sync_mode (fcode - BUILT_IN_XOR_AND_FETCH_1);
7104 target = expand_builtin_sync_operation (mode, exp, XOR,
7105 true, target, ignore);
7106 if (target)
7107 return target;
7108 break;
7109
7110 case BUILT_IN_NAND_AND_FETCH_1:
7111 case BUILT_IN_NAND_AND_FETCH_2:
7112 case BUILT_IN_NAND_AND_FETCH_4:
7113 case BUILT_IN_NAND_AND_FETCH_8:
7114 case BUILT_IN_NAND_AND_FETCH_16:
7115 mode = get_builtin_sync_mode (fcode - BUILT_IN_NAND_AND_FETCH_1);
7116 target = expand_builtin_sync_operation (mode, exp, NOT,
7117 true, target, ignore);
7118 if (target)
7119 return target;
7120 break;
7121
7122 case BUILT_IN_BOOL_COMPARE_AND_SWAP_1:
7123 case BUILT_IN_BOOL_COMPARE_AND_SWAP_2:
7124 case BUILT_IN_BOOL_COMPARE_AND_SWAP_4:
7125 case BUILT_IN_BOOL_COMPARE_AND_SWAP_8:
7126 case BUILT_IN_BOOL_COMPARE_AND_SWAP_16:
7127 if (mode == VOIDmode)
7128 mode = TYPE_MODE (boolean_type_node);
7129 if (!target || !register_operand (target, mode))
7130 target = gen_reg_rtx (mode);
7131
7132 mode = get_builtin_sync_mode (fcode - BUILT_IN_BOOL_COMPARE_AND_SWAP_1);
7133 target = expand_builtin_compare_and_swap (mode, exp, true, target);
7134 if (target)
7135 return target;
7136 break;
7137
7138 case BUILT_IN_VAL_COMPARE_AND_SWAP_1:
7139 case BUILT_IN_VAL_COMPARE_AND_SWAP_2:
7140 case BUILT_IN_VAL_COMPARE_AND_SWAP_4:
7141 case BUILT_IN_VAL_COMPARE_AND_SWAP_8:
7142 case BUILT_IN_VAL_COMPARE_AND_SWAP_16:
7143 mode = get_builtin_sync_mode (fcode - BUILT_IN_VAL_COMPARE_AND_SWAP_1);
7144 target = expand_builtin_compare_and_swap (mode, exp, false, target);
7145 if (target)
7146 return target;
7147 break;
7148
7149 case BUILT_IN_LOCK_TEST_AND_SET_1:
7150 case BUILT_IN_LOCK_TEST_AND_SET_2:
7151 case BUILT_IN_LOCK_TEST_AND_SET_4:
7152 case BUILT_IN_LOCK_TEST_AND_SET_8:
7153 case BUILT_IN_LOCK_TEST_AND_SET_16:
7154 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_TEST_AND_SET_1);
7155 target = expand_builtin_lock_test_and_set (mode, exp, target);
7156 if (target)
7157 return target;
7158 break;
7159
7160 case BUILT_IN_LOCK_RELEASE_1:
7161 case BUILT_IN_LOCK_RELEASE_2:
7162 case BUILT_IN_LOCK_RELEASE_4:
7163 case BUILT_IN_LOCK_RELEASE_8:
7164 case BUILT_IN_LOCK_RELEASE_16:
7165 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_RELEASE_1);
7166 expand_builtin_lock_release (mode, exp);
7167 return const0_rtx;
7168
7169 case BUILT_IN_SYNCHRONIZE:
7170 expand_builtin_synchronize ();
7171 return const0_rtx;
7172
7173 case BUILT_IN_OBJECT_SIZE:
7174 return expand_builtin_object_size (exp);
7175
7176 case BUILT_IN_MEMCPY_CHK:
7177 case BUILT_IN_MEMPCPY_CHK:
7178 case BUILT_IN_MEMMOVE_CHK:
7179 case BUILT_IN_MEMSET_CHK:
7180 target = expand_builtin_memory_chk (exp, target, mode, fcode);
7181 if (target)
7182 return target;
7183 break;
7184
7185 case BUILT_IN_STRCPY_CHK:
7186 case BUILT_IN_STPCPY_CHK:
7187 case BUILT_IN_STRNCPY_CHK:
7188 case BUILT_IN_STRCAT_CHK:
7189 case BUILT_IN_STRNCAT_CHK:
7190 case BUILT_IN_SNPRINTF_CHK:
7191 case BUILT_IN_VSNPRINTF_CHK:
7192 maybe_emit_chk_warning (exp, fcode);
7193 break;
7194
7195 case BUILT_IN_SPRINTF_CHK:
7196 case BUILT_IN_VSPRINTF_CHK:
7197 maybe_emit_sprintf_chk_warning (exp, fcode);
7198 break;
7199
7200 case BUILT_IN_FREE:
7201 maybe_emit_free_warning (exp);
7202 break;
7203
7204 default: /* just do library call, if unknown builtin */
7205 break;
7206 }
7207
7208 /* The switch statement above can drop through to cause the function
7209 to be called normally. */
7210 return expand_call (exp, target, ignore);
7211 }
7212
7213 /* Determine whether a tree node represents a call to a built-in
7214 function. If the tree T is a call to a built-in function with
7215 the right number of arguments of the appropriate types, return
7216 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7217 Otherwise the return value is END_BUILTINS. */
7218
7219 enum built_in_function
7220 builtin_mathfn_code (const_tree t)
7221 {
7222 const_tree fndecl, arg, parmlist;
7223 const_tree argtype, parmtype;
7224 const_call_expr_arg_iterator iter;
7225
7226 if (TREE_CODE (t) != CALL_EXPR
7227 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
7228 return END_BUILTINS;
7229
7230 fndecl = get_callee_fndecl (t);
7231 if (fndecl == NULL_TREE
7232 || TREE_CODE (fndecl) != FUNCTION_DECL
7233 || ! DECL_BUILT_IN (fndecl)
7234 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7235 return END_BUILTINS;
7236
7237 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
7238 init_const_call_expr_arg_iterator (t, &iter);
7239 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
7240 {
7241 /* If a function doesn't take a variable number of arguments,
7242 the last element in the list will have type `void'. */
7243 parmtype = TREE_VALUE (parmlist);
7244 if (VOID_TYPE_P (parmtype))
7245 {
7246 if (more_const_call_expr_args_p (&iter))
7247 return END_BUILTINS;
7248 return DECL_FUNCTION_CODE (fndecl);
7249 }
7250
7251 if (! more_const_call_expr_args_p (&iter))
7252 return END_BUILTINS;
7253
7254 arg = next_const_call_expr_arg (&iter);
7255 argtype = TREE_TYPE (arg);
7256
7257 if (SCALAR_FLOAT_TYPE_P (parmtype))
7258 {
7259 if (! SCALAR_FLOAT_TYPE_P (argtype))
7260 return END_BUILTINS;
7261 }
7262 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7263 {
7264 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7265 return END_BUILTINS;
7266 }
7267 else if (POINTER_TYPE_P (parmtype))
7268 {
7269 if (! POINTER_TYPE_P (argtype))
7270 return END_BUILTINS;
7271 }
7272 else if (INTEGRAL_TYPE_P (parmtype))
7273 {
7274 if (! INTEGRAL_TYPE_P (argtype))
7275 return END_BUILTINS;
7276 }
7277 else
7278 return END_BUILTINS;
7279 }
7280
7281 /* Variable-length argument list. */
7282 return DECL_FUNCTION_CODE (fndecl);
7283 }
7284
7285 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7286 evaluate to a constant. */
7287
7288 static tree
7289 fold_builtin_constant_p (tree arg)
7290 {
7291 /* We return 1 for a numeric type that's known to be a constant
7292 value at compile-time or for an aggregate type that's a
7293 literal constant. */
7294 STRIP_NOPS (arg);
7295
7296 /* If we know this is a constant, emit the constant of one. */
7297 if (CONSTANT_CLASS_P (arg)
7298 || (TREE_CODE (arg) == CONSTRUCTOR
7299 && TREE_CONSTANT (arg)))
7300 return integer_one_node;
7301 if (TREE_CODE (arg) == ADDR_EXPR)
7302 {
7303 tree op = TREE_OPERAND (arg, 0);
7304 if (TREE_CODE (op) == STRING_CST
7305 || (TREE_CODE (op) == ARRAY_REF
7306 && integer_zerop (TREE_OPERAND (op, 1))
7307 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7308 return integer_one_node;
7309 }
7310
7311 /* If this expression has side effects, show we don't know it to be a
7312 constant. Likewise if it's a pointer or aggregate type since in
7313 those case we only want literals, since those are only optimized
7314 when generating RTL, not later.
7315 And finally, if we are compiling an initializer, not code, we
7316 need to return a definite result now; there's not going to be any
7317 more optimization done. */
7318 if (TREE_SIDE_EFFECTS (arg)
7319 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7320 || POINTER_TYPE_P (TREE_TYPE (arg))
7321 || cfun == 0
7322 || folding_initializer)
7323 return integer_zero_node;
7324
7325 return NULL_TREE;
7326 }
7327
7328 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7329 return it as a truthvalue. */
7330
7331 static tree
7332 build_builtin_expect_predicate (location_t loc, tree pred, tree expected)
7333 {
7334 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7335
7336 fn = built_in_decls[BUILT_IN_EXPECT];
7337 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7338 ret_type = TREE_TYPE (TREE_TYPE (fn));
7339 pred_type = TREE_VALUE (arg_types);
7340 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7341
7342 pred = fold_convert_loc (loc, pred_type, pred);
7343 expected = fold_convert_loc (loc, expected_type, expected);
7344 call_expr = build_call_expr_loc (loc, fn, 2, pred, expected);
7345
7346 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7347 build_int_cst (ret_type, 0));
7348 }
7349
/* Fold a call to builtin_expect with arguments ARG0 and ARG1.  Return
   NULL_TREE if no simplification is possible.  */

static tree
fold_builtin_expect (location_t loc, tree arg0, tree arg1)
{
  tree inner, fndecl;
  enum tree_code code;

  /* If this is a builtin_expect within a builtin_expect keep the
     inner one.  See through a comparison against a constant.  It
     might have been added to create a truthvalue.  */
  inner = arg0;
  if (COMPARISON_CLASS_P (inner)
      && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
    inner = TREE_OPERAND (inner, 0);

  if (TREE_CODE (inner) == CALL_EXPR
      && (fndecl = get_callee_fndecl (inner))
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
    return arg0;

  /* Distribute the expected value over short-circuiting operators.
     See through the cast from truthvalue_type_node to long.  */
  inner = arg0;
  while (TREE_CODE (inner) == NOP_EXPR
	 && INTEGRAL_TYPE_P (TREE_TYPE (inner))
	 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner, 0))))
    inner = TREE_OPERAND (inner, 0);

  code = TREE_CODE (inner);
  if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
    {
      tree op0 = TREE_OPERAND (inner, 0);
      tree op1 = TREE_OPERAND (inner, 1);

      /* Wrap each operand in its own __builtin_expect so the hint
	 survives on both sides of the short-circuit.  */
      op0 = build_builtin_expect_predicate (loc, op0, arg1);
      op1 = build_builtin_expect_predicate (loc, op1, arg1);
      inner = build2 (code, TREE_TYPE (inner), op0, op1);

      return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
    }

  /* If the argument isn't invariant then there's nothing else we can do.  */
  if (!TREE_CONSTANT (arg0))
    return NULL_TREE;

  /* If we expect that a comparison against the argument will fold to
     a constant return the constant.  In practice, this means a true
     constant or the address of a non-weak symbol.  */
  inner = arg0;
  STRIP_NOPS (inner);
  if (TREE_CODE (inner) == ADDR_EXPR)
    {
      /* Walk down to the underlying decl through any component or
	 array references.  */
      do
	{
	  inner = TREE_OPERAND (inner, 0);
	}
      while (TREE_CODE (inner) == COMPONENT_REF
	     || TREE_CODE (inner) == ARRAY_REF);
      /* The address of a weak symbol may still compare equal to null
	 at run time, so it is not a usable constant here.  */
      if ((TREE_CODE (inner) == VAR_DECL
	   || TREE_CODE (inner) == FUNCTION_DECL)
	  && DECL_WEAK (inner))
	return NULL_TREE;
    }

  /* Otherwise, ARG0 already has the proper type for the return value.  */
  return arg0;
}
7420
7421 /* Fold a call to __builtin_classify_type with argument ARG. */
7422
7423 static tree
7424 fold_builtin_classify_type (tree arg)
7425 {
7426 if (arg == 0)
7427 return build_int_cst (NULL_TREE, no_type_class);
7428
7429 return build_int_cst (NULL_TREE, type_to_class (TREE_TYPE (arg)));
7430 }
7431
7432 /* Fold a call to __builtin_strlen with argument ARG. */
7433
7434 static tree
7435 fold_builtin_strlen (location_t loc, tree arg)
7436 {
7437 if (!validate_arg (arg, POINTER_TYPE))
7438 return NULL_TREE;
7439 else
7440 {
7441 tree len = c_strlen (arg, 0);
7442
7443 if (len)
7444 {
7445 /* Convert from the internal "sizetype" type to "size_t". */
7446 if (size_type_node)
7447 len = fold_convert_loc (loc, size_type_node, len);
7448 return len;
7449 }
7450
7451 return NULL_TREE;
7452 }
7453 }
7454
7455 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7456
7457 static tree
7458 fold_builtin_inf (location_t loc, tree type, int warn)
7459 {
7460 REAL_VALUE_TYPE real;
7461
7462 /* __builtin_inff is intended to be usable to define INFINITY on all
7463 targets. If an infinity is not available, INFINITY expands "to a
7464 positive constant of type float that overflows at translation
7465 time", footnote "In this case, using INFINITY will violate the
7466 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7467 Thus we pedwarn to ensure this constraint violation is
7468 diagnosed. */
7469 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7470 pedwarn (loc, 0, "target format does not support infinity");
7471
7472 real_inf (&real);
7473 return build_real (type, real);
7474 }
7475
7476 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7477
7478 static tree
7479 fold_builtin_nan (tree arg, tree type, int quiet)
7480 {
7481 REAL_VALUE_TYPE real;
7482 const char *str;
7483
7484 if (!validate_arg (arg, POINTER_TYPE))
7485 return NULL_TREE;
7486 str = c_getstr (arg);
7487 if (!str)
7488 return NULL_TREE;
7489
7490 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7491 return NULL_TREE;
7492
7493 return build_real (type, real);
7494 }
7495
/* Return true if the floating point expression T has an integer value.
   We also allow +Inf, -Inf and NaN to be considered integer values.  */

static bool
integer_valued_real_p (tree t)
{
  switch (TREE_CODE (t))
    {
    case FLOAT_EXPR:
      /* A conversion from an integer type is integral by construction.  */
      return true;

    case ABS_EXPR:
    case SAVE_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 0));

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      /* The value of these expressions is their second operand.  */
      return integer_valued_real_p (TREE_OPERAND (t, 1));

    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
      /* Integrality is preserved when both operands are integral.  */
      return integer_valued_real_p (TREE_OPERAND (t, 0))
	     && integer_valued_real_p (TREE_OPERAND (t, 1));

    case COND_EXPR:
      /* Both arms of the conditional must be integral.  */
      return integer_valued_real_p (TREE_OPERAND (t, 1))
	     && integer_valued_real_p (TREE_OPERAND (t, 2));

    case REAL_CST:
      return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));

    case NOP_EXPR:
      {
	tree type = TREE_TYPE (TREE_OPERAND (t, 0));
	if (TREE_CODE (type) == INTEGER_TYPE)
	  return true;
	if (TREE_CODE (type) == REAL_TYPE)
	  return integer_valued_real_p (TREE_OPERAND (t, 0));
	break;
      }

    case CALL_EXPR:
      switch (builtin_mathfn_code (t))
	{
	/* The rounding builtins produce integer-valued results.  */
	CASE_FLT_FN (BUILT_IN_CEIL):
	CASE_FLT_FN (BUILT_IN_FLOOR):
	CASE_FLT_FN (BUILT_IN_NEARBYINT):
	CASE_FLT_FN (BUILT_IN_RINT):
	CASE_FLT_FN (BUILT_IN_ROUND):
	CASE_FLT_FN (BUILT_IN_TRUNC):
	  return true;

	/* fmin/fmax return one of their operands.  */
	CASE_FLT_FN (BUILT_IN_FMIN):
	CASE_FLT_FN (BUILT_IN_FMAX):
	  return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
		 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));

	default:
	  break;
	}
      break;

    default:
      break;
    }
  /* Conservatively assume any other expression may be fractional.  */
  return false;
}
7567
/* FNDECL is assumed to be a builtin where truncation can be propagated
   across (for instance floor((double)f) == (double)floorf (f).
   Do the transformation for a call with argument ARG.  Return the
   simplified tree, or NULL_TREE if no simplification applies.  */

static tree
fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Integer rounding functions are idempotent.  */
  if (fcode == builtin_mathfn_code (arg))
    return arg;

  /* If argument is already integer valued, and we don't need to worry
     about setting errno, there's no need to perform rounding.  */
  if (! flag_errno_math && integer_valued_real_p (arg))
    return arg;

  if (optimize)
    {
      /* If the argument was widened from a narrower float type, do
	 the operation in the narrower type and widen the result,
	 e.g. floor((double)f) -> (double)floorf (f).  */
      tree arg0 = strip_float_extensions (arg);
      tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
      tree newtype = TREE_TYPE (arg0);
      tree decl;

      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
	  && (decl = mathfn_built_in (newtype, fcode)))
	return fold_convert_loc (loc, ftype,
				 build_call_expr_loc (loc, decl, 1,
						      fold_convert_loc (loc,
									newtype,
									arg0)));
    }
  return NULL_TREE;
}
7606
/* FNDECL is assumed to be builtin which can narrow the FP type of
   the argument, for instance lround((double)f) -> lroundf (f).
   Do the transformation for a call with argument ARG.  Return the
   simplified tree, or NULL_TREE if no simplification applies.  */

static tree
fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* If argument is already integer valued, and we don't need to worry
     about setting errno, there's no need to perform rounding.  */
  if (! flag_errno_math && integer_valued_real_p (arg))
    return fold_build1_loc (loc, FIX_TRUNC_EXPR,
			TREE_TYPE (TREE_TYPE (fndecl)), arg);

  if (optimize)
    {
      /* If the argument was widened from a narrower float type, call
	 the variant that takes the narrower type directly.  */
      tree ftype = TREE_TYPE (arg);
      tree arg0 = strip_float_extensions (arg);
      tree newtype = TREE_TYPE (arg0);
      tree decl;

      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
	  && (decl = mathfn_built_in (newtype, fcode)))
	return build_call_expr_loc (loc, decl, 1,
				fold_convert_loc (loc, newtype, arg0));
    }

  /* Canonicalize llround (x) to lround (x) on LP64 targets where
     sizeof (long long) == sizeof (long).  */
  if (TYPE_PRECISION (long_long_integer_type_node)
      == TYPE_PRECISION (long_integer_type_node))
    {
      tree newfn = NULL_TREE;
      switch (fcode)
	{
	CASE_FLT_FN (BUILT_IN_LLCEIL):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
	  break;

	CASE_FLT_FN (BUILT_IN_LLFLOOR):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
	  break;

	CASE_FLT_FN (BUILT_IN_LLROUND):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
	  break;

	CASE_FLT_FN (BUILT_IN_LLRINT):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
	  break;

	default:
	  break;
	}

      if (newfn)
	{
	  /* Convert the `long' result back to the original `long long'
	     return type.  */
	  tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
	  return fold_convert_loc (loc,
				   TREE_TYPE (TREE_TYPE (fndecl)), newcall);
	}
    }

  return NULL_TREE;
}
7676
/* Fold call to builtin cabs, cabsf or cabsl with argument ARG.  TYPE is the
   return type.  FNDECL is the cabs decl itself, used to rebuild the call.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
{
  tree res;

  /* The argument must be a complex value with real floating parts.  */
  if (TREE_CODE (TREE_TYPE (arg)) != COMPLEX_TYPE
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if (TREE_CODE (arg) == COMPLEX_CST
      && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
			      type, mpfr_hypot)))
    return res;

  if (TREE_CODE (arg) == COMPLEX_EXPR)
    {
      tree real = TREE_OPERAND (arg, 0);
      tree imag = TREE_OPERAND (arg, 1);

      /* If either part is zero, cabs is fabs of the other.  */
      if (real_zerop (real))
	return fold_build1_loc (loc, ABS_EXPR, type, imag);
      if (real_zerop (imag))
	return fold_build1_loc (loc, ABS_EXPR, type, real);

      /* cabs(x+xi) -> fabs(x)*sqrt(2).  */
      if (flag_unsafe_math_optimizations
	  && operand_equal_p (real, imag, OEP_PURE_SAME))
	{
	  const REAL_VALUE_TYPE sqrt2_trunc
	    = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
	  STRIP_NOPS (real);
	  return fold_build2_loc (loc, MULT_EXPR, type,
			      fold_build1_loc (loc, ABS_EXPR, type, real),
			      build_real (type, sqrt2_trunc));
	}
    }

  /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z).  */
  if (TREE_CODE (arg) == NEGATE_EXPR
      || TREE_CODE (arg) == CONJ_EXPR)
    return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));

  /* Don't do this when optimizing for size.  */
  if (flag_unsafe_math_optimizations
      && optimize && optimize_function_for_speed_p (cfun))
    {
      tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);

      if (sqrtfn != NULL_TREE)
	{
	  /* Expand cabs inline as sqrt(re*re + im*im), saving the
	     parts so each is evaluated only once.  */
	  tree rpart, ipart, result;

	  arg = builtin_save_expr (arg);

	  rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
	  ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);

	  rpart = builtin_save_expr (rpart);
	  ipart = builtin_save_expr (ipart);

	  result = fold_build2_loc (loc, PLUS_EXPR, type,
				fold_build2_loc (loc, MULT_EXPR, type,
					     rpart, rpart),
				fold_build2_loc (loc, MULT_EXPR, type,
					     ipart, ipart));

	  return build_call_expr_loc (loc, sqrtfn, 1, result);
	}
    }

  return NULL_TREE;
}
7754
7755 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7756 Return NULL_TREE if no simplification can be made. */
7757
static tree
fold_builtin_sqrt (location_t loc, tree arg, tree type)
{

  enum built_in_function fcode;
  tree res;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  The &dconst0
     lower bound with inclusive=true makes MPFR reject negative inputs.  */
  if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
    return res;

  /* Optimize sqrt(expN(x)) = expN(x*0.5).  */
  fcode = builtin_mathfn_code (arg);
  if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
    {
      tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
      arg = fold_build2_loc (loc, MULT_EXPR, type,
			     CALL_EXPR_ARG (arg, 0),
			     build_real (type, dconsthalf));
      return build_call_expr_loc (loc, expfn, 1, arg);
    }

  /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)).  */
  if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
    {
      tree powfn = mathfn_built_in (type, BUILT_IN_POW);

      if (powfn)
	{
	  tree arg0 = CALL_EXPR_ARG (arg, 0);
	  tree tree_root;
	  /* The inner root was either sqrt or cbrt.  */
	  /* This was a conditional expression but it triggered a bug
	     in Sun C 5.5.  */
	  REAL_VALUE_TYPE dconstroot;
	  if (BUILTIN_SQRT_P (fcode))
	    dconstroot = dconsthalf;
	  else
	    dconstroot = dconst_third ();

	  /* Adjust for the outer root: decrementing the binary exponent
	     halves the value, giving 1/4 resp. 1/6.  */
	  SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
	  dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
	  tree_root = build_real (type, dconstroot);
	  return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
	}
    }

  /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5).  The fabs on x keeps the
     pow argument in range when y*0.5 is not an integer.  */
  if (flag_unsafe_math_optimizations
      && (fcode == BUILT_IN_POW
	  || fcode == BUILT_IN_POWF
	  || fcode == BUILT_IN_POWL))
    {
      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
      tree arg0 = CALL_EXPR_ARG (arg, 0);
      tree arg1 = CALL_EXPR_ARG (arg, 1);
      tree narg1;
      if (!tree_expr_nonnegative_p (arg0))
	arg0 = build1 (ABS_EXPR, type, arg0);
      narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
			       build_real (type, dconsthalf));
      return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
    }

  return NULL_TREE;
}
7828
7829 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7830 Return NULL_TREE if no simplification can be made. */
7831
static tree
fold_builtin_cbrt (location_t loc, tree arg, tree type)
{
  const enum built_in_function fcode = builtin_mathfn_code (arg);
  tree res;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
    return res;

  if (flag_unsafe_math_optimizations)
    {
      /* Optimize cbrt(expN(x)) -> expN(x/3).  */
      if (BUILTIN_EXPONENT_P (fcode))
	{
	  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
	  const REAL_VALUE_TYPE third_trunc =
	    real_value_truncate (TYPE_MODE (type), dconst_third ());
	  arg = fold_build2_loc (loc, MULT_EXPR, type,
				 CALL_EXPR_ARG (arg, 0),
				 build_real (type, third_trunc));
	  return build_call_expr_loc (loc, expfn, 1, arg);
	}

      /* Optimize cbrt(sqrt(x)) -> pow(x,1/6).  */
      if (BUILTIN_SQRT_P (fcode))
	{
	  tree powfn = mathfn_built_in (type, BUILT_IN_POW);

	  if (powfn)
	    {
	      tree arg0 = CALL_EXPR_ARG (arg, 0);
	      tree tree_root;
	      REAL_VALUE_TYPE dconstroot = dconst_third ();

	      /* Halve 1/3 (decrement the binary exponent) to get 1/6.  */
	      SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
	      dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
	      tree_root = build_real (type, dconstroot);
	      return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
	    }
	}

      /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative.  */
      if (BUILTIN_CBRT_P (fcode))
	{
	  tree arg0 = CALL_EXPR_ARG (arg, 0);
	  if (tree_expr_nonnegative_p (arg0))
	    {
	      tree powfn = mathfn_built_in (type, BUILT_IN_POW);

	      if (powfn)
		{
		  tree tree_root;
		  REAL_VALUE_TYPE dconstroot;

		  /* (1/3) * (1/3) == 1/9.  */
		  real_arithmetic (&dconstroot, MULT_EXPR,
				   dconst_third_ptr (), dconst_third_ptr ());
		  dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
		  tree_root = build_real (type, dconstroot);
		  return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
		}
	    }
	}

      /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative.  */
      if (fcode == BUILT_IN_POW
	  || fcode == BUILT_IN_POWF
	  || fcode == BUILT_IN_POWL)
	{
	  tree arg00 = CALL_EXPR_ARG (arg, 0);
	  tree arg01 = CALL_EXPR_ARG (arg, 1);
	  if (tree_expr_nonnegative_p (arg00))
	    {
	      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
	      const REAL_VALUE_TYPE dconstroot
		= real_value_truncate (TYPE_MODE (type), dconst_third ());
	      tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
					     build_real (type, dconstroot));
	      return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
	    }
	}
    }
  return NULL_TREE;
}
7919
7920 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7921 TYPE is the type of the return value. Return NULL_TREE if no
7922 simplification can be made. */
7923
7924 static tree
7925 fold_builtin_cos (location_t loc,
7926 tree arg, tree type, tree fndecl)
7927 {
7928 tree res, narg;
7929
7930 if (!validate_arg (arg, REAL_TYPE))
7931 return NULL_TREE;
7932
7933 /* Calculate the result when the argument is a constant. */
7934 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7935 return res;
7936
7937 /* Optimize cos(-x) into cos (x). */
7938 if ((narg = fold_strip_sign_ops (arg)))
7939 return build_call_expr_loc (loc, fndecl, 1, narg);
7940
7941 return NULL_TREE;
7942 }
7943
7944 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7945 Return NULL_TREE if no simplification can be made. */
7946
7947 static tree
7948 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
7949 {
7950 if (validate_arg (arg, REAL_TYPE))
7951 {
7952 tree res, narg;
7953
7954 /* Calculate the result when the argument is a constant. */
7955 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7956 return res;
7957
7958 /* Optimize cosh(-x) into cosh (x). */
7959 if ((narg = fold_strip_sign_ops (arg)))
7960 return build_call_expr_loc (loc, fndecl, 1, narg);
7961 }
7962
7963 return NULL_TREE;
7964 }
7965
7966 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7967 argument ARG. TYPE is the type of the return value. Return
7968 NULL_TREE if no simplification can be made. */
7969
static tree
fold_builtin_ccos (location_t loc,
		   tree arg, tree type ATTRIBUTE_UNUSED, tree fndecl,
		   bool hyper ATTRIBUTE_UNUSED)
{
  /* TYPE and HYPER are only read under HAVE_mpc, hence the
     ATTRIBUTE_UNUSED markers.  */
  if (validate_arg (arg, COMPLEX_TYPE)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
    {
      tree tmp;

#ifdef HAVE_mpc
      /* Calculate the result when the argument is a constant.  */
      if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
	return tmp;
#endif

      /* Optimize fn(-x) into fn(x): ccos and ccosh are both even.  */
      if ((tmp = fold_strip_sign_ops (arg)))
	return build_call_expr_loc (loc, fndecl, 1, tmp);
    }

  return NULL_TREE;
}
7993
7994 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7995 Return NULL_TREE if no simplification can be made. */
7996
7997 static tree
7998 fold_builtin_tan (tree arg, tree type)
7999 {
8000 enum built_in_function fcode;
8001 tree res;
8002
8003 if (!validate_arg (arg, REAL_TYPE))
8004 return NULL_TREE;
8005
8006 /* Calculate the result when the argument is a constant. */
8007 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
8008 return res;
8009
8010 /* Optimize tan(atan(x)) = x. */
8011 fcode = builtin_mathfn_code (arg);
8012 if (flag_unsafe_math_optimizations
8013 && (fcode == BUILT_IN_ATAN
8014 || fcode == BUILT_IN_ATANF
8015 || fcode == BUILT_IN_ATANL))
8016 return CALL_EXPR_ARG (arg, 0);
8017
8018 return NULL_TREE;
8019 }
8020
8021 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
8022 NULL_TREE if no simplification can be made. */
8023
static tree
fold_builtin_sincos (location_t loc,
		     tree arg0, tree arg1, tree arg2)
{
  tree type;
  tree res, fn, call;

  /* ARG0 is the angle; ARG1/ARG2 are pointers receiving sin and cos.  */
  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE))
    return NULL_TREE;

  type = TREE_TYPE (arg0);

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
    return res;

  /* Canonicalize sincos to cexpi.  */
  if (!TARGET_C99_FUNCTIONS)
    return NULL_TREE;
  fn = mathfn_built_in (type, BUILT_IN_CEXPI);
  if (!fn)
    return NULL_TREE;

  /* Evaluate cexpi(arg0) once (builtin_save_expr prevents double
     evaluation); its imaginary part is sin, its real part is cos.  */
  call = build_call_expr_loc (loc, fn, 1, arg0);
  call = builtin_save_expr (call);

  /* Emit both stores through the output pointers, sequenced by a
     void COMPOUND_EXPR since sincos itself returns no value.  */
  return build2 (COMPOUND_EXPR, void_type_node,
		 build2 (MODIFY_EXPR, void_type_node,
			 build_fold_indirect_ref_loc (loc, arg1),
			 build1 (IMAGPART_EXPR, type, call)),
		 build2 (MODIFY_EXPR, void_type_node,
			 build_fold_indirect_ref_loc (loc, arg2),
			 build1 (REALPART_EXPR, type, call)));
}
8060
8061 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
8062 NULL_TREE if no simplification can be made. */
8063
8064 static tree
8065 fold_builtin_cexp (location_t loc, tree arg0, tree type)
8066 {
8067 tree rtype;
8068 tree realp, imagp, ifn;
8069 #ifdef HAVE_mpc
8070 tree res;
8071 #endif
8072
8073 if (!validate_arg (arg0, COMPLEX_TYPE)
8074 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8075 return NULL_TREE;
8076
8077 #ifdef HAVE_mpc
8078 /* Calculate the result when the argument is a constant. */
8079 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
8080 return res;
8081 #endif
8082
8083 rtype = TREE_TYPE (TREE_TYPE (arg0));
8084
8085 /* In case we can figure out the real part of arg0 and it is constant zero
8086 fold to cexpi. */
8087 if (!TARGET_C99_FUNCTIONS)
8088 return NULL_TREE;
8089 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
8090 if (!ifn)
8091 return NULL_TREE;
8092
8093 if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
8094 && real_zerop (realp))
8095 {
8096 tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
8097 return build_call_expr_loc (loc, ifn, 1, narg);
8098 }
8099
8100 /* In case we can easily decompose real and imaginary parts split cexp
8101 to exp (r) * cexpi (i). */
8102 if (flag_unsafe_math_optimizations
8103 && realp)
8104 {
8105 tree rfn, rcall, icall;
8106
8107 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
8108 if (!rfn)
8109 return NULL_TREE;
8110
8111 imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
8112 if (!imagp)
8113 return NULL_TREE;
8114
8115 icall = build_call_expr_loc (loc, ifn, 1, imagp);
8116 icall = builtin_save_expr (icall);
8117 rcall = build_call_expr_loc (loc, rfn, 1, realp);
8118 rcall = builtin_save_expr (rcall);
8119 return fold_build2_loc (loc, COMPLEX_EXPR, type,
8120 fold_build2_loc (loc, MULT_EXPR, rtype,
8121 rcall,
8122 fold_build1_loc (loc, REALPART_EXPR,
8123 rtype, icall)),
8124 fold_build2_loc (loc, MULT_EXPR, rtype,
8125 rcall,
8126 fold_build1_loc (loc, IMAGPART_EXPR,
8127 rtype, icall)));
8128 }
8129
8130 return NULL_TREE;
8131 }
8132
8133 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
8134 Return NULL_TREE if no simplification can be made. */
8135
8136 static tree
8137 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
8138 {
8139 if (!validate_arg (arg, REAL_TYPE))
8140 return NULL_TREE;
8141
8142 /* Optimize trunc of constant value. */
8143 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8144 {
8145 REAL_VALUE_TYPE r, x;
8146 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8147
8148 x = TREE_REAL_CST (arg);
8149 real_trunc (&r, TYPE_MODE (type), &x);
8150 return build_real (type, r);
8151 }
8152
8153 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8154 }
8155
8156 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
8157 Return NULL_TREE if no simplification can be made. */
8158
8159 static tree
8160 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
8161 {
8162 if (!validate_arg (arg, REAL_TYPE))
8163 return NULL_TREE;
8164
8165 /* Optimize floor of constant value. */
8166 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8167 {
8168 REAL_VALUE_TYPE x;
8169
8170 x = TREE_REAL_CST (arg);
8171 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8172 {
8173 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8174 REAL_VALUE_TYPE r;
8175
8176 real_floor (&r, TYPE_MODE (type), &x);
8177 return build_real (type, r);
8178 }
8179 }
8180
8181 /* Fold floor (x) where x is nonnegative to trunc (x). */
8182 if (tree_expr_nonnegative_p (arg))
8183 {
8184 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
8185 if (truncfn)
8186 return build_call_expr_loc (loc, truncfn, 1, arg);
8187 }
8188
8189 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8190 }
8191
8192 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
8193 Return NULL_TREE if no simplification can be made. */
8194
8195 static tree
8196 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
8197 {
8198 if (!validate_arg (arg, REAL_TYPE))
8199 return NULL_TREE;
8200
8201 /* Optimize ceil of constant value. */
8202 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8203 {
8204 REAL_VALUE_TYPE x;
8205
8206 x = TREE_REAL_CST (arg);
8207 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8208 {
8209 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8210 REAL_VALUE_TYPE r;
8211
8212 real_ceil (&r, TYPE_MODE (type), &x);
8213 return build_real (type, r);
8214 }
8215 }
8216
8217 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8218 }
8219
8220 /* Fold function call to builtin round, roundf or roundl with argument ARG.
8221 Return NULL_TREE if no simplification can be made. */
8222
8223 static tree
8224 fold_builtin_round (location_t loc, tree fndecl, tree arg)
8225 {
8226 if (!validate_arg (arg, REAL_TYPE))
8227 return NULL_TREE;
8228
8229 /* Optimize round of constant value. */
8230 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8231 {
8232 REAL_VALUE_TYPE x;
8233
8234 x = TREE_REAL_CST (arg);
8235 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8236 {
8237 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8238 REAL_VALUE_TYPE r;
8239
8240 real_round (&r, TYPE_MODE (type), &x);
8241 return build_real (type, r);
8242 }
8243 }
8244
8245 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8246 }
8247
8248 /* Fold function call to builtin lround, lroundf or lroundl (or the
8249 corresponding long long versions) and other rounding functions. ARG
8250 is the argument to the call. Return NULL_TREE if no simplification
8251 can be made. */
8252
static tree
fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Optimize lround of constant value.  */
  if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);

      /* Only finite values fold; Inf/NaN have no integer result.  */
      if (real_isfinite (&x))
	{
	  tree itype = TREE_TYPE (TREE_TYPE (fndecl));
	  tree ftype = TREE_TYPE (arg);
	  unsigned HOST_WIDE_INT lo2;
	  HOST_WIDE_INT hi, lo;
	  REAL_VALUE_TYPE r;

	  /* Round X in the direction the builtin specifies.  */
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    CASE_FLT_FN (BUILT_IN_LFLOOR):
	    CASE_FLT_FN (BUILT_IN_LLFLOOR):
	      real_floor (&r, TYPE_MODE (ftype), &x);
	      break;

	    CASE_FLT_FN (BUILT_IN_LCEIL):
	    CASE_FLT_FN (BUILT_IN_LLCEIL):
	      real_ceil (&r, TYPE_MODE (ftype), &x);
	      break;

	    CASE_FLT_FN (BUILT_IN_LROUND):
	    CASE_FLT_FN (BUILT_IN_LLROUND):
	      real_round (&r, TYPE_MODE (ftype), &x);
	      break;

	    default:
	      gcc_unreachable ();
	    }

	  /* Convert to a double-word integer; fold only when the value
	     fits the target integer type without overflow.  */
	  REAL_VALUE_TO_INT (&lo, &hi, r);
	  if (!fit_double_type (lo, hi, &lo2, &hi, itype))
	    return build_int_cst_wide (itype, lo2, hi);
	}
    }

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x).  */
      if (tree_expr_nonnegative_p (arg))
	return fold_build1_loc (loc, FIX_TRUNC_EXPR,
				TREE_TYPE (TREE_TYPE (fndecl)), arg);
      break;
    default:;
    }

  return fold_fixed_mathfn (loc, fndecl, arg);
}
8313
8314 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
8315 and their long and long long variants (i.e. ffsl and ffsll). ARG is
8316 the argument to the call. Return NULL_TREE if no simplification can
8317 be made. */
8318
static tree
fold_builtin_bitop (tree fndecl, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Optimize for constant argument.  */
  if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
    {
      HOST_WIDE_INT hi, width, result;
      unsigned HOST_WIDE_INT lo;
      tree type;

      /* The constant is held as a pair of host words (LO, HI).  */
      type = TREE_TYPE (arg);
      width = TYPE_PRECISION (type);
      lo = TREE_INT_CST_LOW (arg);

      /* Clear all the bits that are beyond the type's precision.  */
      if (width > HOST_BITS_PER_WIDE_INT)
	{
	  hi = TREE_INT_CST_HIGH (arg);
	  if (width < 2 * HOST_BITS_PER_WIDE_INT)
	    hi &= ~((HOST_WIDE_INT) (-1) >> (width - HOST_BITS_PER_WIDE_INT));
	}
      else
	{
	  hi = 0;
	  if (width < HOST_BITS_PER_WIDE_INT)
	    lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
	}

      switch (DECL_FUNCTION_CODE (fndecl))
	{
	CASE_INT_FN (BUILT_IN_FFS):
	  /* ffs: 1-based index of the least significant set bit,
	     0 for a zero argument.  */
	  if (lo != 0)
	    result = exact_log2 (lo & -lo) + 1;
	  else if (hi != 0)
	    result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi) + 1;
	  else
	    result = 0;
	  break;

	CASE_INT_FN (BUILT_IN_CLZ):
	  /* clz: leading zero count.  For a zero argument the target
	     macro may supply a value in RESULT; otherwise use WIDTH.  */
	  if (hi != 0)
	    result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
	  else if (lo != 0)
	    result = width - floor_log2 (lo) - 1;
	  else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
	    result = width;
	  break;

	CASE_INT_FN (BUILT_IN_CTZ):
	  /* ctz: trailing zero count, with the same zero-argument
	     convention as clz.  */
	  if (lo != 0)
	    result = exact_log2 (lo & -lo);
	  else if (hi != 0)
	    result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi);
	  else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
	    result = width;
	  break;

	CASE_INT_FN (BUILT_IN_POPCOUNT):
	  /* popcount: count set bits by repeatedly clearing the
	     lowest one (x &= x - 1).  */
	  result = 0;
	  while (lo)
	    result++, lo &= lo - 1;
	  while (hi)
	    result++, hi &= hi - 1;
	  break;

	CASE_INT_FN (BUILT_IN_PARITY):
	  /* parity: popcount modulo 2.  */
	  result = 0;
	  while (lo)
	    result++, lo &= lo - 1;
	  while (hi)
	    result++, hi &= hi - 1;
	  result &= 1;
	  break;

	default:
	  gcc_unreachable ();
	}

      return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
    }

  return NULL_TREE;
}
8405
8406 /* Fold function call to builtin_bswap and the long and long long
8407 variants. Return NULL_TREE if no simplification can be made. */
static tree
fold_builtin_bswap (tree fndecl, tree arg)
{
  if (! validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Optimize constant value.  */
  if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
    {
      HOST_WIDE_INT hi, width, r_hi = 0;
      unsigned HOST_WIDE_INT lo, r_lo = 0;
      tree type;

      /* The constant is held as a pair of host words (LO, HI);
	 (R_LO, R_HI) accumulate the byte-reversed result.  */
      type = TREE_TYPE (arg);
      width = TYPE_PRECISION (type);
      lo = TREE_INT_CST_LOW (arg);
      hi = TREE_INT_CST_HIGH (arg);

      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_BSWAP32:
	case BUILT_IN_BSWAP64:
	  {
	    int s;

	    /* Move the byte at bit offset S to the mirrored offset D,
	       selecting the source and destination host word by
	       comparing the offset against the host word width.  */
	    for (s = 0; s < width; s += 8)
	      {
		int d = width - s - 8;
		unsigned HOST_WIDE_INT byte;

		if (s < HOST_BITS_PER_WIDE_INT)
		  byte = (lo >> s) & 0xff;
		else
		  byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;

		if (d < HOST_BITS_PER_WIDE_INT)
		  r_lo |= byte << d;
		else
		  r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
	      }
	  }

	  break;

	default:
	  gcc_unreachable ();
	}

      if (width < HOST_BITS_PER_WIDE_INT)
	return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), r_lo);
      else
	return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl)), r_lo, r_hi);
    }

  return NULL_TREE;
}
8464
8465 /* A subroutine of fold_builtin to fold the various logarithmic
8466 functions. Return NULL_TREE if no simplification can me made.
8467 FUNC is the corresponding MPFR logarithm function. */
8468
static tree
fold_builtin_logarithm (location_t loc, tree fndecl, tree arg,
			int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
{
  if (validate_arg (arg, REAL_TYPE))
    {
      tree type = TREE_TYPE (TREE_TYPE (fndecl));
      tree res;
      const enum built_in_function fcode = builtin_mathfn_code (arg);

      /* Calculate the result when the argument is a constant.  The
	 &dconst0 lower bound with inclusive=false rejects arguments
	 <= 0, for which the log functions are not defined.  */
      if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
	return res;

      /* Special case, optimize logN(expN(x)) = x.  FUNC identifies
	 which log this is; it is matched against the inner call.  */
      if (flag_unsafe_math_optimizations
	  && ((func == mpfr_log
	       && (fcode == BUILT_IN_EXP
		   || fcode == BUILT_IN_EXPF
		   || fcode == BUILT_IN_EXPL))
	      || (func == mpfr_log2
		  && (fcode == BUILT_IN_EXP2
		      || fcode == BUILT_IN_EXP2F
		      || fcode == BUILT_IN_EXP2L))
	      || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
	return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));

      /* Optimize logN(func()) for various exponential functions.  We
	 want to determine the value "x" and the power "exponent" in
	 order to transform logN(x**exponent) into exponent*logN(x).  */
      if (flag_unsafe_math_optimizations)
	{
	  tree exponent = 0, x = 0;

	  switch (fcode)
	    {
	    CASE_FLT_FN (BUILT_IN_EXP):
	      /* Prepare to do logN(exp(exponent) -> exponent*logN(e).  */
	      x = build_real (type, real_value_truncate (TYPE_MODE (type),
							 dconst_e ()));
	      exponent = CALL_EXPR_ARG (arg, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_EXP2):
	      /* Prepare to do logN(exp2(exponent) -> exponent*logN(2).  */
	      x = build_real (type, dconst2);
	      exponent = CALL_EXPR_ARG (arg, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_EXP10):
	    CASE_FLT_FN (BUILT_IN_POW10):
	      /* Prepare to do logN(exp10(exponent) -> exponent*logN(10).  */
	      {
		REAL_VALUE_TYPE dconst10;
		real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
		x = build_real (type, dconst10);
	      }
	      exponent = CALL_EXPR_ARG (arg, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_SQRT):
	      /* Prepare to do logN(sqrt(x) -> 0.5*logN(x).  */
	      x = CALL_EXPR_ARG (arg, 0);
	      exponent = build_real (type, dconsthalf);
	      break;
	    CASE_FLT_FN (BUILT_IN_CBRT):
	      /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x).  */
	      x = CALL_EXPR_ARG (arg, 0);
	      exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
								dconst_third ()));
	      break;
	    CASE_FLT_FN (BUILT_IN_POW):
	      /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x).  */
	      x = CALL_EXPR_ARG (arg, 0);
	      exponent = CALL_EXPR_ARG (arg, 1);
	      break;
	    default:
	      break;
	    }

	  /* Now perform the optimization.  */
	  if (x && exponent)
	    {
	      tree logfn = build_call_expr_loc (loc, fndecl, 1, x);
	      return fold_build2_loc (loc, MULT_EXPR, type, exponent, logfn);
	    }
	}
    }

  return NULL_TREE;
}
8557
8558 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8559 NULL_TREE if no simplification can be made. */
8560
static tree
fold_builtin_hypot (location_t loc, tree fndecl,
		    tree arg0, tree arg1, tree type)
{
  tree res, narg0, narg1;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
    return res;

  /* If either argument to hypot has a negate or abs, strip that off.
     E.g. hypot(-x,fabs(y)) -> hypot(x,y), valid because hypot only
     depends on the magnitudes of its arguments.  */
  narg0 = fold_strip_sign_ops (arg0);
  narg1 = fold_strip_sign_ops (arg1);
  if (narg0 || narg1)
    {
      return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
				  narg1 ? narg1 : arg1);
    }

  /* If either argument is zero, hypot is fabs of the other.  */
  if (real_zerop (arg0))
    return fold_build1_loc (loc, ABS_EXPR, type, arg1);
  else if (real_zerop (arg1))
    return fold_build1_loc (loc, ABS_EXPR, type, arg0);

  /* hypot(x,x) -> fabs(x)*sqrt(2).  */
  if (flag_unsafe_math_optimizations
      && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
    {
      /* sqrt(2) truncated to the precision of TYPE.  */
      const REAL_VALUE_TYPE sqrt2_trunc
	= real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
      return fold_build2_loc (loc, MULT_EXPR, type,
			      fold_build1_loc (loc, ABS_EXPR, type, arg0),
			      build_real (type, sqrt2_trunc));
    }

  return NULL_TREE;
}
8604
8605
8606 /* Fold a builtin function call to pow, powf, or powl. Return
8607 NULL_TREE if no simplification can be made. */
static tree
fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
{
  tree res;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
    return res;

  /* Optimize pow(1.0,y) = 1.0.  */
  if (real_onep (arg0))
    return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);

  if (TREE_CODE (arg1) == REAL_CST
      && !TREE_OVERFLOW (arg1))
    {
      REAL_VALUE_TYPE cint;
      REAL_VALUE_TYPE c;
      HOST_WIDE_INT n;

      c = TREE_REAL_CST (arg1);

      /* Optimize pow(x,0.0) = 1.0.  */
      if (REAL_VALUES_EQUAL (c, dconst0))
	return omit_one_operand_loc (loc, type, build_real (type, dconst1),
				     arg0);

      /* Optimize pow(x,1.0) = x.  */
      if (REAL_VALUES_EQUAL (c, dconst1))
	return arg0;

      /* Optimize pow(x,-1.0) = 1.0/x.  */
      if (REAL_VALUES_EQUAL (c, dconstm1))
	return fold_build2_loc (loc, RDIV_EXPR, type,
				build_real (type, dconst1), arg0);

      /* Optimize pow(x,0.5) = sqrt(x).  */
      if (flag_unsafe_math_optimizations
	  && REAL_VALUES_EQUAL (c, dconsthalf))
	{
	  tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);

	  if (sqrtfn != NULL_TREE)
	    return build_call_expr_loc (loc, sqrtfn, 1, arg0);
	}

      /* Optimize pow(x,1.0/3.0) = cbrt(x).  */
      if (flag_unsafe_math_optimizations)
	{
	  const REAL_VALUE_TYPE dconstroot
	    = real_value_truncate (TYPE_MODE (type), dconst_third ());

	  if (REAL_VALUES_EQUAL (c, dconstroot))
	    {
	      tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
	      if (cbrtfn != NULL_TREE)
		return build_call_expr_loc (loc, cbrtfn, 1, arg0);
	    }
	}

      /* Check for an integer exponent: round-trip C through an integer
	 and compare with the original bit pattern.  */
      n = real_to_integer (&c);
      real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
      if (real_identical (&c, &cint))
	{
	  /* Attempt to evaluate pow at compile-time, unless this should
	     raise an exception.  */
	  if (TREE_CODE (arg0) == REAL_CST
	      && !TREE_OVERFLOW (arg0)
	      && (n > 0
		  || (!flag_trapping_math && !flag_errno_math)
		  || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
	    {
	      REAL_VALUE_TYPE x;
	      bool inexact;

	      x = TREE_REAL_CST (arg0);
	      inexact = real_powi (&x, TYPE_MODE (type), &x, n);
	      /* An inexact result is only acceptable under unsafe math,
		 since the runtime library might round differently.  */
	      if (flag_unsafe_math_optimizations || !inexact)
		return build_real (type, x);
	    }

	  /* Strip sign ops from even integer powers: the result is the
	     same for x and -x when the exponent is even.  */
	  if ((n & 1) == 0 && flag_unsafe_math_optimizations)
	    {
	      tree narg0 = fold_strip_sign_ops (arg0);
	      if (narg0)
		return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
	    }
	}
    }

  if (flag_unsafe_math_optimizations)
    {
      const enum built_in_function fcode = builtin_mathfn_code (arg0);

      /* Optimize pow(expN(x),y) = expN(x*y).  */
      if (BUILTIN_EXPONENT_P (fcode))
	{
	  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
	  tree arg = CALL_EXPR_ARG (arg0, 0);
	  arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
	  return build_call_expr_loc (loc, expfn, 1, arg);
	}

      /* Optimize pow(sqrt(x),y) = pow(x,y*0.5).  */
      if (BUILTIN_SQRT_P (fcode))
	{
	  tree narg0 = CALL_EXPR_ARG (arg0, 0);
	  tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
					build_real (type, dconsthalf));
	  return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
	}

      /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative.  */
      if (BUILTIN_CBRT_P (fcode))
	{
	  tree arg = CALL_EXPR_ARG (arg0, 0);
	  if (tree_expr_nonnegative_p (arg))
	    {
	      const REAL_VALUE_TYPE dconstroot
		= real_value_truncate (TYPE_MODE (type), dconst_third ());
	      tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
					    build_real (type, dconstroot));
	      return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
	    }
	}

      /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative.  */
      if (fcode == BUILT_IN_POW
	  || fcode == BUILT_IN_POWF
	  || fcode == BUILT_IN_POWL)
	{
	  tree arg00 = CALL_EXPR_ARG (arg0, 0);
	  if (tree_expr_nonnegative_p (arg00))
	    {
	      tree arg01 = CALL_EXPR_ARG (arg0, 1);
	      tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
	      return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
	    }
	}
    }

  return NULL_TREE;
}
8757
8758 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8759 Return NULL_TREE if no simplification can be made. */
8760 static tree
8761 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
8762 tree arg0, tree arg1, tree type)
8763 {
8764 if (!validate_arg (arg0, REAL_TYPE)
8765 || !validate_arg (arg1, INTEGER_TYPE))
8766 return NULL_TREE;
8767
8768 /* Optimize pow(1.0,y) = 1.0. */
8769 if (real_onep (arg0))
8770 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8771
8772 if (host_integerp (arg1, 0))
8773 {
8774 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
8775
8776 /* Evaluate powi at compile-time. */
8777 if (TREE_CODE (arg0) == REAL_CST
8778 && !TREE_OVERFLOW (arg0))
8779 {
8780 REAL_VALUE_TYPE x;
8781 x = TREE_REAL_CST (arg0);
8782 real_powi (&x, TYPE_MODE (type), &x, c);
8783 return build_real (type, x);
8784 }
8785
8786 /* Optimize pow(x,0) = 1.0. */
8787 if (c == 0)
8788 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8789 arg0);
8790
8791 /* Optimize pow(x,1) = x. */
8792 if (c == 1)
8793 return arg0;
8794
8795 /* Optimize pow(x,-1) = 1.0/x. */
8796 if (c == -1)
8797 return fold_build2_loc (loc, RDIV_EXPR, type,
8798 build_real (type, dconst1), arg0);
8799 }
8800
8801 return NULL_TREE;
8802 }
8803
8804 /* A subroutine of fold_builtin to fold the various exponent
8805 functions. Return NULL_TREE if no simplification can be made.
8806 FUNC is the corresponding MPFR exponent function. */
8807
8808 static tree
8809 fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
8810 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8811 {
8812 if (validate_arg (arg, REAL_TYPE))
8813 {
8814 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8815 tree res;
8816
8817 /* Calculate the result when the argument is a constant. */
8818 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8819 return res;
8820
8821 /* Optimize expN(logN(x)) = x. */
8822 if (flag_unsafe_math_optimizations)
8823 {
8824 const enum built_in_function fcode = builtin_mathfn_code (arg);
8825
8826 if ((func == mpfr_exp
8827 && (fcode == BUILT_IN_LOG
8828 || fcode == BUILT_IN_LOGF
8829 || fcode == BUILT_IN_LOGL))
8830 || (func == mpfr_exp2
8831 && (fcode == BUILT_IN_LOG2
8832 || fcode == BUILT_IN_LOG2F
8833 || fcode == BUILT_IN_LOG2L))
8834 || (func == mpfr_exp10
8835 && (fcode == BUILT_IN_LOG10
8836 || fcode == BUILT_IN_LOG10F
8837 || fcode == BUILT_IN_LOG10L)))
8838 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8839 }
8840 }
8841
8842 return NULL_TREE;
8843 }
8844
8845 /* Return true if VAR is a VAR_DECL or a component thereof. */
8846
8847 static bool
8848 var_decl_component_p (tree var)
8849 {
8850 tree inner = var;
8851 while (handled_component_p (inner))
8852 inner = TREE_OPERAND (inner, 0);
8853 return SSA_VAR_P (inner);
8854 }
8855
/* Fold function call to builtin memset.  DEST is the destination
   pointer, C the fill value, LEN the byte count, TYPE the result type
   of the call, and IGNORE is true when the call's value is unused.
   The fold replaces a memset that exactly covers a declared scalar
   object with a single store.  Return NULL_TREE if no simplification
   can be made.  */

static tree
fold_builtin_memset (location_t loc, tree dest, tree c, tree len,
		     tree type, bool ignore)
{
  tree var, ret, etype;
  unsigned HOST_WIDE_INT length, cval;

  if (! validate_arg (dest, POINTER_TYPE)
      || ! validate_arg (c, INTEGER_TYPE)
      || ! validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* Only a constant, non-negative length can become a single store.  */
  if (! host_integerp (len, 1))
    return NULL_TREE;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    return omit_one_operand_loc (loc, type, dest, c);

  /* The fill value must be constant, and DEST is re-used below, so it
     may not carry side effects.  */
  if (! host_integerp (c, 1) || TREE_SIDE_EFFECTS (dest))
    return NULL_TREE;

  /* DEST must be the address of a non-volatile object.  */
  var = dest;
  STRIP_NOPS (var);
  if (TREE_CODE (var) != ADDR_EXPR)
    return NULL_TREE;

  var = TREE_OPERAND (var, 0);
  if (TREE_THIS_VOLATILE (var))
    return NULL_TREE;

  /* Look through a destination array to its element type.  */
  etype = TREE_TYPE (var);
  if (TREE_CODE (etype) == ARRAY_TYPE)
    etype = TREE_TYPE (etype);

  /* The store below is only valid for integral or pointer objects.  */
  if (!INTEGRAL_TYPE_P (etype)
      && !POINTER_TYPE_P (etype))
    return NULL_TREE;

  /* Restrict to declared variables (or components thereof); see the
     PR29286 aliasing note in fold_builtin_memory_op.  */
  if (! var_decl_component_p (var))
    return NULL_TREE;

  /* The memset must cover the object exactly and DEST must be aligned
     enough for a single store in ETYPE's mode.  */
  length = tree_low_cst (len, 1);
  if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
      || get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT
	 < (int) length)
    return NULL_TREE;

  /* The replicated fill value must fit in a HOST_WIDE_INT.  */
  if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
    return NULL_TREE;

  if (integer_zerop (c))
    cval = 0;
  else
    {
      if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
	return NULL_TREE;

      /* Replicate the low byte of C across the whole word.  The last
	 shift is split as (cval << 31) << 1 so it stays well defined
	 when HOST_WIDE_INT is only 32 bits wide (a plain << 32 would
	 be undefined behavior on such hosts).  */
      cval = tree_low_cst (c, 1);
      cval &= 0xff;
      cval |= cval << 8;
      cval |= cval << 16;
      cval |= (cval << 31) << 1;
    }

  /* Build *(etype *)dest = cval; return the store itself when the
     call's value is unused, otherwise DEST sequenced after it.  */
  ret = build_int_cst_type (etype, cval);
  var = build_fold_indirect_ref_loc (loc,
				     fold_convert_loc (loc,
						       build_pointer_type (etype),
						       dest));
  ret = build2 (MODIFY_EXPR, etype, var, ret);
  if (ignore)
    return ret;

  return omit_one_operand_loc (loc, type, dest, ret);
}
8935
/* Fold function call to builtin bzero.  DEST is the destination
   pointer, SIZE the byte count, and IGNORE is true when the call's
   value is unused.  Return NULL_TREE if no simplification can be
   made.  */

static tree
fold_builtin_bzero (location_t loc, tree dest, tree size, bool ignore)
{
  if (! validate_arg (dest, POINTER_TYPE)
      || ! validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;

  /* Only fold when the result is unused; bzero returns void anyway,
     so a used value means something is off and is left alone.  */
  if (!ignore)
    return NULL_TREE;

  /* New argument list transforming bzero(ptr x, int y) to
     memset(ptr x, int 0, size_t y).  This is done this way
     so that if it isn't expanded inline, we fallback to
     calling bzero instead of memset.  */

  return fold_builtin_memset (loc, dest, integer_zero_node,
			      fold_convert_loc (loc, sizetype, size),
			      void_type_node, ignore);
}
8958
/* Fold function call to builtin mem{{,p}cpy,move}.  Return
   NULL_TREE if no simplification can be made.
   If ENDP is 0, return DEST (like memcpy).
   If ENDP is 1, return DEST+LEN (like mempcpy).
   If ENDP is 2, return DEST+LEN-1 (like stpcpy).
   If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
   (memmove).

   The main simplification is turning a copy whose length equals the
   size of the accessed objects into a single scalar load/store pair;
   a memmove whose operands provably do not overlap is also demoted
   to memcpy.  */

static tree
fold_builtin_memory_op (location_t loc, tree dest, tree src,
			tree len, tree type, bool ignore, int endp)
{
  tree destvar, srcvar, expr;

  if (! validate_arg (dest, POINTER_TYPE)
      || ! validate_arg (src, POINTER_TYPE)
      || ! validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    return omit_one_operand_loc (loc, type, dest, src);

  /* If SRC and DEST are the same (and not volatile), return
     DEST{,+LEN,+LEN-1}.  */
  if (operand_equal_p (src, dest, 0))
    expr = len;
  else
    {
      tree srctype, desttype;
      int src_align, dest_align;

      /* memmove: try to show the regions cannot overlap so the call
	 can become a memcpy; otherwise give up.  */
      if (endp == 3)
	{
	  ao_ref srcref, destref;

	  src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
	  dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);

	  /* Both DEST and SRC must be pointer types.
	     ??? This is what old code did.  Is the testing for pointer types
	     really mandatory?

	     If either SRC is readonly or length is 1, we can use memcpy.  */
	  if (!dest_align || !src_align)
	    return NULL_TREE;
	  if (readonly_data_expr (src)
	      || (host_integerp (len, 1)
		  && (MIN (src_align, dest_align) / BITS_PER_UNIT
		      >= tree_low_cst (len, 1))))
	    {
	      tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
	      if (!fn)
		return NULL_TREE;
	      return build_call_expr_loc (loc, fn, 3, dest, src, len);
	    }

	  /* If *src and *dest can't overlap, optimize into memcpy as well.  */
	  ao_ref_init_from_ptr_and_size (&srcref, src, len);
	  ao_ref_init_from_ptr_and_size (&destref, dest, len);
	  if (!refs_may_alias_p_1 (&srcref, &destref, false))
	    {
	      tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
	      if (!fn)
		return NULL_TREE;
	      return build_call_expr_loc (loc, fn, 3, dest, src, len);
	    }

	  return NULL_TREE;
	}

      /* From here on: try to turn the copy into a single load/store
	 of LEN bytes; this needs a constant signed length.  */
      if (!host_integerp (len, 0))
	return NULL_TREE;
      /* FIXME:
         This logic lose for arguments like (type *)malloc (sizeof (type)),
         since we strip the casts of up to VOID return value from malloc.
	 Perhaps we ought to inherit type from non-VOID argument here?  */
      STRIP_NOPS (src);
      STRIP_NOPS (dest);
      /* As we fold (void *)(p + CST) to (void *)p + CST undo this here.  */
      if (TREE_CODE (src) == POINTER_PLUS_EXPR)
	{
	  tree tem = TREE_OPERAND (src, 0);
	  STRIP_NOPS (tem);
	  if (tem != TREE_OPERAND (src, 0))
	    src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
	}
      if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
	{
	  tree tem = TREE_OPERAND (dest, 0);
	  STRIP_NOPS (tem);
	  if (tem != TREE_OPERAND (dest, 0))
	    dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
	}
      /* If the pointee is an array whose total size differs from LEN,
	 retarget the pointer at the array's element type.  */
      srctype = TREE_TYPE (TREE_TYPE (src));
      if (srctype
	  && TREE_CODE (srctype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
	{
	  srctype = TREE_TYPE (srctype);
	  STRIP_NOPS (src);
	  src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
	}
      desttype = TREE_TYPE (TREE_TYPE (dest));
      if (desttype
	  && TREE_CODE (desttype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
	{
	  desttype = TREE_TYPE (desttype);
	  STRIP_NOPS (dest);
	  dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
	}
      /* Both pointee types must have a known constant size and must
	 not be volatile for the scalar copy to be valid.  */
      if (!srctype || !desttype
	  || !TYPE_SIZE_UNIT (srctype)
	  || !TYPE_SIZE_UNIT (desttype)
	  || TREE_CODE (TYPE_SIZE_UNIT (srctype)) != INTEGER_CST
	  || TREE_CODE (TYPE_SIZE_UNIT (desttype)) != INTEGER_CST
	  || TYPE_VOLATILE (srctype)
	  || TYPE_VOLATILE (desttype))
	return NULL_TREE;

      src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
      dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
      if (dest_align < (int) TYPE_ALIGN (desttype)
	  || src_align < (int) TYPE_ALIGN (srctype))
	return NULL_TREE;

      /* DEST is evaluated both for the store and for the return value;
	 make sure it is evaluated only once.  */
      if (!ignore)
        dest = builtin_save_expr (dest);

      srcvar = NULL_TREE;
      if (tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
	{
	  srcvar = build_fold_indirect_ref_loc (loc, src);
	  if (TREE_THIS_VOLATILE (srcvar))
	    return NULL_TREE;
	  else if (!tree_int_cst_equal (lang_hooks.expr_size (srcvar), len))
	    srcvar = NULL_TREE;
	  /* With memcpy, it is possible to bypass aliasing rules, so without
	     this check i.e. execute/20060930-2.c would be misoptimized,
	     because it use conflicting alias set to hold argument for the
	     memcpy call.  This check is probably unnecessary with
	     -fno-strict-aliasing.  Similarly for destvar.  See also
	     PR29286.  */
	  else if (!var_decl_component_p (srcvar))
	    srcvar = NULL_TREE;
	}

      destvar = NULL_TREE;
      if (tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
	{
	  destvar = build_fold_indirect_ref_loc (loc, dest);
	  if (TREE_THIS_VOLATILE (destvar))
	    return NULL_TREE;
	  else if (!tree_int_cst_equal (lang_hooks.expr_size (destvar), len))
	    destvar = NULL_TREE;
	  else if (!var_decl_component_p (destvar))
	    destvar = NULL_TREE;
	}

      if (srcvar == NULL_TREE && destvar == NULL_TREE)
	return NULL_TREE;

      /* When only one side yielded a usable variable, synthesize an
	 access on the other side using that side's type (possibly an
	 unaligned, packed variant of it).  */
      if (srcvar == NULL_TREE)
	{
	  tree srcptype;
	  if (TREE_ADDRESSABLE (TREE_TYPE (destvar)))
	    return NULL_TREE;

	  srctype = build_qualified_type (desttype, 0);
	  if (src_align < (int) TYPE_ALIGN (srctype))
	    {
	      if (AGGREGATE_TYPE_P (srctype)
		  || SLOW_UNALIGNED_ACCESS (TYPE_MODE (srctype), src_align))
		return NULL_TREE;

	      srctype = build_variant_type_copy (srctype);
	      TYPE_ALIGN (srctype) = src_align;
	      TYPE_USER_ALIGN (srctype) = 1;
	      TYPE_PACKED (srctype) = 1;
	    }
	  srcptype = build_pointer_type_for_mode (srctype, ptr_mode, true);
	  src = fold_convert_loc (loc, srcptype, src);
	  srcvar = build_fold_indirect_ref_loc (loc, src);
	}
      else if (destvar == NULL_TREE)
	{
	  tree destptype;
	  if (TREE_ADDRESSABLE (TREE_TYPE (srcvar)))
	    return NULL_TREE;

	  desttype = build_qualified_type (srctype, 0);
	  if (dest_align < (int) TYPE_ALIGN (desttype))
	    {
	      if (AGGREGATE_TYPE_P (desttype)
		  || SLOW_UNALIGNED_ACCESS (TYPE_MODE (desttype), dest_align))
		return NULL_TREE;

	      desttype = build_variant_type_copy (desttype);
	      TYPE_ALIGN (desttype) = dest_align;
	      TYPE_USER_ALIGN (desttype) = 1;
	      TYPE_PACKED (desttype) = 1;
	    }
	  destptype = build_pointer_type_for_mode (desttype, ptr_mode, true);
	  dest = fold_convert_loc (loc, destptype, dest);
	  destvar = build_fold_indirect_ref_loc (loc, dest);
	}

      /* Bridge any remaining type mismatch between the loaded value
	 and the store with a conversion or a VIEW_CONVERT_EXPR.  */
      if (srctype == desttype
	  || (gimple_in_ssa_p (cfun)
	      && useless_type_conversion_p (desttype, srctype)))
	expr = srcvar;
      else if ((INTEGRAL_TYPE_P (TREE_TYPE (srcvar))
	   || POINTER_TYPE_P (TREE_TYPE (srcvar)))
	  && (INTEGRAL_TYPE_P (TREE_TYPE (destvar))
	      || POINTER_TYPE_P (TREE_TYPE (destvar))))
	expr = fold_convert_loc (loc, TREE_TYPE (destvar), srcvar);
      else
	expr = fold_build1_loc (loc, VIEW_CONVERT_EXPR,
				TREE_TYPE (destvar), srcvar);
      expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, expr);
    }

  if (ignore)
    return expr;

  /* memcpy/memmove return DEST.  */
  if (endp == 0 || endp == 3)
    return omit_one_operand_loc (loc, type, dest, expr);

  if (expr == len)
    expr = NULL_TREE;

  /* stpcpy-style result points at the last byte copied.  */
  if (endp == 2)
    len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
		       ssize_int (1));

  /* mempcpy/stpcpy return DEST advanced by LEN (adjusted above).  */
  len = fold_convert_loc (loc, sizetype, len);
  dest = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
  dest = fold_convert_loc (loc, type, dest);
  if (expr)
    dest = omit_one_operand_loc (loc, type, dest, expr);
  return dest;
}
9202
9203 /* Fold function call to builtin strcpy with arguments DEST and SRC.
9204 If LEN is not NULL, it represents the length of the string to be
9205 copied. Return NULL_TREE if no simplification can be made. */
9206
9207 tree
9208 fold_builtin_strcpy (location_t loc, tree fndecl, tree dest, tree src, tree len)
9209 {
9210 tree fn;
9211
9212 if (!validate_arg (dest, POINTER_TYPE)
9213 || !validate_arg (src, POINTER_TYPE))
9214 return NULL_TREE;
9215
9216 /* If SRC and DEST are the same (and not volatile), return DEST. */
9217 if (operand_equal_p (src, dest, 0))
9218 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
9219
9220 if (optimize_function_for_size_p (cfun))
9221 return NULL_TREE;
9222
9223 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
9224 if (!fn)
9225 return NULL_TREE;
9226
9227 if (!len)
9228 {
9229 len = c_strlen (src, 1);
9230 if (! len || TREE_SIDE_EFFECTS (len))
9231 return NULL_TREE;
9232 }
9233
9234 len = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
9235 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
9236 build_call_expr_loc (loc, fn, 3, dest, src, len));
9237 }
9238
9239 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
9240 If SLEN is not NULL, it represents the length of the source string.
9241 Return NULL_TREE if no simplification can be made. */
9242
9243 tree
9244 fold_builtin_strncpy (location_t loc, tree fndecl, tree dest,
9245 tree src, tree len, tree slen)
9246 {
9247 tree fn;
9248
9249 if (!validate_arg (dest, POINTER_TYPE)
9250 || !validate_arg (src, POINTER_TYPE)
9251 || !validate_arg (len, INTEGER_TYPE))
9252 return NULL_TREE;
9253
9254 /* If the LEN parameter is zero, return DEST. */
9255 if (integer_zerop (len))
9256 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
9257
9258 /* We can't compare slen with len as constants below if len is not a
9259 constant. */
9260 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
9261 return NULL_TREE;
9262
9263 if (!slen)
9264 slen = c_strlen (src, 1);
9265
9266 /* Now, we must be passed a constant src ptr parameter. */
9267 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
9268 return NULL_TREE;
9269
9270 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
9271
9272 /* We do not support simplification of this case, though we do
9273 support it when expanding trees into RTL. */
9274 /* FIXME: generate a call to __builtin_memset. */
9275 if (tree_int_cst_lt (slen, len))
9276 return NULL_TREE;
9277
9278 /* OK transform into builtin memcpy. */
9279 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
9280 if (!fn)
9281 return NULL_TREE;
9282 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
9283 build_call_expr_loc (loc, fn, 3, dest, src, len));
9284 }
9285
9286 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
9287 arguments to the call, and TYPE is its return type.
9288 Return NULL_TREE if no simplification can be made. */
9289
9290 static tree
9291 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
9292 {
9293 if (!validate_arg (arg1, POINTER_TYPE)
9294 || !validate_arg (arg2, INTEGER_TYPE)
9295 || !validate_arg (len, INTEGER_TYPE))
9296 return NULL_TREE;
9297 else
9298 {
9299 const char *p1;
9300
9301 if (TREE_CODE (arg2) != INTEGER_CST
9302 || !host_integerp (len, 1))
9303 return NULL_TREE;
9304
9305 p1 = c_getstr (arg1);
9306 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
9307 {
9308 char c;
9309 const char *r;
9310 tree tem;
9311
9312 if (target_char_cast (arg2, &c))
9313 return NULL_TREE;
9314
9315 r = (char *) memchr (p1, c, tree_low_cst (len, 1));
9316
9317 if (r == NULL)
9318 return build_int_cst (TREE_TYPE (arg1), 0);
9319
9320 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (arg1), arg1,
9321 size_int (r - p1));
9322 return fold_convert_loc (loc, type, tem);
9323 }
9324 return NULL_TREE;
9325 }
9326 }
9327
/* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
   LEN is the number of bytes compared.  The result, when folded, is
   normalized to -1, 0 or 1.  Return NULL_TREE if no simplification
   can be made.  */

static tree
fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
{
  const char *p1, *p2;

  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return zero.  */
  if (integer_zerop (len))
    return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
				  arg1, arg2);

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);

  p1 = c_getstr (arg1);
  p2 = c_getstr (arg2);

  /* If all arguments are constant, and the value of len is not greater
     than the lengths of arg1 and arg2, evaluate at compile-time.
     The "+ 1" admits comparing up to and including the NUL, since
     c_getstr only sees NUL-terminated string constants.  */
  if (host_integerp (len, 1) && p1 && p2
      && compare_tree_int (len, strlen (p1) + 1) <= 0
      && compare_tree_int (len, strlen (p2) + 1) <= 0)
    {
      const int r = memcmp (p1, p2, tree_low_cst (len, 1));

      /* Normalize the host memcmp result to -1/0/1.  */
      if (r > 0)
	return integer_one_node;
      else if (r < 0)
	return integer_minus_one_node;
      else
	return integer_zero_node;
    }

  /* If len parameter is one, return an expression corresponding to
     (*(const unsigned char*)arg1 - (const unsigned char*)arg2).  */
  if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
    {
      /* memcmp compares as unsigned char, hence the const uchar view.  */
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree ind1
	= fold_convert_loc (loc, integer_type_node,
			    build1 (INDIRECT_REF, cst_uchar_node,
				    fold_convert_loc (loc,
						      cst_uchar_ptr_node,
						      arg1)));
      tree ind2
	= fold_convert_loc (loc, integer_type_node,
			    build1 (INDIRECT_REF, cst_uchar_node,
				    fold_convert_loc (loc,
						      cst_uchar_ptr_node,
						      arg2)));
      return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
    }

  return NULL_TREE;
}
9394
9395 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
9396 Return NULL_TREE if no simplification can be made. */
9397
9398 static tree
9399 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
9400 {
9401 const char *p1, *p2;
9402
9403 if (!validate_arg (arg1, POINTER_TYPE)
9404 || !validate_arg (arg2, POINTER_TYPE))
9405 return NULL_TREE;
9406
9407 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9408 if (operand_equal_p (arg1, arg2, 0))
9409 return integer_zero_node;
9410
9411 p1 = c_getstr (arg1);
9412 p2 = c_getstr (arg2);
9413
9414 if (p1 && p2)
9415 {
9416 const int i = strcmp (p1, p2);
9417 if (i < 0)
9418 return integer_minus_one_node;
9419 else if (i > 0)
9420 return integer_one_node;
9421 else
9422 return integer_zero_node;
9423 }
9424
9425 /* If the second arg is "", return *(const unsigned char*)arg1. */
9426 if (p2 && *p2 == '\0')
9427 {
9428 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9429 tree cst_uchar_ptr_node
9430 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9431
9432 return fold_convert_loc (loc, integer_type_node,
9433 build1 (INDIRECT_REF, cst_uchar_node,
9434 fold_convert_loc (loc,
9435 cst_uchar_ptr_node,
9436 arg1)));
9437 }
9438
9439 /* If the first arg is "", return -*(const unsigned char*)arg2. */
9440 if (p1 && *p1 == '\0')
9441 {
9442 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9443 tree cst_uchar_ptr_node
9444 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9445
9446 tree temp
9447 = fold_convert_loc (loc, integer_type_node,
9448 build1 (INDIRECT_REF, cst_uchar_node,
9449 fold_convert_loc (loc,
9450 cst_uchar_ptr_node,
9451 arg2)));
9452 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
9453 }
9454
9455 return NULL_TREE;
9456 }
9457
/* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
   The folded result is normalized to -1, 0 or 1.  Return NULL_TREE if
   no simplification can be made.  */

static tree
fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
{
  const char *p1, *p2;

  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return zero.  */
  if (integer_zerop (len))
    return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
				  arg1, arg2);

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);

  p1 = c_getstr (arg1);
  p2 = c_getstr (arg2);

  /* Both strings constant with a constant length: evaluate the
     comparison at compile time, normalizing to -1/0/1.  */
  if (host_integerp (len, 1) && p1 && p2)
    {
      const int i = strncmp (p1, p2, tree_low_cst (len, 1));
      if (i > 0)
	return integer_one_node;
      else if (i < 0)
	return integer_minus_one_node;
      else
	return integer_zero_node;
    }

  /* If the second arg is "", and the length is greater than zero,
     return *(const unsigned char*)arg1.  */
  if (p2 && *p2 == '\0'
      && TREE_CODE (len) == INTEGER_CST
      && tree_int_cst_sgn (len) == 1)
    {
      /* Comparison is done as unsigned char, hence the const uchar view.  */
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      return fold_convert_loc (loc, integer_type_node,
			       build1 (INDIRECT_REF, cst_uchar_node,
				       fold_convert_loc (loc,
							 cst_uchar_ptr_node,
							 arg1)));
    }

  /* If the first arg is "", and the length is greater than zero,
     return -*(const unsigned char*)arg2.  */
  if (p1 && *p1 == '\0'
      && TREE_CODE (len) == INTEGER_CST
      && tree_int_cst_sgn (len) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree temp = fold_convert_loc (loc, integer_type_node,
				    build1 (INDIRECT_REF, cst_uchar_node,
					    fold_convert_loc (loc,
							      cst_uchar_ptr_node,
							      arg2)));
      return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
    }

  /* If len parameter is one, return an expression corresponding to
     (*(const unsigned char*)arg1 - (const unsigned char*)arg2).  */
  if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree ind1 = fold_convert_loc (loc, integer_type_node,
				    build1 (INDIRECT_REF, cst_uchar_node,
					    fold_convert_loc (loc,
							      cst_uchar_ptr_node,
							      arg1)));
      tree ind2 = fold_convert_loc (loc, integer_type_node,
				    build1 (INDIRECT_REF, cst_uchar_node,
					    fold_convert_loc (loc,
							      cst_uchar_ptr_node,
							      arg2)));
      return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
    }

  return NULL_TREE;
}
9552
9553 /* Fold function call to builtin signbit, signbitf or signbitl with argument
9554 ARG. Return NULL_TREE if no simplification can be made. */
9555
9556 static tree
9557 fold_builtin_signbit (location_t loc, tree arg, tree type)
9558 {
9559 tree temp;
9560
9561 if (!validate_arg (arg, REAL_TYPE))
9562 return NULL_TREE;
9563
9564 /* If ARG is a compile-time constant, determine the result. */
9565 if (TREE_CODE (arg) == REAL_CST
9566 && !TREE_OVERFLOW (arg))
9567 {
9568 REAL_VALUE_TYPE c;
9569
9570 c = TREE_REAL_CST (arg);
9571 temp = REAL_VALUE_NEGATIVE (c) ? integer_one_node : integer_zero_node;
9572 return fold_convert_loc (loc, type, temp);
9573 }
9574
9575 /* If ARG is non-negative, the result is always zero. */
9576 if (tree_expr_nonnegative_p (arg))
9577 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9578
9579 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9580 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
9581 return fold_build2_loc (loc, LT_EXPR, type, arg,
9582 build_real (TREE_TYPE (arg), dconst0));
9583
9584 return NULL_TREE;
9585 }
9586
/* Fold function call to builtin copysign, copysignf or copysignl with
   arguments ARG1 and ARG2.  FNDECL is the function's declaration and
   TYPE its result type.  Return NULL_TREE if no simplification can
   be made.  */

static tree
fold_builtin_copysign (location_t loc, tree fndecl,
		       tree arg1, tree arg2, tree type)
{
  tree tem;

  if (!validate_arg (arg1, REAL_TYPE)
      || !validate_arg (arg2, REAL_TYPE))
    return NULL_TREE;

  /* copysign(X,X) is X.  */
  if (operand_equal_p (arg1, arg2, 0))
    return fold_convert_loc (loc, type, arg1);

  /* If ARG1 and ARG2 are compile-time constants, determine the result.  */
  if (TREE_CODE (arg1) == REAL_CST
      && TREE_CODE (arg2) == REAL_CST
      && !TREE_OVERFLOW (arg1)
      && !TREE_OVERFLOW (arg2))
    {
      REAL_VALUE_TYPE c1, c2;

      c1 = TREE_REAL_CST (arg1);
      c2 = TREE_REAL_CST (arg2);
      /* c1.sign := c2.sign.  */
      real_copysign (&c1, &c2);
      return build_real (type, c1);
    }

  /* copysign(X, Y) is fabs(X) when Y is always non-negative.
     Remember to evaluate Y for side-effects.  */
  if (tree_expr_nonnegative_p (arg2))
    return omit_one_operand_loc (loc, type,
				 fold_build1_loc (loc, ABS_EXPR, type, arg1),
				 arg2);

  /* Strip sign changing operations for the first argument, since
     copysign overrides ARG1's sign anyway.  */
  tem = fold_strip_sign_ops (arg1);
  if (tem)
    return build_call_expr_loc (loc, fndecl, 2, tem, arg2);

  return NULL_TREE;
}
9634
9635 /* Fold a call to builtin isascii with argument ARG. */
9636
9637 static tree
9638 fold_builtin_isascii (location_t loc, tree arg)
9639 {
9640 if (!validate_arg (arg, INTEGER_TYPE))
9641 return NULL_TREE;
9642 else
9643 {
9644 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9645 arg = build2 (BIT_AND_EXPR, integer_type_node, arg,
9646 build_int_cst (NULL_TREE,
9647 ~ (unsigned HOST_WIDE_INT) 0x7f));
9648 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
9649 arg, integer_zero_node);
9650 }
9651 }
9652
/* Fold a call to builtin toascii with argument ARG.  */

static tree
fold_builtin_toascii (location_t loc, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Transform toascii(c) -> (c & 0x7f), i.e. keep only the low seven
     bits of the character value.  */
  return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
			  build_int_cst (NULL_TREE, 0x7f));
}
9665
9666 /* Fold a call to builtin isdigit with argument ARG. */
9667
9668 static tree
9669 fold_builtin_isdigit (location_t loc, tree arg)
9670 {
9671 if (!validate_arg (arg, INTEGER_TYPE))
9672 return NULL_TREE;
9673 else
9674 {
9675 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9676 /* According to the C standard, isdigit is unaffected by locale.
9677 However, it definitely is affected by the target character set. */
9678 unsigned HOST_WIDE_INT target_digit0
9679 = lang_hooks.to_target_charset ('0');
9680
9681 if (target_digit0 == 0)
9682 return NULL_TREE;
9683
9684 arg = fold_convert_loc (loc, unsigned_type_node, arg);
9685 arg = build2 (MINUS_EXPR, unsigned_type_node, arg,
9686 build_int_cst (unsigned_type_node, target_digit0));
9687 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
9688 build_int_cst (unsigned_type_node, 9));
9689 }
9690 }
9691
9692 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9693
9694 static tree
9695 fold_builtin_fabs (location_t loc, tree arg, tree type)
9696 {
9697 if (!validate_arg (arg, REAL_TYPE))
9698 return NULL_TREE;
9699
9700 arg = fold_convert_loc (loc, type, arg);
9701 if (TREE_CODE (arg) == REAL_CST)
9702 return fold_abs_const (arg, type);
9703 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9704 }
9705
9706 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9707
9708 static tree
9709 fold_builtin_abs (location_t loc, tree arg, tree type)
9710 {
9711 if (!validate_arg (arg, INTEGER_TYPE))
9712 return NULL_TREE;
9713
9714 arg = fold_convert_loc (loc, type, arg);
9715 if (TREE_CODE (arg) == INTEGER_CST)
9716 return fold_abs_const (arg, type);
9717 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9718 }
9719
/* Fold a call to builtin fmin or fmax.  ARG0 and ARG1 are the two
   operands, TYPE the result type, and MAX selects fmax (true) versus
   fmin (false).  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
			tree type, bool max)
{
  if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
    {
      /* Calculate the result when the argument is a constant, using
	 MPFR's max/min which implement the C99 NaN semantics.  */
      tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));

      if (res)
	return res;

      /* If either argument is NaN, return the other one.  Avoid the
	 transformation if we get (and honor) a signalling NaN.  Using
	 omit_one_operand() ensures we create a non-lvalue.  */
      if (TREE_CODE (arg0) == REAL_CST
	  && real_isnan (&TREE_REAL_CST (arg0))
	  && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	      || ! TREE_REAL_CST (arg0).signalling))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      if (TREE_CODE (arg1) == REAL_CST
	  && real_isnan (&TREE_REAL_CST (arg1))
	  && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
	      || ! TREE_REAL_CST (arg1).signalling))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* Transform fmin/fmax(x,x) -> x.  */
      if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR.  C99 requires these
	 functions to return the numeric arg if the other one is NaN.
	 These tree codes don't honor that, so only transform if
	 -ffinite-math-only is set.  C99 doesn't require -0.0 to be
	 handled, so we don't have to worry about it either.  */
      if (flag_finite_math_only)
	return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
				fold_convert_loc (loc, type, arg0),
				fold_convert_loc (loc, type, arg1));
    }
  return NULL_TREE;
}
9764
9765 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9766
9767 static tree
9768 fold_builtin_carg (location_t loc, tree arg, tree type)
9769 {
9770 if (validate_arg (arg, COMPLEX_TYPE)
9771 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9772 {
9773 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9774
9775 if (atan2_fn)
9776 {
9777 tree new_arg = builtin_save_expr (arg);
9778 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9779 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9780 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9781 }
9782 }
9783
9784 return NULL_TREE;
9785 }
9786
/* Fold a call to builtin logb/ilogb.  ARG is the argument and RETTYPE
   the function's return type: real for logb, integer for ilogb.
   Only a compile-time constant argument is folded.  Return NULL_TREE
   if no simplification can be made.  */

static tree
fold_builtin_logb (location_t loc, tree arg, tree rettype)
{
  if (! validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);

      switch (value->cl)
	{
	case rvc_nan:
	case rvc_inf:
	  /* If arg is Inf or NaN and we're logb, return it.  */
	  if (TREE_CODE (rettype) == REAL_TYPE)
	    return fold_convert_loc (loc, rettype, arg);
	  /* Fall through... */
	case rvc_zero:
	  /* Zero may set errno and/or raise an exception for logb, also
	     for ilogb we don't know FP_ILOGB0.  */
	  return NULL_TREE;
	case rvc_normal:
	  /* For normal numbers, proceed iff radix == 2.  In GCC,
	     normalized significands are in the range [0.5, 1.0).  We
	     want the exponent as if they were [1.0, 2.0) so get the
	     exponent and subtract 1.  */
	  if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
	    return fold_convert_loc (loc, rettype,
				     build_int_cst (NULL_TREE,
						    REAL_EXP (value)-1));
	  break;
	}
    }

  return NULL_TREE;
}
9828
9829 /* Fold a call to builtin significand, if radix == 2. */
9830
9831 static tree
9832 fold_builtin_significand (location_t loc, tree arg, tree rettype)
9833 {
9834 if (! validate_arg (arg, REAL_TYPE))
9835 return NULL_TREE;
9836
9837 STRIP_NOPS (arg);
9838
9839 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9840 {
9841 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9842
9843 switch (value->cl)
9844 {
9845 case rvc_zero:
9846 case rvc_nan:
9847 case rvc_inf:
9848 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9849 return fold_convert_loc (loc, rettype, arg);
9850 case rvc_normal:
9851 /* For normal numbers, proceed iff radix == 2. */
9852 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9853 {
9854 REAL_VALUE_TYPE result = *value;
9855 /* In GCC, normalized significands are in the range [0.5,
9856 1.0). We want them to be [1.0, 2.0) so set the
9857 exponent to 1. */
9858 SET_REAL_EXP (&result, 1);
9859 return build_real (rettype, result);
9860 }
9861 break;
9862 }
9863 }
9864
9865 return NULL_TREE;
9866 }
9867
9868 /* Fold a call to builtin frexp, we can assume the base is 2. */
9869
static tree
fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
{
  if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg0);

  /* Only a constant, non-overflowing real argument can be folded.  */
  if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
    return NULL_TREE;

  arg1 = build_fold_indirect_ref_loc (loc, arg1);

  /* Proceed if a valid pointer type was passed in.  frexp stores the
     exponent through an int pointer, hence the integer_type_node check.  */
  if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
      tree frac, exp;

      switch (value->cl)
	{
	case rvc_zero:
	  /* For +-0, return (*exp = 0, +-0).  */
	  exp = integer_zero_node;
	  frac = arg0;
	  break;
	case rvc_nan:
	case rvc_inf:
	  /* For +-NaN or +-Inf, *exp is unspecified, return arg0.  */
	  return omit_one_operand_loc (loc, rettype, arg0, arg1);
	case rvc_normal:
	  {
	    /* Since the frexp function always expects base 2, and in
	       GCC normalized significands are already in the range
	       [0.5, 1.0), we have exactly what frexp wants.  */
	    REAL_VALUE_TYPE frac_rvt = *value;
	    SET_REAL_EXP (&frac_rvt, 0);
	    frac = build_real (rettype, frac_rvt);
	    exp = build_int_cst (NULL_TREE, REAL_EXP (value));
	  }
	  break;
	default:
	  gcc_unreachable ();
	}

      /* Create the COMPOUND_EXPR (*arg1 = trunc, frac).  */
      /* NOTE(review): the MODIFY_EXPR is built with RETTYPE (a real
	 type) even though it stores an int through ARG1 — presumably
	 harmless for folding purposes, but worth confirming against
	 what fold_build2 expects for a MODIFY_EXPR's type.  */
      arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
      TREE_SIDE_EFFECTS (arg1) = 1;
      return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
    }

  return NULL_TREE;
}
9923
9924 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9925 then we can assume the base is two. If it's false, then we have to
9926 check the mode of the TYPE parameter in certain cases. */
9927
9928 static tree
9929 fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
9930 tree type, bool ldexp)
9931 {
9932 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9933 {
9934 STRIP_NOPS (arg0);
9935 STRIP_NOPS (arg1);
9936
9937 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9938 if (real_zerop (arg0) || integer_zerop (arg1)
9939 || (TREE_CODE (arg0) == REAL_CST
9940 && !real_isfinite (&TREE_REAL_CST (arg0))))
9941 return omit_one_operand_loc (loc, type, arg0, arg1);
9942
9943 /* If both arguments are constant, then try to evaluate it. */
9944 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9945 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9946 && host_integerp (arg1, 0))
9947 {
9948 /* Bound the maximum adjustment to twice the range of the
9949 mode's valid exponents. Use abs to ensure the range is
9950 positive as a sanity check. */
9951 const long max_exp_adj = 2 *
9952 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9953 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9954
9955 /* Get the user-requested adjustment. */
9956 const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);
9957
9958 /* The requested adjustment must be inside this range. This
9959 is a preliminary cap to avoid things like overflow, we
9960 may still fail to compute the result for other reasons. */
9961 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9962 {
9963 REAL_VALUE_TYPE initial_result;
9964
9965 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9966
9967 /* Ensure we didn't overflow. */
9968 if (! real_isinf (&initial_result))
9969 {
9970 const REAL_VALUE_TYPE trunc_result
9971 = real_value_truncate (TYPE_MODE (type), initial_result);
9972
9973 /* Only proceed if the target mode can hold the
9974 resulting value. */
9975 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9976 return build_real (type, trunc_result);
9977 }
9978 }
9979 }
9980 }
9981
9982 return NULL_TREE;
9983 }
9984
9985 /* Fold a call to builtin modf. */
9986
9987 static tree
9988 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9989 {
9990 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9991 return NULL_TREE;
9992
9993 STRIP_NOPS (arg0);
9994
9995 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9996 return NULL_TREE;
9997
9998 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9999
10000 /* Proceed if a valid pointer type was passed in. */
10001 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
10002 {
10003 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
10004 REAL_VALUE_TYPE trunc, frac;
10005
10006 switch (value->cl)
10007 {
10008 case rvc_nan:
10009 case rvc_zero:
10010 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
10011 trunc = frac = *value;
10012 break;
10013 case rvc_inf:
10014 /* For +-Inf, return (*arg1 = arg0, +-0). */
10015 frac = dconst0;
10016 frac.sign = value->sign;
10017 trunc = *value;
10018 break;
10019 case rvc_normal:
10020 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
10021 real_trunc (&trunc, VOIDmode, value);
10022 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
10023 /* If the original number was negative and already
10024 integral, then the fractional part is -0.0. */
10025 if (value->sign && frac.cl == rvc_zero)
10026 frac.sign = value->sign;
10027 break;
10028 }
10029
10030 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
10031 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
10032 build_real (rettype, trunc));
10033 TREE_SIDE_EFFECTS (arg1) = 1;
10034 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
10035 build_real (rettype, frac));
10036 }
10037
10038 return NULL_TREE;
10039 }
10040
10041 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
10042 ARG is the argument for the call. */
10043
static tree
fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  REAL_VALUE_TYPE r;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  switch (builtin_index)
    {
    case BUILT_IN_ISINF:
      /* If the format has no infinities, isinf is trivially false.  */
      if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg);

      if (TREE_CODE (arg) == REAL_CST)
	{
	  /* Constant argument: +Inf -> 1, -Inf -> -1, otherwise 0.
	     NOTE(review): these integer_*_node results are int-typed
	     rather than converted to TYPE — presumably the callers
	     cope; confirm if TYPE can differ from int here.  */
	  r = TREE_REAL_CST (arg);
	  if (real_isinf (&r))
	    return real_compare (GT_EXPR, &r, &dconst0)
		   ? integer_one_node : integer_minus_one_node;
	  else
	    return integer_zero_node;
	}

      return NULL_TREE;

    case BUILT_IN_ISINF_SIGN:
      {
	/* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
	/* In a boolean context, GCC will fold the inner COND_EXPR to
	   1.  So e.g. "if (isinf_sign(x))" would be folded to just
	   "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
	tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
	tree isinf_fn = built_in_decls[BUILT_IN_ISINF];
	tree tmp = NULL_TREE;

	/* Evaluate ARG only once even though it appears in both calls.  */
	arg = builtin_save_expr (arg);

	if (signbit_fn && isinf_fn)
	  {
	    tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
	    tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);

	    /* Normalize both calls to 0/1 booleans before combining.  */
	    signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
					signbit_call, integer_zero_node);
	    isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
				      isinf_call, integer_zero_node);

	    tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
			       integer_minus_one_node, integer_one_node);
	    tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
			       isinf_call, tmp,
			       integer_zero_node);
	  }

	return tmp;
      }

    case BUILT_IN_ISFINITE:
      /* Without NaNs or infinities every value is finite.  */
      if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
	  && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
	return omit_one_operand_loc (loc, type, integer_one_node, arg);

      if (TREE_CODE (arg) == REAL_CST)
	{
	  r = TREE_REAL_CST (arg);
	  return real_isfinite (&r) ? integer_one_node : integer_zero_node;
	}

      return NULL_TREE;

    case BUILT_IN_ISNAN:
      /* Without NaNs, isnan is trivially false.  */
      if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg);

      if (TREE_CODE (arg) == REAL_CST)
	{
	  r = TREE_REAL_CST (arg);
	  return real_isnan (&r) ? integer_one_node : integer_zero_node;
	}

      /* Non-constant: isnan(x) is equivalent to x unordered-with x.
	 Save ARG so it is evaluated only once.  */
      arg = builtin_save_expr (arg);
      return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);

    default:
      gcc_unreachable ();
    }
}
10133
10134 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
10135 This builtin will generate code to return the appropriate floating
10136 point classification depending on the value of the floating point
10137 number passed in. The possible return values must be supplied as
10138 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
10139 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipses is for exactly
10140 one floating point argument which is "type generic". */
10141
static tree
fold_builtin_fpclassify (location_t loc, tree exp)
{
  tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
    arg, type, res, tmp;
  enum machine_mode mode;
  REAL_VALUE_TYPE r;
  char buf[128];

  /* Verify the required arguments in the original call.  */
  if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
			 INTEGER_TYPE, INTEGER_TYPE,
			 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_TREE;

  /* The five classification values, in the order the builtin
     requires them, followed by the floating-point argument.  */
  fp_nan = CALL_EXPR_ARG (exp, 0);
  fp_infinite = CALL_EXPR_ARG (exp, 1);
  fp_normal = CALL_EXPR_ARG (exp, 2);
  fp_subnormal = CALL_EXPR_ARG (exp, 3);
  fp_zero = CALL_EXPR_ARG (exp, 4);
  arg = CALL_EXPR_ARG (exp, 5);
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (type);
  /* Work on |arg|, evaluated only once; the classification is
     sign-independent.  */
  arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));

  /* fpclassify(x) ->
       isnan(x) ? FP_NAN :
         (fabs(x) == Inf ? FP_INFINITE :
	   (fabs(x) >= DBL_MIN ? FP_NORMAL :
	     (x == 0 ? FP_ZERO : FP_SUBNORMAL))).  */

  /* Innermost test: zero vs. subnormal.  */
  tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
		     build_real (type, dconst0));
  res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
		     tmp, fp_zero, fp_subnormal);

  /* Build the smallest normal number of MODE as the hex-float
     literal 0x1p(emin-1) and compare against it.  */
  sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
  real_from_string (&r, buf);
  tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
		     arg, build_real (type, r));
  res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);

  /* Only test for Inf/NaN when the mode honors them; otherwise the
     corresponding classifications are unreachable anyway.  */
  if (HONOR_INFINITIES (mode))
    {
      real_inf (&r);
      tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
			 build_real (type, r));
      res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
			 fp_infinite, res);
    }

  if (HONOR_NANS (mode))
    {
      tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
      res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
    }

  return res;
}
10201
10202 /* Fold a call to an unordered comparison function such as
10203 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
10204 being called and ARG0 and ARG1 are the arguments for the call.
10205 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
10206 the opposite of the desired result. UNORDERED_CODE is used
10207 for modes that can hold NaNs and ORDERED_CODE is used for
10208 the rest. */
10209
10210 static tree
10211 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
10212 enum tree_code unordered_code,
10213 enum tree_code ordered_code)
10214 {
10215 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10216 enum tree_code code;
10217 tree type0, type1;
10218 enum tree_code code0, code1;
10219 tree cmp_type = NULL_TREE;
10220
10221 type0 = TREE_TYPE (arg0);
10222 type1 = TREE_TYPE (arg1);
10223
10224 code0 = TREE_CODE (type0);
10225 code1 = TREE_CODE (type1);
10226
10227 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
10228 /* Choose the wider of two real types. */
10229 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
10230 ? type0 : type1;
10231 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
10232 cmp_type = type0;
10233 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
10234 cmp_type = type1;
10235
10236 arg0 = fold_convert_loc (loc, cmp_type, arg0);
10237 arg1 = fold_convert_loc (loc, cmp_type, arg1);
10238
10239 if (unordered_code == UNORDERED_EXPR)
10240 {
10241 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
10242 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
10243 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
10244 }
10245
10246 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
10247 : ordered_code;
10248 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
10249 fold_build2_loc (loc, code, type, arg0, arg1));
10250 }
10251
10252 /* Fold a call to built-in function FNDECL with 0 arguments.
10253 IGNORE is true if the result of the function call is ignored. This
10254 function returns NULL_TREE if no simplification was possible. */
10255
static tree
fold_builtin_0 (location_t loc, tree fndecl, bool ignore ATTRIBUTE_UNUSED)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  switch (fcode)
    {
    /* __builtin_inf{,f,l} and the decimal-float variants.  The bool
       argument distinguishes this case from HUGE_VAL below; see
       fold_builtin_inf for its meaning.  */
    CASE_FLT_FN (BUILT_IN_INF):
    case BUILT_IN_INFD32:
    case BUILT_IN_INFD64:
    case BUILT_IN_INFD128:
      return fold_builtin_inf (loc, type, true);

    CASE_FLT_FN (BUILT_IN_HUGE_VAL):
      return fold_builtin_inf (loc, type, false);

    case BUILT_IN_CLASSIFY_TYPE:
      /* No argument: classify the empty argument list.  */
      return fold_builtin_classify_type (NULL_TREE);

    default:
      break;
    }
  return NULL_TREE;
}
10280
10281 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
10282 IGNORE is true if the result of the function call is ignored. This
10283 function returns NULL_TREE if no simplification was possible. */
10284
10285 static tree
10286 fold_builtin_1 (location_t loc, tree fndecl, tree arg0, bool ignore)
10287 {
10288 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10289 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10290 switch (fcode)
10291 {
10292
10293 case BUILT_IN_CONSTANT_P:
10294 {
10295 tree val = fold_builtin_constant_p (arg0);
10296
10297 /* Gimplification will pull the CALL_EXPR for the builtin out of
10298 an if condition. When not optimizing, we'll not CSE it back.
10299 To avoid link error types of regressions, return false now. */
10300 if (!val && !optimize)
10301 val = integer_zero_node;
10302
10303 return val;
10304 }
10305
10306 case BUILT_IN_CLASSIFY_TYPE:
10307 return fold_builtin_classify_type (arg0);
10308
10309 case BUILT_IN_STRLEN:
10310 return fold_builtin_strlen (loc, arg0);
10311
10312 CASE_FLT_FN (BUILT_IN_FABS):
10313 return fold_builtin_fabs (loc, arg0, type);
10314
10315 case BUILT_IN_ABS:
10316 case BUILT_IN_LABS:
10317 case BUILT_IN_LLABS:
10318 case BUILT_IN_IMAXABS:
10319 return fold_builtin_abs (loc, arg0, type);
10320
10321 CASE_FLT_FN (BUILT_IN_CONJ):
10322 if (validate_arg (arg0, COMPLEX_TYPE)
10323 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10324 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
10325 break;
10326
10327 CASE_FLT_FN (BUILT_IN_CREAL):
10328 if (validate_arg (arg0, COMPLEX_TYPE)
10329 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10330 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));;
10331 break;
10332
10333 CASE_FLT_FN (BUILT_IN_CIMAG):
10334 if (validate_arg (arg0, COMPLEX_TYPE))
10335 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
10336 break;
10337
10338 CASE_FLT_FN (BUILT_IN_CCOS):
10339 return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ false);
10340
10341 CASE_FLT_FN (BUILT_IN_CCOSH):
10342 return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ true);
10343
10344 #ifdef HAVE_mpc
10345 CASE_FLT_FN (BUILT_IN_CSIN):
10346 if (validate_arg (arg0, COMPLEX_TYPE)
10347 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10348 return do_mpc_arg1 (arg0, type, mpc_sin);
10349 break;
10350
10351 CASE_FLT_FN (BUILT_IN_CSINH):
10352 if (validate_arg (arg0, COMPLEX_TYPE)
10353 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10354 return do_mpc_arg1 (arg0, type, mpc_sinh);
10355 break;
10356
10357 CASE_FLT_FN (BUILT_IN_CTAN):
10358 if (validate_arg (arg0, COMPLEX_TYPE)
10359 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10360 return do_mpc_arg1 (arg0, type, mpc_tan);
10361 break;
10362
10363 CASE_FLT_FN (BUILT_IN_CTANH):
10364 if (validate_arg (arg0, COMPLEX_TYPE)
10365 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10366 return do_mpc_arg1 (arg0, type, mpc_tanh);
10367 break;
10368
10369 CASE_FLT_FN (BUILT_IN_CLOG):
10370 if (validate_arg (arg0, COMPLEX_TYPE)
10371 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10372 return do_mpc_arg1 (arg0, type, mpc_log);
10373 break;
10374
10375 CASE_FLT_FN (BUILT_IN_CSQRT):
10376 if (validate_arg (arg0, COMPLEX_TYPE)
10377 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10378 return do_mpc_arg1 (arg0, type, mpc_sqrt);
10379 break;
10380 #endif
10381
10382 CASE_FLT_FN (BUILT_IN_CABS):
10383 return fold_builtin_cabs (loc, arg0, type, fndecl);
10384
10385 CASE_FLT_FN (BUILT_IN_CARG):
10386 return fold_builtin_carg (loc, arg0, type);
10387
10388 CASE_FLT_FN (BUILT_IN_SQRT):
10389 return fold_builtin_sqrt (loc, arg0, type);
10390
10391 CASE_FLT_FN (BUILT_IN_CBRT):
10392 return fold_builtin_cbrt (loc, arg0, type);
10393
10394 CASE_FLT_FN (BUILT_IN_ASIN):
10395 if (validate_arg (arg0, REAL_TYPE))
10396 return do_mpfr_arg1 (arg0, type, mpfr_asin,
10397 &dconstm1, &dconst1, true);
10398 break;
10399
10400 CASE_FLT_FN (BUILT_IN_ACOS):
10401 if (validate_arg (arg0, REAL_TYPE))
10402 return do_mpfr_arg1 (arg0, type, mpfr_acos,
10403 &dconstm1, &dconst1, true);
10404 break;
10405
10406 CASE_FLT_FN (BUILT_IN_ATAN):
10407 if (validate_arg (arg0, REAL_TYPE))
10408 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
10409 break;
10410
10411 CASE_FLT_FN (BUILT_IN_ASINH):
10412 if (validate_arg (arg0, REAL_TYPE))
10413 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
10414 break;
10415
10416 CASE_FLT_FN (BUILT_IN_ACOSH):
10417 if (validate_arg (arg0, REAL_TYPE))
10418 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
10419 &dconst1, NULL, true);
10420 break;
10421
10422 CASE_FLT_FN (BUILT_IN_ATANH):
10423 if (validate_arg (arg0, REAL_TYPE))
10424 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
10425 &dconstm1, &dconst1, false);
10426 break;
10427
10428 CASE_FLT_FN (BUILT_IN_SIN):
10429 if (validate_arg (arg0, REAL_TYPE))
10430 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
10431 break;
10432
10433 CASE_FLT_FN (BUILT_IN_COS):
10434 return fold_builtin_cos (loc, arg0, type, fndecl);
10435
10436 CASE_FLT_FN (BUILT_IN_TAN):
10437 return fold_builtin_tan (arg0, type);
10438
10439 CASE_FLT_FN (BUILT_IN_CEXP):
10440 return fold_builtin_cexp (loc, arg0, type);
10441
10442 CASE_FLT_FN (BUILT_IN_CEXPI):
10443 if (validate_arg (arg0, REAL_TYPE))
10444 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
10445 break;
10446
10447 CASE_FLT_FN (BUILT_IN_SINH):
10448 if (validate_arg (arg0, REAL_TYPE))
10449 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
10450 break;
10451
10452 CASE_FLT_FN (BUILT_IN_COSH):
10453 return fold_builtin_cosh (loc, arg0, type, fndecl);
10454
10455 CASE_FLT_FN (BUILT_IN_TANH):
10456 if (validate_arg (arg0, REAL_TYPE))
10457 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
10458 break;
10459
10460 CASE_FLT_FN (BUILT_IN_ERF):
10461 if (validate_arg (arg0, REAL_TYPE))
10462 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
10463 break;
10464
10465 CASE_FLT_FN (BUILT_IN_ERFC):
10466 if (validate_arg (arg0, REAL_TYPE))
10467 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
10468 break;
10469
10470 CASE_FLT_FN (BUILT_IN_TGAMMA):
10471 if (validate_arg (arg0, REAL_TYPE))
10472 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
10473 break;
10474
10475 CASE_FLT_FN (BUILT_IN_EXP):
10476 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
10477
10478 CASE_FLT_FN (BUILT_IN_EXP2):
10479 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
10480
10481 CASE_FLT_FN (BUILT_IN_EXP10):
10482 CASE_FLT_FN (BUILT_IN_POW10):
10483 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
10484
10485 CASE_FLT_FN (BUILT_IN_EXPM1):
10486 if (validate_arg (arg0, REAL_TYPE))
10487 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
10488 break;
10489
10490 CASE_FLT_FN (BUILT_IN_LOG):
10491 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log);
10492
10493 CASE_FLT_FN (BUILT_IN_LOG2):
10494 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log2);
10495
10496 CASE_FLT_FN (BUILT_IN_LOG10):
10497 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log10);
10498
10499 CASE_FLT_FN (BUILT_IN_LOG1P):
10500 if (validate_arg (arg0, REAL_TYPE))
10501 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
10502 &dconstm1, NULL, false);
10503 break;
10504
10505 CASE_FLT_FN (BUILT_IN_J0):
10506 if (validate_arg (arg0, REAL_TYPE))
10507 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10508 NULL, NULL, 0);
10509 break;
10510
10511 CASE_FLT_FN (BUILT_IN_J1):
10512 if (validate_arg (arg0, REAL_TYPE))
10513 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10514 NULL, NULL, 0);
10515 break;
10516
10517 CASE_FLT_FN (BUILT_IN_Y0):
10518 if (validate_arg (arg0, REAL_TYPE))
10519 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10520 &dconst0, NULL, false);
10521 break;
10522
10523 CASE_FLT_FN (BUILT_IN_Y1):
10524 if (validate_arg (arg0, REAL_TYPE))
10525 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10526 &dconst0, NULL, false);
10527 break;
10528
10529 CASE_FLT_FN (BUILT_IN_NAN):
10530 case BUILT_IN_NAND32:
10531 case BUILT_IN_NAND64:
10532 case BUILT_IN_NAND128:
10533 return fold_builtin_nan (arg0, type, true);
10534
10535 CASE_FLT_FN (BUILT_IN_NANS):
10536 return fold_builtin_nan (arg0, type, false);
10537
10538 CASE_FLT_FN (BUILT_IN_FLOOR):
10539 return fold_builtin_floor (loc, fndecl, arg0);
10540
10541 CASE_FLT_FN (BUILT_IN_CEIL):
10542 return fold_builtin_ceil (loc, fndecl, arg0);
10543
10544 CASE_FLT_FN (BUILT_IN_TRUNC):
10545 return fold_builtin_trunc (loc, fndecl, arg0);
10546
10547 CASE_FLT_FN (BUILT_IN_ROUND):
10548 return fold_builtin_round (loc, fndecl, arg0);
10549
10550 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10551 CASE_FLT_FN (BUILT_IN_RINT):
10552 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
10553
10554 CASE_FLT_FN (BUILT_IN_LCEIL):
10555 CASE_FLT_FN (BUILT_IN_LLCEIL):
10556 CASE_FLT_FN (BUILT_IN_LFLOOR):
10557 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10558 CASE_FLT_FN (BUILT_IN_LROUND):
10559 CASE_FLT_FN (BUILT_IN_LLROUND):
10560 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
10561
10562 CASE_FLT_FN (BUILT_IN_LRINT):
10563 CASE_FLT_FN (BUILT_IN_LLRINT):
10564 return fold_fixed_mathfn (loc, fndecl, arg0);
10565
10566 case BUILT_IN_BSWAP32:
10567 case BUILT_IN_BSWAP64:
10568 return fold_builtin_bswap (fndecl, arg0);
10569
10570 CASE_INT_FN (BUILT_IN_FFS):
10571 CASE_INT_FN (BUILT_IN_CLZ):
10572 CASE_INT_FN (BUILT_IN_CTZ):
10573 CASE_INT_FN (BUILT_IN_POPCOUNT):
10574 CASE_INT_FN (BUILT_IN_PARITY):
10575 return fold_builtin_bitop (fndecl, arg0);
10576
10577 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10578 return fold_builtin_signbit (loc, arg0, type);
10579
10580 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10581 return fold_builtin_significand (loc, arg0, type);
10582
10583 CASE_FLT_FN (BUILT_IN_ILOGB):
10584 CASE_FLT_FN (BUILT_IN_LOGB):
10585 return fold_builtin_logb (loc, arg0, type);
10586
10587 case BUILT_IN_ISASCII:
10588 return fold_builtin_isascii (loc, arg0);
10589
10590 case BUILT_IN_TOASCII:
10591 return fold_builtin_toascii (loc, arg0);
10592
10593 case BUILT_IN_ISDIGIT:
10594 return fold_builtin_isdigit (loc, arg0);
10595
10596 CASE_FLT_FN (BUILT_IN_FINITE):
10597 case BUILT_IN_FINITED32:
10598 case BUILT_IN_FINITED64:
10599 case BUILT_IN_FINITED128:
10600 case BUILT_IN_ISFINITE:
10601 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10602
10603 CASE_FLT_FN (BUILT_IN_ISINF):
10604 case BUILT_IN_ISINFD32:
10605 case BUILT_IN_ISINFD64:
10606 case BUILT_IN_ISINFD128:
10607 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10608
10609 case BUILT_IN_ISINF_SIGN:
10610 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10611
10612 CASE_FLT_FN (BUILT_IN_ISNAN):
10613 case BUILT_IN_ISNAND32:
10614 case BUILT_IN_ISNAND64:
10615 case BUILT_IN_ISNAND128:
10616 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10617
10618 case BUILT_IN_PRINTF:
10619 case BUILT_IN_PRINTF_UNLOCKED:
10620 case BUILT_IN_VPRINTF:
10621 return fold_builtin_printf (loc, fndecl, arg0, NULL_TREE, ignore, fcode);
10622
10623 default:
10624 break;
10625 }
10626
10627 return NULL_TREE;
10628
10629 }
10630
10631 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10632 IGNORE is true if the result of the function call is ignored. This
10633 function returns NULL_TREE if no simplification was possible. */
10634
10635 static tree
10636 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1, bool ignore)
10637 {
10638 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10639 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10640
10641 switch (fcode)
10642 {
10643 CASE_FLT_FN (BUILT_IN_JN):
10644 if (validate_arg (arg0, INTEGER_TYPE)
10645 && validate_arg (arg1, REAL_TYPE))
10646 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10647 break;
10648
10649 CASE_FLT_FN (BUILT_IN_YN):
10650 if (validate_arg (arg0, INTEGER_TYPE)
10651 && validate_arg (arg1, REAL_TYPE))
10652 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10653 &dconst0, false);
10654 break;
10655
10656 CASE_FLT_FN (BUILT_IN_DREM):
10657 CASE_FLT_FN (BUILT_IN_REMAINDER):
10658 if (validate_arg (arg0, REAL_TYPE)
10659 && validate_arg(arg1, REAL_TYPE))
10660 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10661 break;
10662
10663 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10664 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10665 if (validate_arg (arg0, REAL_TYPE)
10666 && validate_arg(arg1, POINTER_TYPE))
10667 return do_mpfr_lgamma_r (arg0, arg1, type);
10668 break;
10669
10670 CASE_FLT_FN (BUILT_IN_ATAN2):
10671 if (validate_arg (arg0, REAL_TYPE)
10672 && validate_arg(arg1, REAL_TYPE))
10673 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10674 break;
10675
10676 CASE_FLT_FN (BUILT_IN_FDIM):
10677 if (validate_arg (arg0, REAL_TYPE)
10678 && validate_arg(arg1, REAL_TYPE))
10679 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10680 break;
10681
10682 CASE_FLT_FN (BUILT_IN_HYPOT):
10683 return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
10684
10685 #ifdef HAVE_mpc_pow
10686 CASE_FLT_FN (BUILT_IN_CPOW):
10687 if (validate_arg (arg0, COMPLEX_TYPE)
10688 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10689 && validate_arg (arg1, COMPLEX_TYPE)
10690 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
10691 return do_mpc_arg2 (arg0, arg1, type, mpc_pow);
10692 break;
10693 #endif
10694
10695 CASE_FLT_FN (BUILT_IN_LDEXP):
10696 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
10697 CASE_FLT_FN (BUILT_IN_SCALBN):
10698 CASE_FLT_FN (BUILT_IN_SCALBLN):
10699 return fold_builtin_load_exponent (loc, arg0, arg1,
10700 type, /*ldexp=*/false);
10701
10702 CASE_FLT_FN (BUILT_IN_FREXP):
10703 return fold_builtin_frexp (loc, arg0, arg1, type);
10704
10705 CASE_FLT_FN (BUILT_IN_MODF):
10706 return fold_builtin_modf (loc, arg0, arg1, type);
10707
10708 case BUILT_IN_BZERO:
10709 return fold_builtin_bzero (loc, arg0, arg1, ignore);
10710
10711 case BUILT_IN_FPUTS:
10712 return fold_builtin_fputs (loc, arg0, arg1, ignore, false, NULL_TREE);
10713
10714 case BUILT_IN_FPUTS_UNLOCKED:
10715 return fold_builtin_fputs (loc, arg0, arg1, ignore, true, NULL_TREE);
10716
10717 case BUILT_IN_STRSTR:
10718 return fold_builtin_strstr (loc, arg0, arg1, type);
10719
10720 case BUILT_IN_STRCAT:
10721 return fold_builtin_strcat (loc, arg0, arg1);
10722
10723 case BUILT_IN_STRSPN:
10724 return fold_builtin_strspn (loc, arg0, arg1);
10725
10726 case BUILT_IN_STRCSPN:
10727 return fold_builtin_strcspn (loc, arg0, arg1);
10728
10729 case BUILT_IN_STRCHR:
10730 case BUILT_IN_INDEX:
10731 return fold_builtin_strchr (loc, arg0, arg1, type);
10732
10733 case BUILT_IN_STRRCHR:
10734 case BUILT_IN_RINDEX:
10735 return fold_builtin_strrchr (loc, arg0, arg1, type);
10736
10737 case BUILT_IN_STRCPY:
10738 return fold_builtin_strcpy (loc, fndecl, arg0, arg1, NULL_TREE);
10739
10740 case BUILT_IN_STPCPY:
10741 if (ignore)
10742 {
10743 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
10744 if (!fn)
10745 break;
10746
10747 return build_call_expr_loc (loc, fn, 2, arg0, arg1);
10748 }
10749 break;
10750
10751 case BUILT_IN_STRCMP:
10752 return fold_builtin_strcmp (loc, arg0, arg1);
10753
10754 case BUILT_IN_STRPBRK:
10755 return fold_builtin_strpbrk (loc, arg0, arg1, type);
10756
10757 case BUILT_IN_EXPECT:
10758 return fold_builtin_expect (loc, arg0, arg1);
10759
10760 CASE_FLT_FN (BUILT_IN_POW):
10761 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
10762
10763 CASE_FLT_FN (BUILT_IN_POWI):
10764 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
10765
10766 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10767 return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
10768
10769 CASE_FLT_FN (BUILT_IN_FMIN):
10770 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
10771
10772 CASE_FLT_FN (BUILT_IN_FMAX):
10773 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
10774
10775 case BUILT_IN_ISGREATER:
10776 return fold_builtin_unordered_cmp (loc, fndecl,
10777 arg0, arg1, UNLE_EXPR, LE_EXPR);
10778 case BUILT_IN_ISGREATEREQUAL:
10779 return fold_builtin_unordered_cmp (loc, fndecl,
10780 arg0, arg1, UNLT_EXPR, LT_EXPR);
10781 case BUILT_IN_ISLESS:
10782 return fold_builtin_unordered_cmp (loc, fndecl,
10783 arg0, arg1, UNGE_EXPR, GE_EXPR);
10784 case BUILT_IN_ISLESSEQUAL:
10785 return fold_builtin_unordered_cmp (loc, fndecl,
10786 arg0, arg1, UNGT_EXPR, GT_EXPR);
10787 case BUILT_IN_ISLESSGREATER:
10788 return fold_builtin_unordered_cmp (loc, fndecl,
10789 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10790 case BUILT_IN_ISUNORDERED:
10791 return fold_builtin_unordered_cmp (loc, fndecl,
10792 arg0, arg1, UNORDERED_EXPR,
10793 NOP_EXPR);
10794
10795 /* We do the folding for va_start in the expander. */
10796 case BUILT_IN_VA_START:
10797 break;
10798
10799 case BUILT_IN_SPRINTF:
10800 return fold_builtin_sprintf (loc, arg0, arg1, NULL_TREE, ignore);
10801
10802 case BUILT_IN_OBJECT_SIZE:
10803 return fold_builtin_object_size (arg0, arg1);
10804
10805 case BUILT_IN_PRINTF:
10806 case BUILT_IN_PRINTF_UNLOCKED:
10807 case BUILT_IN_VPRINTF:
10808 return fold_builtin_printf (loc, fndecl, arg0, arg1, ignore, fcode);
10809
10810 case BUILT_IN_PRINTF_CHK:
10811 case BUILT_IN_VPRINTF_CHK:
10812 if (!validate_arg (arg0, INTEGER_TYPE)
10813 || TREE_SIDE_EFFECTS (arg0))
10814 return NULL_TREE;
10815 else
10816 return fold_builtin_printf (loc, fndecl,
10817 arg1, NULL_TREE, ignore, fcode);
10818 break;
10819
10820 case BUILT_IN_FPRINTF:
10821 case BUILT_IN_FPRINTF_UNLOCKED:
10822 case BUILT_IN_VFPRINTF:
10823 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, NULL_TREE,
10824 ignore, fcode);
10825
10826 default:
10827 break;
10828 }
10829 return NULL_TREE;
10830 }
10831
10832 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10833 and ARG2. IGNORE is true if the result of the function call is ignored.
10834 This function returns NULL_TREE if no simplification was possible. */
10835
10836 static tree
10837 fold_builtin_3 (location_t loc, tree fndecl,
10838 tree arg0, tree arg1, tree arg2, bool ignore)
10839 {
10840 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10841 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10842 switch (fcode)
10843 {
10844
10845 CASE_FLT_FN (BUILT_IN_SINCOS):
10846 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10847
10848 CASE_FLT_FN (BUILT_IN_FMA):
10849 if (validate_arg (arg0, REAL_TYPE)
10850 && validate_arg(arg1, REAL_TYPE)
10851 && validate_arg(arg2, REAL_TYPE))
10852 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
10853 break;
10854
10855 CASE_FLT_FN (BUILT_IN_REMQUO):
10856 if (validate_arg (arg0, REAL_TYPE)
10857 && validate_arg(arg1, REAL_TYPE)
10858 && validate_arg(arg2, POINTER_TYPE))
10859 return do_mpfr_remquo (arg0, arg1, arg2);
10860 break;
10861
10862 case BUILT_IN_MEMSET:
10863 return fold_builtin_memset (loc, arg0, arg1, arg2, type, ignore);
10864
10865 case BUILT_IN_BCOPY:
10866 return fold_builtin_memory_op (loc, arg1, arg0, arg2,
10867 void_type_node, true, /*endp=*/3);
10868
10869 case BUILT_IN_MEMCPY:
10870 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10871 type, ignore, /*endp=*/0);
10872
10873 case BUILT_IN_MEMPCPY:
10874 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10875 type, ignore, /*endp=*/1);
10876
10877 case BUILT_IN_MEMMOVE:
10878 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10879 type, ignore, /*endp=*/3);
10880
10881 case BUILT_IN_STRNCAT:
10882 return fold_builtin_strncat (loc, arg0, arg1, arg2);
10883
10884 case BUILT_IN_STRNCPY:
10885 return fold_builtin_strncpy (loc, fndecl, arg0, arg1, arg2, NULL_TREE);
10886
10887 case BUILT_IN_STRNCMP:
10888 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10889
10890 case BUILT_IN_MEMCHR:
10891 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10892
10893 case BUILT_IN_BCMP:
10894 case BUILT_IN_MEMCMP:
10895 return fold_builtin_memcmp (loc, arg0, arg1, arg2);;
10896
10897 case BUILT_IN_SPRINTF:
10898 return fold_builtin_sprintf (loc, arg0, arg1, arg2, ignore);
10899
10900 case BUILT_IN_STRCPY_CHK:
10901 case BUILT_IN_STPCPY_CHK:
10902 return fold_builtin_stxcpy_chk (loc, fndecl, arg0, arg1, arg2, NULL_TREE,
10903 ignore, fcode);
10904
10905 case BUILT_IN_STRCAT_CHK:
10906 return fold_builtin_strcat_chk (loc, fndecl, arg0, arg1, arg2);
10907
10908 case BUILT_IN_PRINTF_CHK:
10909 case BUILT_IN_VPRINTF_CHK:
10910 if (!validate_arg (arg0, INTEGER_TYPE)
10911 || TREE_SIDE_EFFECTS (arg0))
10912 return NULL_TREE;
10913 else
10914 return fold_builtin_printf (loc, fndecl, arg1, arg2, ignore, fcode);
10915 break;
10916
10917 case BUILT_IN_FPRINTF:
10918 case BUILT_IN_FPRINTF_UNLOCKED:
10919 case BUILT_IN_VFPRINTF:
10920 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, arg2,
10921 ignore, fcode);
10922
10923 case BUILT_IN_FPRINTF_CHK:
10924 case BUILT_IN_VFPRINTF_CHK:
10925 if (!validate_arg (arg1, INTEGER_TYPE)
10926 || TREE_SIDE_EFFECTS (arg1))
10927 return NULL_TREE;
10928 else
10929 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, NULL_TREE,
10930 ignore, fcode);
10931
10932 default:
10933 break;
10934 }
10935 return NULL_TREE;
10936 }
10937
10938 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10939 ARG2, and ARG3. IGNORE is true if the result of the function call is
10940 ignored. This function returns NULL_TREE if no simplification was
10941 possible. */
10942
10943 static tree
10944 fold_builtin_4 (location_t loc, tree fndecl,
10945 tree arg0, tree arg1, tree arg2, tree arg3, bool ignore)
10946 {
10947 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10948
10949 switch (fcode)
10950 {
10951 case BUILT_IN_MEMCPY_CHK:
10952 case BUILT_IN_MEMPCPY_CHK:
10953 case BUILT_IN_MEMMOVE_CHK:
10954 case BUILT_IN_MEMSET_CHK:
10955 return fold_builtin_memory_chk (loc, fndecl, arg0, arg1, arg2, arg3,
10956 NULL_TREE, ignore,
10957 DECL_FUNCTION_CODE (fndecl));
10958
10959 case BUILT_IN_STRNCPY_CHK:
10960 return fold_builtin_strncpy_chk (loc, arg0, arg1, arg2, arg3, NULL_TREE);
10961
10962 case BUILT_IN_STRNCAT_CHK:
10963 return fold_builtin_strncat_chk (loc, fndecl, arg0, arg1, arg2, arg3);
10964
10965 case BUILT_IN_FPRINTF_CHK:
10966 case BUILT_IN_VFPRINTF_CHK:
10967 if (!validate_arg (arg1, INTEGER_TYPE)
10968 || TREE_SIDE_EFFECTS (arg1))
10969 return NULL_TREE;
10970 else
10971 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, arg3,
10972 ignore, fcode);
10973 break;
10974
10975 default:
10976 break;
10977 }
10978 return NULL_TREE;
10979 }
10980
10981 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10982 arguments, where NARGS <= 4. IGNORE is true if the result of the
10983 function call is ignored. This function returns NULL_TREE if no
10984 simplification was possible. Note that this only folds builtins with
10985 fixed argument patterns. Foldings that do varargs-to-varargs
10986 transformations, or that match calls with more than 4 arguments,
10987 need to be handled with fold_builtin_varargs instead. */
10988
10989 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10990
10991 static tree
10992 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool ignore)
10993 {
10994 tree ret = NULL_TREE;
10995
10996 switch (nargs)
10997 {
10998 case 0:
10999 ret = fold_builtin_0 (loc, fndecl, ignore);
11000 break;
11001 case 1:
11002 ret = fold_builtin_1 (loc, fndecl, args[0], ignore);
11003 break;
11004 case 2:
11005 ret = fold_builtin_2 (loc, fndecl, args[0], args[1], ignore);
11006 break;
11007 case 3:
11008 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2], ignore);
11009 break;
11010 case 4:
11011 ret = fold_builtin_4 (loc, fndecl, args[0], args[1], args[2], args[3],
11012 ignore);
11013 break;
11014 default:
11015 break;
11016 }
11017 if (ret)
11018 {
11019 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11020 SET_EXPR_LOCATION (ret, loc);
11021 TREE_NO_WARNING (ret) = 1;
11022 return ret;
11023 }
11024 return NULL_TREE;
11025 }
11026
11027 /* Builtins with folding operations that operate on "..." arguments
11028 need special handling; we need to store the arguments in a convenient
11029 data structure before attempting any folding. Fortunately there are
11030 only a few builtins that fall into this category. FNDECL is the
11031 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
11032 result of the function call is ignored. */
11033
11034 static tree
11035 fold_builtin_varargs (location_t loc, tree fndecl, tree exp,
11036 bool ignore ATTRIBUTE_UNUSED)
11037 {
11038 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11039 tree ret = NULL_TREE;
11040
11041 switch (fcode)
11042 {
11043 case BUILT_IN_SPRINTF_CHK:
11044 case BUILT_IN_VSPRINTF_CHK:
11045 ret = fold_builtin_sprintf_chk (loc, exp, fcode);
11046 break;
11047
11048 case BUILT_IN_SNPRINTF_CHK:
11049 case BUILT_IN_VSNPRINTF_CHK:
11050 ret = fold_builtin_snprintf_chk (loc, exp, NULL_TREE, fcode);
11051 break;
11052
11053 case BUILT_IN_FPCLASSIFY:
11054 ret = fold_builtin_fpclassify (loc, exp);
11055 break;
11056
11057 default:
11058 break;
11059 }
11060 if (ret)
11061 {
11062 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11063 SET_EXPR_LOCATION (ret, loc);
11064 TREE_NO_WARNING (ret) = 1;
11065 return ret;
11066 }
11067 return NULL_TREE;
11068 }
11069
11070 /* Return true if FNDECL shouldn't be folded right now.
11071 If a built-in function has an inline attribute always_inline
11072 wrapper, defer folding it after always_inline functions have
11073 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
11074 might not be performed. */
11075
11076 static bool
11077 avoid_folding_inline_builtin (tree fndecl)
11078 {
11079 return (DECL_DECLARED_INLINE_P (fndecl)
11080 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
11081 && cfun
11082 && !cfun->always_inline_functions_inlined
11083 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
11084 }
11085
11086 /* A wrapper function for builtin folding that prevents warnings for
11087 "statement without effect" and the like, caused by removing the
11088 call node earlier than the warning is generated. */
11089
tree
fold_call_expr (location_t loc, tree exp, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = get_callee_fndecl (exp);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
	 yet.  Defer folding until we see all the arguments
	 (after inlining).  */
      && !CALL_EXPR_VA_ARG_PACK (exp))
    {
      int nargs = call_expr_nargs (exp);

      /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
	 instead last argument is __builtin_va_arg_pack ().  Defer folding
	 even in that case, until arguments are finalized.  */
      if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
	{
	  tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
	  if (fndecl2
	      && TREE_CODE (fndecl2) == FUNCTION_DECL
	      && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
	    return NULL_TREE;
	}

      /* Builtins wrapped in an always_inline function are folded only
	 after that wrapper has been inlined (see
	 avoid_folding_inline_builtin).  */
      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;

      /* FIXME: Don't use a list in this interface.  */
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	/* Machine-specific builtins are handed to the target hook.  */
	return targetm.fold_builtin (fndecl, CALL_EXPR_ARGS (exp), ignore);
      else
	{
	  /* Try the fixed-arity folders first, then fall back to the
	     varargs folders.  */
	  if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
	    {
	      tree *args = CALL_EXPR_ARGP (exp);
	      ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
	    }
	  if (!ret)
	    ret = fold_builtin_varargs (loc, fndecl, exp, ignore);
	  if (ret)
	    return ret;
	}
    }
  return NULL_TREE;
}
11139
11140 /* Conveniently construct a function call expression. FNDECL names the
11141 function to be called and ARGLIST is a TREE_LIST of arguments. */
11142
11143 tree
11144 build_function_call_expr (location_t loc, tree fndecl, tree arglist)
11145 {
11146 tree fntype = TREE_TYPE (fndecl);
11147 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
11148 int n = list_length (arglist);
11149 tree *argarray = (tree *) alloca (n * sizeof (tree));
11150 int i;
11151
11152 for (i = 0; i < n; i++, arglist = TREE_CHAIN (arglist))
11153 argarray[i] = TREE_VALUE (arglist);
11154 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
11155 }
11156
11157 /* Conveniently construct a function call expression. FNDECL names the
11158 function to be called, N is the number of arguments, and the "..."
11159 parameters are the argument expressions. */
11160
11161 tree
11162 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
11163 {
11164 va_list ap;
11165 tree fntype = TREE_TYPE (fndecl);
11166 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
11167 tree *argarray = (tree *) alloca (n * sizeof (tree));
11168 int i;
11169
11170 va_start (ap, n);
11171 for (i = 0; i < n; i++)
11172 argarray[i] = va_arg (ap, tree);
11173 va_end (ap);
11174 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
11175 }
11176
11177 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
11178 N arguments are passed in the array ARGARRAY. */
11179
tree
fold_builtin_call_array (location_t loc, tree type,
			 tree fn,
			 int n,
			 tree *argarray)
{
  tree ret = NULL_TREE;
  int i;
  tree exp;

  if (TREE_CODE (fn) == ADDR_EXPR)
    {
      tree fndecl = TREE_OPERAND (fn, 0);
      if (TREE_CODE (fndecl) == FUNCTION_DECL
	  && DECL_BUILT_IN (fndecl))
	{
	  /* If last argument is __builtin_va_arg_pack (), arguments to this
	     function are not finalized yet.  Defer folding until they are.  */
	  if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
	    {
	      tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
	      if (fndecl2
		  && TREE_CODE (fndecl2) == FUNCTION_DECL
		  && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
		  && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
		return build_call_array_loc (loc, type, fn, n, argarray);
	    }
	  /* Builtins wrapped in an always_inline function are folded only
	     after inlining (see avoid_folding_inline_builtin).  */
	  if (avoid_folding_inline_builtin (fndecl))
	    return build_call_array_loc (loc, type, fn, n, argarray);
	  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	    {
	      /* The target hook still takes a TREE_LIST; build one in
		 argument order by consing from the back.  */
	      tree arglist = NULL_TREE;
	      for (i = n - 1; i >= 0; i--)
		arglist = tree_cons (NULL_TREE, argarray[i], arglist);
	      ret = targetm.fold_builtin (fndecl, arglist, false);
	      if (ret)
		return ret;
	      return build_call_array_loc (loc, type, fn, n, argarray);
	    }
	  else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
	    {
	      /* First try the transformations that don't require consing up
		 an exp.  */
	      ret = fold_builtin_n (loc, fndecl, argarray, n, false);
	      if (ret)
		return ret;
	    }

	  /* If we got this far, we need to build an exp.  */
	  exp = build_call_array_loc (loc, type, fn, n, argarray);
	  ret = fold_builtin_varargs (loc, fndecl, exp, false);
	  return ret ? ret : exp;
	}
    }

  /* Not a builtin (or not an ADDR_EXPR callee): just build the call.  */
  return build_call_array_loc (loc, type, fn, n, argarray);
}
11237
11238 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
11239 along with N new arguments specified as the "..." parameters. SKIP
11240 is the number of arguments in EXP to be omitted. This function is used
11241 to do varargs-to-varargs transformations. */
11242
static tree
rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
{
  int oldnargs = call_expr_nargs (exp);
  int nargs = oldnargs - skip + n;
  tree fntype = TREE_TYPE (fndecl);
  tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
  tree *buffer;

  if (n > 0)
    {
      int i, j;
      va_list ap;

      buffer = XALLOCAVEC (tree, nargs);
      va_start (ap, n);
      /* The N new arguments come first ...  */
      for (i = 0; i < n; i++)
	buffer[i] = va_arg (ap, tree);
      va_end (ap);
      /* ... followed by EXP's arguments past the first SKIP.  */
      for (j = skip; j < oldnargs; j++, i++)
	buffer[i] = CALL_EXPR_ARG (exp, j);
    }
  else
    /* No new arguments: point straight into EXP's argument vector
       past the first SKIP entries instead of copying.  */
    buffer = CALL_EXPR_ARGP (exp) + skip;

  return fold (build_call_array_loc (loc, TREE_TYPE (exp), fn, nargs, buffer));
}
11270
11271 /* Validate a single argument ARG against a tree code CODE representing
11272 a type. */
11273
11274 static bool
11275 validate_arg (const_tree arg, enum tree_code code)
11276 {
11277 if (!arg)
11278 return false;
11279 else if (code == POINTER_TYPE)
11280 return POINTER_TYPE_P (TREE_TYPE (arg));
11281 else if (code == INTEGER_TYPE)
11282 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
11283 return code == TREE_CODE (TREE_TYPE (arg));
11284 }
11285
11286 /* This function validates the types of a function call argument list
11287 against a specified list of tree_codes. If the last specifier is a 0,
11288 that represents an ellipses, otherwise the last specifier must be a
11289 VOID_TYPE.
11290
11291 This is the GIMPLE version of validate_arglist. Eventually we want to
11292 completely convert builtins.c to work from GIMPLEs and the tree based
11293 validate_arglist will then be removed. */
11294
11295 bool
11296 validate_gimple_arglist (const_gimple call, ...)
11297 {
11298 enum tree_code code;
11299 bool res = 0;
11300 va_list ap;
11301 const_tree arg;
11302 size_t i;
11303
11304 va_start (ap, call);
11305 i = 0;
11306
11307 do
11308 {
11309 code = (enum tree_code) va_arg (ap, int);
11310 switch (code)
11311 {
11312 case 0:
11313 /* This signifies an ellipses, any further arguments are all ok. */
11314 res = true;
11315 goto end;
11316 case VOID_TYPE:
11317 /* This signifies an endlink, if no arguments remain, return
11318 true, otherwise return false. */
11319 res = (i == gimple_call_num_args (call));
11320 goto end;
11321 default:
11322 /* If no parameters remain or the parameter's code does not
11323 match the specified code, return false. Otherwise continue
11324 checking any remaining arguments. */
11325 arg = gimple_call_arg (call, i++);
11326 if (!validate_arg (arg, code))
11327 goto end;
11328 break;
11329 }
11330 }
11331 while (1);
11332
11333 /* We need gotos here since we can only have one VA_CLOSE in a
11334 function. */
11335 end: ;
11336 va_end (ap);
11337
11338 return res;
11339 }
11340
11341 /* This function validates the types of a function call argument list
11342 against a specified list of tree_codes. If the last specifier is a 0,
11343 that represents an ellipses, otherwise the last specifier must be a
11344 VOID_TYPE. */
11345
11346 bool
11347 validate_arglist (const_tree callexpr, ...)
11348 {
11349 enum tree_code code;
11350 bool res = 0;
11351 va_list ap;
11352 const_call_expr_arg_iterator iter;
11353 const_tree arg;
11354
11355 va_start (ap, callexpr);
11356 init_const_call_expr_arg_iterator (callexpr, &iter);
11357
11358 do
11359 {
11360 code = (enum tree_code) va_arg (ap, int);
11361 switch (code)
11362 {
11363 case 0:
11364 /* This signifies an ellipses, any further arguments are all ok. */
11365 res = true;
11366 goto end;
11367 case VOID_TYPE:
11368 /* This signifies an endlink, if no arguments remain, return
11369 true, otherwise return false. */
11370 res = !more_const_call_expr_args_p (&iter);
11371 goto end;
11372 default:
11373 /* If no parameters remain or the parameter's code does not
11374 match the specified code, return false. Otherwise continue
11375 checking any remaining arguments. */
11376 arg = next_const_call_expr_arg (&iter);
11377 if (!validate_arg (arg, code))
11378 goto end;
11379 break;
11380 }
11381 }
11382 while (1);
11383
11384 /* We need gotos here since we can only have one VA_CLOSE in a
11385 function. */
11386 end: ;
11387 va_end (ap);
11388
11389 return res;
11390 }
11391
11392 /* Default target-specific builtin expander that does nothing. */
11393
rtx
default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
			rtx target ATTRIBUTE_UNUSED,
			rtx subtarget ATTRIBUTE_UNUSED,
			enum machine_mode mode ATTRIBUTE_UNUSED,
			int ignore ATTRIBUTE_UNUSED)
{
  /* By default no target-specific builtin is expanded here.  */
  return NULL_RTX;
}
11403
11404 /* Returns true is EXP represents data that would potentially reside
11405 in a readonly section. */
11406
11407 static bool
11408 readonly_data_expr (tree exp)
11409 {
11410 STRIP_NOPS (exp);
11411
11412 if (TREE_CODE (exp) != ADDR_EXPR)
11413 return false;
11414
11415 exp = get_base_address (TREE_OPERAND (exp, 0));
11416 if (!exp)
11417 return false;
11418
11419 /* Make sure we call decl_readonly_section only for trees it
11420 can handle (since it returns true for everything it doesn't
11421 understand). */
11422 if (TREE_CODE (exp) == STRING_CST
11423 || TREE_CODE (exp) == CONSTRUCTOR
11424 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
11425 return decl_readonly_section (exp, 0);
11426 else
11427 return false;
11428 }
11429
11430 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
11431 to the call, and TYPE is its return type.
11432
11433 Return NULL_TREE if no simplification was possible, otherwise return the
11434 simplified form of the call as a tree.
11435
11436 The simplified form may be a constant or other expression which
11437 computes the same value, but in a more efficient manner (including
11438 calls to other builtin functions).
11439
11440 The call may contain arguments which need to be evaluated, but
11441 which are not useful to determine the result of the call. In
11442 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11443 COMPOUND_EXPR will be an argument which must be evaluated.
11444 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11445 COMPOUND_EXPR in the chain will contain the tree for the simplified
11446 form of the builtin function call. */
11447
11448 static tree
11449 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
11450 {
11451 if (!validate_arg (s1, POINTER_TYPE)
11452 || !validate_arg (s2, POINTER_TYPE))
11453 return NULL_TREE;
11454 else
11455 {
11456 tree fn;
11457 const char *p1, *p2;
11458
11459 p2 = c_getstr (s2);
11460 if (p2 == NULL)
11461 return NULL_TREE;
11462
11463 p1 = c_getstr (s1);
11464 if (p1 != NULL)
11465 {
11466 const char *r = strstr (p1, p2);
11467 tree tem;
11468
11469 if (r == NULL)
11470 return build_int_cst (TREE_TYPE (s1), 0);
11471
11472 /* Return an offset into the constant string argument. */
11473 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11474 s1, size_int (r - p1));
11475 return fold_convert_loc (loc, type, tem);
11476 }
11477
11478 /* The argument is const char *, and the result is char *, so we need
11479 a type conversion here to avoid a warning. */
11480 if (p2[0] == '\0')
11481 return fold_convert_loc (loc, type, s1);
11482
11483 if (p2[1] != '\0')
11484 return NULL_TREE;
11485
11486 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11487 if (!fn)
11488 return NULL_TREE;
11489
11490 /* New argument list transforming strstr(s1, s2) to
11491 strchr(s1, s2[0]). */
11492 return build_call_expr_loc (loc, fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
11493 }
11494 }
11495
11496 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
11497 the call, and TYPE is its return type.
11498
11499 Return NULL_TREE if no simplification was possible, otherwise return the
11500 simplified form of the call as a tree.
11501
11502 The simplified form may be a constant or other expression which
11503 computes the same value, but in a more efficient manner (including
11504 calls to other builtin functions).
11505
11506 The call may contain arguments which need to be evaluated, but
11507 which are not useful to determine the result of the call. In
11508 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11509 COMPOUND_EXPR will be an argument which must be evaluated.
11510 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11511 COMPOUND_EXPR in the chain will contain the tree for the simplified
11512 form of the builtin function call. */
11513
11514 static tree
11515 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
11516 {
11517 if (!validate_arg (s1, POINTER_TYPE)
11518 || !validate_arg (s2, INTEGER_TYPE))
11519 return NULL_TREE;
11520 else
11521 {
11522 const char *p1;
11523
11524 if (TREE_CODE (s2) != INTEGER_CST)
11525 return NULL_TREE;
11526
11527 p1 = c_getstr (s1);
11528 if (p1 != NULL)
11529 {
11530 char c;
11531 const char *r;
11532 tree tem;
11533
11534 if (target_char_cast (s2, &c))
11535 return NULL_TREE;
11536
11537 r = strchr (p1, c);
11538
11539 if (r == NULL)
11540 return build_int_cst (TREE_TYPE (s1), 0);
11541
11542 /* Return an offset into the constant string argument. */
11543 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11544 s1, size_int (r - p1));
11545 return fold_convert_loc (loc, type, tem);
11546 }
11547 return NULL_TREE;
11548 }
11549 }
11550
11551 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
11552 the call, and TYPE is its return type.
11553
11554 Return NULL_TREE if no simplification was possible, otherwise return the
11555 simplified form of the call as a tree.
11556
11557 The simplified form may be a constant or other expression which
11558 computes the same value, but in a more efficient manner (including
11559 calls to other builtin functions).
11560
11561 The call may contain arguments which need to be evaluated, but
11562 which are not useful to determine the result of the call. In
11563 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11564 COMPOUND_EXPR will be an argument which must be evaluated.
11565 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11566 COMPOUND_EXPR in the chain will contain the tree for the simplified
11567 form of the builtin function call. */
11568
11569 static tree
11570 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
11571 {
11572 if (!validate_arg (s1, POINTER_TYPE)
11573 || !validate_arg (s2, INTEGER_TYPE))
11574 return NULL_TREE;
11575 else
11576 {
11577 tree fn;
11578 const char *p1;
11579
11580 if (TREE_CODE (s2) != INTEGER_CST)
11581 return NULL_TREE;
11582
11583 p1 = c_getstr (s1);
11584 if (p1 != NULL)
11585 {
11586 char c;
11587 const char *r;
11588 tree tem;
11589
11590 if (target_char_cast (s2, &c))
11591 return NULL_TREE;
11592
11593 r = strrchr (p1, c);
11594
11595 if (r == NULL)
11596 return build_int_cst (TREE_TYPE (s1), 0);
11597
11598 /* Return an offset into the constant string argument. */
11599 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11600 s1, size_int (r - p1));
11601 return fold_convert_loc (loc, type, tem);
11602 }
11603
11604 if (! integer_zerop (s2))
11605 return NULL_TREE;
11606
11607 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11608 if (!fn)
11609 return NULL_TREE;
11610
11611 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11612 return build_call_expr_loc (loc, fn, 2, s1, s2);
11613 }
11614 }
11615
11616 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11617 to the call, and TYPE is its return type.
11618
11619 Return NULL_TREE if no simplification was possible, otherwise return the
11620 simplified form of the call as a tree.
11621
11622 The simplified form may be a constant or other expression which
11623 computes the same value, but in a more efficient manner (including
11624 calls to other builtin functions).
11625
11626 The call may contain arguments which need to be evaluated, but
11627 which are not useful to determine the result of the call. In
11628 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11629 COMPOUND_EXPR will be an argument which must be evaluated.
11630 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11631 COMPOUND_EXPR in the chain will contain the tree for the simplified
11632 form of the builtin function call. */
11633
11634 static tree
11635 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
11636 {
11637 if (!validate_arg (s1, POINTER_TYPE)
11638 || !validate_arg (s2, POINTER_TYPE))
11639 return NULL_TREE;
11640 else
11641 {
11642 tree fn;
11643 const char *p1, *p2;
11644
11645 p2 = c_getstr (s2);
11646 if (p2 == NULL)
11647 return NULL_TREE;
11648
11649 p1 = c_getstr (s1);
11650 if (p1 != NULL)
11651 {
11652 const char *r = strpbrk (p1, p2);
11653 tree tem;
11654
11655 if (r == NULL)
11656 return build_int_cst (TREE_TYPE (s1), 0);
11657
11658 /* Return an offset into the constant string argument. */
11659 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11660 s1, size_int (r - p1));
11661 return fold_convert_loc (loc, type, tem);
11662 }
11663
11664 if (p2[0] == '\0')
11665 /* strpbrk(x, "") == NULL.
11666 Evaluate and ignore s1 in case it had side-effects. */
11667 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
11668
11669 if (p2[1] != '\0')
11670 return NULL_TREE; /* Really call strpbrk. */
11671
11672 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11673 if (!fn)
11674 return NULL_TREE;
11675
11676 /* New argument list transforming strpbrk(s1, s2) to
11677 strchr(s1, s2[0]). */
11678 return build_call_expr_loc (loc, fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
11679 }
11680 }
11681
11682 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11683 to the call.
11684
11685 Return NULL_TREE if no simplification was possible, otherwise return the
11686 simplified form of the call as a tree.
11687
11688 The simplified form may be a constant or other expression which
11689 computes the same value, but in a more efficient manner (including
11690 calls to other builtin functions).
11691
11692 The call may contain arguments which need to be evaluated, but
11693 which are not useful to determine the result of the call. In
11694 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11695 COMPOUND_EXPR will be an argument which must be evaluated.
11696 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11697 COMPOUND_EXPR in the chain will contain the tree for the simplified
11698 form of the builtin function call. */
11699
11700 static tree
11701 fold_builtin_strcat (location_t loc ATTRIBUTE_UNUSED, tree dst, tree src)
11702 {
11703 if (!validate_arg (dst, POINTER_TYPE)
11704 || !validate_arg (src, POINTER_TYPE))
11705 return NULL_TREE;
11706 else
11707 {
11708 const char *p = c_getstr (src);
11709
11710 /* If the string length is zero, return the dst parameter. */
11711 if (p && *p == '\0')
11712 return dst;
11713
11714 return NULL_TREE;
11715 }
11716 }
11717
11718 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11719 arguments to the call.
11720
11721 Return NULL_TREE if no simplification was possible, otherwise return the
11722 simplified form of the call as a tree.
11723
11724 The simplified form may be a constant or other expression which
11725 computes the same value, but in a more efficient manner (including
11726 calls to other builtin functions).
11727
11728 The call may contain arguments which need to be evaluated, but
11729 which are not useful to determine the result of the call. In
11730 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11731 COMPOUND_EXPR will be an argument which must be evaluated.
11732 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11733 COMPOUND_EXPR in the chain will contain the tree for the simplified
11734 form of the builtin function call. */
11735
11736 static tree
11737 fold_builtin_strncat (location_t loc, tree dst, tree src, tree len)
11738 {
11739 if (!validate_arg (dst, POINTER_TYPE)
11740 || !validate_arg (src, POINTER_TYPE)
11741 || !validate_arg (len, INTEGER_TYPE))
11742 return NULL_TREE;
11743 else
11744 {
11745 const char *p = c_getstr (src);
11746
11747 /* If the requested length is zero, or the src parameter string
11748 length is zero, return the dst parameter. */
11749 if (integer_zerop (len) || (p && *p == '\0'))
11750 return omit_two_operands_loc (loc, TREE_TYPE (dst), dst, src, len);
11751
11752 /* If the requested len is greater than or equal to the string
11753 length, call strcat. */
11754 if (TREE_CODE (len) == INTEGER_CST && p
11755 && compare_tree_int (len, strlen (p)) >= 0)
11756 {
11757 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
11758
11759 /* If the replacement _DECL isn't initialized, don't do the
11760 transformation. */
11761 if (!fn)
11762 return NULL_TREE;
11763
11764 return build_call_expr_loc (loc, fn, 2, dst, src);
11765 }
11766 return NULL_TREE;
11767 }
11768 }
11769
11770 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11771 to the call.
11772
11773 Return NULL_TREE if no simplification was possible, otherwise return the
11774 simplified form of the call as a tree.
11775
11776 The simplified form may be a constant or other expression which
11777 computes the same value, but in a more efficient manner (including
11778 calls to other builtin functions).
11779
11780 The call may contain arguments which need to be evaluated, but
11781 which are not useful to determine the result of the call. In
11782 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11783 COMPOUND_EXPR will be an argument which must be evaluated.
11784 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11785 COMPOUND_EXPR in the chain will contain the tree for the simplified
11786 form of the builtin function call. */
11787
11788 static tree
11789 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11790 {
11791 if (!validate_arg (s1, POINTER_TYPE)
11792 || !validate_arg (s2, POINTER_TYPE))
11793 return NULL_TREE;
11794 else
11795 {
11796 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11797
11798 /* If both arguments are constants, evaluate at compile-time. */
11799 if (p1 && p2)
11800 {
11801 const size_t r = strspn (p1, p2);
11802 return size_int (r);
11803 }
11804
11805 /* If either argument is "", return NULL_TREE. */
11806 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11807 /* Evaluate and ignore both arguments in case either one has
11808 side-effects. */
11809 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11810 s1, s2);
11811 return NULL_TREE;
11812 }
11813 }
11814
11815 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11816 to the call.
11817
11818 Return NULL_TREE if no simplification was possible, otherwise return the
11819 simplified form of the call as a tree.
11820
11821 The simplified form may be a constant or other expression which
11822 computes the same value, but in a more efficient manner (including
11823 calls to other builtin functions).
11824
11825 The call may contain arguments which need to be evaluated, but
11826 which are not useful to determine the result of the call. In
11827 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11828 COMPOUND_EXPR will be an argument which must be evaluated.
11829 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11830 COMPOUND_EXPR in the chain will contain the tree for the simplified
11831 form of the builtin function call. */
11832
11833 static tree
11834 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
11835 {
11836 if (!validate_arg (s1, POINTER_TYPE)
11837 || !validate_arg (s2, POINTER_TYPE))
11838 return NULL_TREE;
11839 else
11840 {
11841 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11842
11843 /* If both arguments are constants, evaluate at compile-time. */
11844 if (p1 && p2)
11845 {
11846 const size_t r = strcspn (p1, p2);
11847 return size_int (r);
11848 }
11849
11850 /* If the first argument is "", return NULL_TREE. */
11851 if (p1 && *p1 == '\0')
11852 {
11853 /* Evaluate and ignore argument s2 in case it has
11854 side-effects. */
11855 return omit_one_operand_loc (loc, size_type_node,
11856 size_zero_node, s2);
11857 }
11858
11859 /* If the second argument is "", return __builtin_strlen(s1). */
11860 if (p2 && *p2 == '\0')
11861 {
11862 tree fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11863
11864 /* If the replacement _DECL isn't initialized, don't do the
11865 transformation. */
11866 if (!fn)
11867 return NULL_TREE;
11868
11869 return build_call_expr_loc (loc, fn, 1, s1);
11870 }
11871 return NULL_TREE;
11872 }
11873 }
11874
/* Fold a call to the fputs builtin.  ARG0 and ARG1 are the arguments
   to the call.  IGNORE is true if the value returned
   by the builtin will be ignored.  UNLOCKED is true if this is
   actually a call to fputs_unlocked.  If LEN is non-NULL, it represents
   the known length of the string.  Return NULL_TREE if no simplification
   was possible.  */
11881
11882 tree
11883 fold_builtin_fputs (location_t loc, tree arg0, tree arg1,
11884 bool ignore, bool unlocked, tree len)
11885 {
11886 /* If we're using an unlocked function, assume the other unlocked
11887 functions exist explicitly. */
11888 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
11889 : implicit_built_in_decls[BUILT_IN_FPUTC];
11890 tree const fn_fwrite = unlocked ? built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
11891 : implicit_built_in_decls[BUILT_IN_FWRITE];
11892
11893 /* If the return value is used, don't do the transformation. */
11894 if (!ignore)
11895 return NULL_TREE;
11896
11897 /* Verify the arguments in the original call. */
11898 if (!validate_arg (arg0, POINTER_TYPE)
11899 || !validate_arg (arg1, POINTER_TYPE))
11900 return NULL_TREE;
11901
11902 if (! len)
11903 len = c_strlen (arg0, 0);
11904
11905 /* Get the length of the string passed to fputs. If the length
11906 can't be determined, punt. */
11907 if (!len
11908 || TREE_CODE (len) != INTEGER_CST)
11909 return NULL_TREE;
11910
11911 switch (compare_tree_int (len, 1))
11912 {
11913 case -1: /* length is 0, delete the call entirely . */
11914 return omit_one_operand_loc (loc, integer_type_node,
11915 integer_zero_node, arg1);;
11916
11917 case 0: /* length is 1, call fputc. */
11918 {
11919 const char *p = c_getstr (arg0);
11920
11921 if (p != NULL)
11922 {
11923 if (fn_fputc)
11924 return build_call_expr_loc (loc, fn_fputc, 2,
11925 build_int_cst (NULL_TREE, p[0]), arg1);
11926 else
11927 return NULL_TREE;
11928 }
11929 }
11930 /* FALLTHROUGH */
11931 case 1: /* length is greater than 1, call fwrite. */
11932 {
11933 /* If optimizing for size keep fputs. */
11934 if (optimize_function_for_size_p (cfun))
11935 return NULL_TREE;
11936 /* New argument list transforming fputs(string, stream) to
11937 fwrite(string, 1, len, stream). */
11938 if (fn_fwrite)
11939 return build_call_expr_loc (loc, fn_fwrite, 4, arg0,
11940 size_one_node, len, arg1);
11941 else
11942 return NULL_TREE;
11943 }
11944 default:
11945 gcc_unreachable ();
11946 }
11947 return NULL_TREE;
11948 }
11949
11950 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
11951 produced. False otherwise. This is done so that we don't output the error
11952 or warning twice or three times. */
11953
11954 bool
11955 fold_builtin_next_arg (tree exp, bool va_start_p)
11956 {
11957 tree fntype = TREE_TYPE (current_function_decl);
11958 int nargs = call_expr_nargs (exp);
11959 tree arg;
11960
11961 if (TYPE_ARG_TYPES (fntype) == 0
11962 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
11963 == void_type_node))
11964 {
11965 error ("%<va_start%> used in function with fixed args");
11966 return true;
11967 }
11968
11969 if (va_start_p)
11970 {
11971 if (va_start_p && (nargs != 2))
11972 {
11973 error ("wrong number of arguments to function %<va_start%>");
11974 return true;
11975 }
11976 arg = CALL_EXPR_ARG (exp, 1);
11977 }
11978 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11979 when we checked the arguments and if needed issued a warning. */
11980 else
11981 {
11982 if (nargs == 0)
11983 {
11984 /* Evidently an out of date version of <stdarg.h>; can't validate
11985 va_start's second argument, but can still work as intended. */
11986 warning (0, "%<__builtin_next_arg%> called without an argument");
11987 return true;
11988 }
11989 else if (nargs > 1)
11990 {
11991 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11992 return true;
11993 }
11994 arg = CALL_EXPR_ARG (exp, 0);
11995 }
11996
11997 if (TREE_CODE (arg) == SSA_NAME)
11998 arg = SSA_NAME_VAR (arg);
11999
12000 /* We destructively modify the call to be __builtin_va_start (ap, 0)
12001 or __builtin_next_arg (0) the first time we see it, after checking
12002 the arguments and if needed issuing a warning. */
12003 if (!integer_zerop (arg))
12004 {
12005 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
12006
12007 /* Strip off all nops for the sake of the comparison. This
12008 is not quite the same as STRIP_NOPS. It does more.
12009 We must also strip off INDIRECT_EXPR for C++ reference
12010 parameters. */
12011 while (CONVERT_EXPR_P (arg)
12012 || TREE_CODE (arg) == INDIRECT_REF)
12013 arg = TREE_OPERAND (arg, 0);
12014 if (arg != last_parm)
12015 {
12016 /* FIXME: Sometimes with the tree optimizers we can get the
12017 not the last argument even though the user used the last
12018 argument. We just warn and set the arg to be the last
12019 argument so that we will get wrong-code because of
12020 it. */
12021 warning (0, "second parameter of %<va_start%> not last named argument");
12022 }
12023
12024 /* Undefined by C99 7.15.1.4p4 (va_start):
12025 "If the parameter parmN is declared with the register storage
12026 class, with a function or array type, or with a type that is
12027 not compatible with the type that results after application of
12028 the default argument promotions, the behavior is undefined."
12029 */
12030 else if (DECL_REGISTER (arg))
12031 warning (0, "undefined behaviour when second parameter of "
12032 "%<va_start%> is declared with %<register%> storage");
12033
12034 /* We want to verify the second parameter just once before the tree
12035 optimizers are run and then avoid keeping it in the tree,
12036 as otherwise we could warn even for correct code like:
12037 void foo (int i, ...)
12038 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
12039 if (va_start_p)
12040 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
12041 else
12042 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
12043 }
12044 return false;
12045 }
12046
12047
12048 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
12049 ORIG may be null if this is a 2-argument call. We don't attempt to
12050 simplify calls with more than 3 arguments.
12051
12052 Return NULL_TREE if no simplification was possible, otherwise return the
12053 simplified form of the call as a tree. If IGNORED is true, it means that
12054 the caller does not use the returned value of the function. */
12055
12056 static tree
12057 fold_builtin_sprintf (location_t loc, tree dest, tree fmt,
12058 tree orig, int ignored)
12059 {
12060 tree call, retval;
12061 const char *fmt_str = NULL;
12062
12063 /* Verify the required arguments in the original call. We deal with two
12064 types of sprintf() calls: 'sprintf (str, fmt)' and
12065 'sprintf (dest, "%s", orig)'. */
12066 if (!validate_arg (dest, POINTER_TYPE)
12067 || !validate_arg (fmt, POINTER_TYPE))
12068 return NULL_TREE;
12069 if (orig && !validate_arg (orig, POINTER_TYPE))
12070 return NULL_TREE;
12071
12072 /* Check whether the format is a literal string constant. */
12073 fmt_str = c_getstr (fmt);
12074 if (fmt_str == NULL)
12075 return NULL_TREE;
12076
12077 call = NULL_TREE;
12078 retval = NULL_TREE;
12079
12080 if (!init_target_chars ())
12081 return NULL_TREE;
12082
12083 /* If the format doesn't contain % args or %%, use strcpy. */
12084 if (strchr (fmt_str, target_percent) == NULL)
12085 {
12086 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
12087
12088 if (!fn)
12089 return NULL_TREE;
12090
12091 /* Don't optimize sprintf (buf, "abc", ptr++). */
12092 if (orig)
12093 return NULL_TREE;
12094
12095 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
12096 'format' is known to contain no % formats. */
12097 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
12098 if (!ignored)
12099 retval = build_int_cst (NULL_TREE, strlen (fmt_str));
12100 }
12101
12102 /* If the format is "%s", use strcpy if the result isn't used. */
12103 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
12104 {
12105 tree fn;
12106 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
12107
12108 if (!fn)
12109 return NULL_TREE;
12110
12111 /* Don't crash on sprintf (str1, "%s"). */
12112 if (!orig)
12113 return NULL_TREE;
12114
12115 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
12116 if (!ignored)
12117 {
12118 retval = c_strlen (orig, 1);
12119 if (!retval || TREE_CODE (retval) != INTEGER_CST)
12120 return NULL_TREE;
12121 }
12122 call = build_call_expr_loc (loc, fn, 2, dest, orig);
12123 }
12124
12125 if (call && retval)
12126 {
12127 retval = fold_convert_loc
12128 (loc, TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
12129 retval);
12130 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
12131 }
12132 else
12133 return call;
12134 }
12135
12136 /* Expand a call EXP to __builtin_object_size. */
12137
12138 rtx
12139 expand_builtin_object_size (tree exp)
12140 {
12141 tree ost;
12142 int object_size_type;
12143 tree fndecl = get_callee_fndecl (exp);
12144
12145 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
12146 {
12147 error ("%Kfirst argument of %D must be a pointer, second integer constant",
12148 exp, fndecl);
12149 expand_builtin_trap ();
12150 return const0_rtx;
12151 }
12152
12153 ost = CALL_EXPR_ARG (exp, 1);
12154 STRIP_NOPS (ost);
12155
12156 if (TREE_CODE (ost) != INTEGER_CST
12157 || tree_int_cst_sgn (ost) < 0
12158 || compare_tree_int (ost, 3) > 0)
12159 {
12160 error ("%Klast argument of %D is not integer constant between 0 and 3",
12161 exp, fndecl);
12162 expand_builtin_trap ();
12163 return const0_rtx;
12164 }
12165
12166 object_size_type = tree_low_cst (ost, 0);
12167
12168 return object_size_type < 2 ? constm1_rtx : const0_rtx;
12169 }
12170
12171 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12172 FCODE is the BUILT_IN_* to use.
12173 Return NULL_RTX if we failed; the caller should emit a normal call,
12174 otherwise try to get the result in TARGET, if convenient (and in
12175 mode MODE if that's convenient). */
12176
static rtx
expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
			   enum built_in_function fcode)
{
  tree dest, src, len, size;

  /* The second argument is an integer for __memset_chk, a pointer for
     the copy/move variants.  */
  if (!validate_arglist (exp,
			 POINTER_TYPE,
			 fcode == BUILT_IN_MEMSET_CHK
			 ? INTEGER_TYPE : POINTER_TYPE,
			 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dest = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);
  len = CALL_EXPR_ARG (exp, 2);
  size = CALL_EXPR_ARG (exp, 3);

  /* SIZE must be a known constant to reason about the check at all.  */
  if (! host_integerp (size, 1))
    return NULL_RTX;

  if (host_integerp (len, 1) || integer_all_onesp (size))
    {
      tree fn;

      /* A constant LEN larger than a known SIZE is a guaranteed
	 overflow: warn and leave the call alone so it traps/fails
	 at run time.  (SIZE == -1 means "size unknown", so skip.)  */
      if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
	{
	  warning_at (tree_nonartificial_location (exp),
		      0, "%Kcall to %D will always overflow destination buffer",
		      exp, get_callee_fndecl (exp));
	  return NULL_RTX;
	}

      fn = NULL_TREE;
      /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
	 mem{cpy,pcpy,move,set} is available.  */
      switch (fcode)
	{
	case BUILT_IN_MEMCPY_CHK:
	  fn = built_in_decls[BUILT_IN_MEMCPY];
	  break;
	case BUILT_IN_MEMPCPY_CHK:
	  fn = built_in_decls[BUILT_IN_MEMPCPY];
	  break;
	case BUILT_IN_MEMMOVE_CHK:
	  fn = built_in_decls[BUILT_IN_MEMMOVE];
	  break;
	case BUILT_IN_MEMSET_CHK:
	  fn = built_in_decls[BUILT_IN_MEMSET];
	  break;
	default:
	  break;
	}

      if (! fn)
	return NULL_RTX;

      /* Build the unchecked call, then expand any argument
	 side-effects chained in as COMPOUND_EXPRs before expanding
	 the call itself.  */
      fn = build_call_expr (fn, 3, dest, src, len);
      STRIP_TYPE_NOPS (fn);
      while (TREE_CODE (fn) == COMPOUND_EXPR)
	{
	  expand_expr (TREE_OPERAND (fn, 0), const0_rtx, VOIDmode,
		       EXPAND_NORMAL);
	  fn = TREE_OPERAND (fn, 1);
	}
      if (TREE_CODE (fn) == CALL_EXPR)
	CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_expr (fn, target, mode, EXPAND_NORMAL);
    }
  else if (fcode == BUILT_IN_MEMSET_CHK)
    return NULL_RTX;
  else
    {
      unsigned int dest_align
	= get_pointer_alignment (dest, BIGGEST_ALIGNMENT);

      /* If DEST is not a pointer type, call the normal function.  */
      if (dest_align == 0)
	return NULL_RTX;

      /* If SRC and DEST are the same (and not volatile), do nothing.  */
      if (operand_equal_p (src, dest, 0))
	{
	  tree expr;

	  if (fcode != BUILT_IN_MEMPCPY_CHK)
	    {
	      /* Evaluate and ignore LEN in case it has side-effects.  */
	      expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
	      return expand_expr (dest, target, mode, EXPAND_NORMAL);
	    }

	  /* __mempcpy_chk returns DEST + LEN rather than DEST.  */
	  expr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
	  return expand_expr (expr, target, mode, EXPAND_NORMAL);
	}

      /* __memmove_chk special case.  */
      if (fcode == BUILT_IN_MEMMOVE_CHK)
	{
	  unsigned int src_align
	    = get_pointer_alignment (src, BIGGEST_ALIGNMENT);

	  if (src_align == 0)
	    return NULL_RTX;

	  /* If src is categorized for a readonly section we can use
	     normal __memcpy_chk.  */
	  if (readonly_data_expr (src))
	    {
	      tree fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
	      if (!fn)
		return NULL_RTX;
	      fn = build_call_expr (fn, 4, dest, src, len, size);
	      STRIP_TYPE_NOPS (fn);
	      /* Same COMPOUND_EXPR unwrapping as above: expand argument
		 side-effects first, then the call.  */
	      while (TREE_CODE (fn) == COMPOUND_EXPR)
		{
		  expand_expr (TREE_OPERAND (fn, 0), const0_rtx, VOIDmode,
			       EXPAND_NORMAL);
		  fn = TREE_OPERAND (fn, 1);
		}
	      if (TREE_CODE (fn) == CALL_EXPR)
		CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
	      return expand_expr (fn, target, mode, EXPAND_NORMAL);
	    }
	}
      return NULL_RTX;
    }
}
12305
12306 /* Emit warning if a buffer overflow is detected at compile time. */
12307
static void
maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
{
  int is_strlen = 0;
  tree len, size;
  location_t loc = tree_nonartificial_location (exp);

  /* Pick out the length-like and object-size arguments; their
     positions depend on which _chk builtin this is.  */
  switch (fcode)
    {
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
    /* For __strcat_chk the warning will be emitted only if overflowing
       by at least strlen (dest) + 1 bytes.  */
    case BUILT_IN_STRCAT_CHK:
      len = CALL_EXPR_ARG (exp, 1);
      size = CALL_EXPR_ARG (exp, 2);
      is_strlen = 1;
      break;
    case BUILT_IN_STRNCAT_CHK:
    case BUILT_IN_STRNCPY_CHK:
      len = CALL_EXPR_ARG (exp, 2);
      size = CALL_EXPR_ARG (exp, 3);
      break;
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      len = CALL_EXPR_ARG (exp, 1);
      size = CALL_EXPR_ARG (exp, 3);
      break;
    default:
      gcc_unreachable ();
    }

  if (!len || !size)
    return;

  /* SIZE == (size_t) -1 marks "object size unknown": never warn.  */
  if (! host_integerp (size, 1) || integer_all_onesp (size))
    return;

  if (is_strlen)
    {
      /* Here LEN is really a source string: warn only when its
	 constant length is known and is >= SIZE.  */
      len = c_strlen (len, 1);
      if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
	return;
    }
  else if (fcode == BUILT_IN_STRNCAT_CHK)
    {
      tree src = CALL_EXPR_ARG (exp, 1);
      /* Only consider warning when the bound LEN is a constant >= SIZE.  */
      if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
	return;
      src = c_strlen (src, 1);
      if (! src || ! host_integerp (src, 1))
	{
	  /* Bound >= SIZE but the source length is unknown, so the
	     call may or may not overflow: use the weaker wording.  */
	  warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
		      exp, get_callee_fndecl (exp));
	  return;
	}
      else if (tree_int_cst_lt (src, size))
	return;
    }
  else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
    return;

  warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
	      exp, get_callee_fndecl (exp));
}
12373
12374 /* Emit warning if a buffer overflow is detected at compile time
12375 in __sprintf_chk/__vsprintf_chk calls. */
12376
static void
maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
{
  tree dest, size, len, fmt, flag;
  const char *fmt_str;
  int nargs = call_expr_nargs (exp);

  /* Verify the required arguments in the original call.  */

  if (nargs < 4)
    return;
  dest = CALL_EXPR_ARG (exp, 0);
  flag = CALL_EXPR_ARG (exp, 1);
  size = CALL_EXPR_ARG (exp, 2);
  fmt = CALL_EXPR_ARG (exp, 3);

  /* SIZE == (size_t) -1 means the destination size is unknown.  */
  if (! host_integerp (size, 1) || integer_all_onesp (size))
    return;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return;

  if (!init_target_chars ())
    return;

  /* If the format doesn't contain % args or %%, we know its size.  */
  if (strchr (fmt_str, target_percent) == 0)
    len = build_int_cstu (size_type_node, strlen (fmt_str));
  /* If the format is "%s" and first ... argument is a string literal,
     we know it too.  */
  else if (fcode == BUILT_IN_SPRINTF_CHK
	   && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree arg;

      if (nargs < 5)
	return;
      arg = CALL_EXPR_ARG (exp, 4);
      if (! POINTER_TYPE_P (TREE_TYPE (arg)))
	return;

      /* Need a compile-time constant length for the %s argument.  */
      len = c_strlen (arg, 1);
      if (!len || ! host_integerp (len, 1))
	return;
    }
  else
    return;

  /* LEN excludes the terminating NUL, so LEN >= SIZE overflows.  */
  if (! tree_int_cst_lt (len, size))
    warning_at (tree_nonartificial_location (exp),
		0, "%Kcall to %D will always overflow destination buffer",
		exp, get_callee_fndecl (exp));
}
12432
12433 /* Emit warning if a free is called with address of a variable. */
12434
12435 static void
12436 maybe_emit_free_warning (tree exp)
12437 {
12438 tree arg = CALL_EXPR_ARG (exp, 0);
12439
12440 STRIP_NOPS (arg);
12441 if (TREE_CODE (arg) != ADDR_EXPR)
12442 return;
12443
12444 arg = get_base_address (TREE_OPERAND (arg, 0));
12445 if (arg == NULL || INDIRECT_REF_P (arg))
12446 return;
12447
12448 if (SSA_VAR_P (arg))
12449 warning_at (tree_nonartificial_location (exp),
12450 0, "%Kattempt to free a non-heap object %qD", exp, arg);
12451 else
12452 warning_at (tree_nonartificial_location (exp),
12453 0, "%Kattempt to free a non-heap object", exp);
12454 }
12455
12456 /* Fold a call to __builtin_object_size with arguments PTR and OST,
12457 if possible. */
12458
tree
fold_builtin_object_size (tree ptr, tree ost)
{
  tree ret = NULL_TREE;
  int object_size_type;

  if (!validate_arg (ptr, POINTER_TYPE)
      || !validate_arg (ost, INTEGER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (ost);

  /* The object-size type must be a literal 0..3.  */
  if (TREE_CODE (ost) != INTEGER_CST
      || tree_int_cst_sgn (ost) < 0
      || compare_tree_int (ost, 3) > 0)
    return NULL_TREE;

  object_size_type = tree_low_cst (ost, 0);

  /* __builtin_object_size doesn't evaluate side-effects in its arguments;
     if there are any side-effects, it returns (size_t) -1 for types 0 and 1
     and (size_t) 0 for types 2 and 3.  */
  if (TREE_SIDE_EFFECTS (ptr))
    return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);

  if (TREE_CODE (ptr) == ADDR_EXPR)
    ret = build_int_cstu (size_type_node,
			  compute_builtin_object_size (ptr, object_size_type));

  else if (TREE_CODE (ptr) == SSA_NAME)
    {
      unsigned HOST_WIDE_INT bytes;

      /* If object size is not known yet, delay folding until
	 later.  Maybe subsequent passes will help determining
	 it.  */
      bytes = compute_builtin_object_size (ptr, object_size_type);
      if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2
					     ? -1 : 0))
	ret = build_int_cstu (size_type_node, bytes);
    }

  if (ret)
    {
      /* If the computed constant does not fit in size_t
	 (fit_double_type returns nonzero on overflow), punt rather
	 than fold to a truncated value.  */
      unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (ret);
      HOST_WIDE_INT high = TREE_INT_CST_HIGH (ret);
      if (fit_double_type (low, high, &low, &high, TREE_TYPE (ret)))
	ret = NULL_TREE;
    }

  return ret;
}
12511
12512 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12513 DEST, SRC, LEN, and SIZE are the arguments to the call.
12514 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
12515 code of the builtin. If MAXLEN is not NULL, it is maximum length
12516 passed as third argument. */
12517
tree
fold_builtin_memory_chk (location_t loc, tree fndecl,
			 tree dest, tree src, tree len, tree size,
			 tree maxlen, bool ignore,
			 enum built_in_function fcode)
{
  tree fn;

  /* The second argument is an integer for __memset_chk, a pointer for
     the copy/move variants.  */
  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (src,
			(fcode == BUILT_IN_MEMSET_CHK
			 ? INTEGER_TYPE : POINTER_TYPE))
      || !validate_arg (len, INTEGER_TYPE)
      || !validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;

  /* If SRC and DEST are the same (and not volatile), return DEST
     (resp. DEST+LEN for __mempcpy_chk).  */
  if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
    {
      if (fcode != BUILT_IN_MEMPCPY_CHK)
	return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
				     dest, len);
      else
	{
	  tree temp = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest),
				       dest, len);
	  return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), temp);
	}
    }

  /* SIZE must be a known constant to reason about the check.  */
  if (! host_integerp (size, 1))
    return NULL_TREE;

  /* SIZE == (size_t) -1 means "unknown"; only then can we skip the
     length comparison below.  */
  if (! integer_all_onesp (size))
    {
      if (! host_integerp (len, 1))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
	    {
	      if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
		{
		  /* (void) __mempcpy_chk () can be optimized into
		     (void) __memcpy_chk ().  */
		  fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
		  if (!fn)
		    return NULL_TREE;

		  return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
		}
	      return NULL_TREE;
	    }
	}
      else
	maxlen = len;

      /* The copy could overflow: keep the checking variant.  */
      if (tree_int_cst_lt (size, maxlen))
	return NULL_TREE;
    }

  fn = NULL_TREE;
  /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
     mem{cpy,pcpy,move,set} is available.  */
  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
      fn = built_in_decls[BUILT_IN_MEMCPY];
      break;
    case BUILT_IN_MEMPCPY_CHK:
      fn = built_in_decls[BUILT_IN_MEMPCPY];
      break;
    case BUILT_IN_MEMMOVE_CHK:
      fn = built_in_decls[BUILT_IN_MEMMOVE];
      break;
    case BUILT_IN_MEMSET_CHK:
      fn = built_in_decls[BUILT_IN_MEMSET];
      break;
    default:
      break;
    }

  if (!fn)
    return NULL_TREE;

  return build_call_expr_loc (loc, fn, 3, dest, src, len);
}
12607
12608 /* Fold a call to the __st[rp]cpy_chk builtin.
12609 DEST, SRC, and SIZE are the arguments to the call.
12610 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
12611 code of the builtin. If MAXLEN is not NULL, it is maximum length of
12612 strings passed as second argument. */
12613
tree
fold_builtin_stxcpy_chk (location_t loc, tree fndecl, tree dest,
			 tree src, tree size,
			 tree maxlen, bool ignore,
			 enum built_in_function fcode)
{
  tree len, fn;

  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (src, POINTER_TYPE)
      || !validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
    return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);

  /* SIZE must be a known constant to reason about the check.  */
  if (! host_integerp (size, 1))
    return NULL_TREE;

  /* SIZE == (size_t) -1 means "unknown"; only then can the length
     comparison below be skipped.  */
  if (! integer_all_onesp (size))
    {
      len = c_strlen (src, 1);
      if (! len || ! host_integerp (len, 1))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
	    {
	      if (fcode == BUILT_IN_STPCPY_CHK)
		{
		  if (! ignore)
		    return NULL_TREE;

		  /* If return value of __stpcpy_chk is ignored,
		     optimize into __strcpy_chk.  */
		  fn = built_in_decls[BUILT_IN_STRCPY_CHK];
		  if (!fn)
		    return NULL_TREE;

		  return build_call_expr_loc (loc, fn, 3, dest, src, size);
		}

	      if (! len || TREE_SIDE_EFFECTS (len))
		return NULL_TREE;

	      /* If c_strlen returned something, but not a constant,
		 transform __strcpy_chk into __memcpy_chk.  */
	      fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
	      if (!fn)
		return NULL_TREE;

	      /* Copy LEN + 1 bytes to include the terminating NUL.  */
	      len = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
	      return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
				       build_call_expr_loc (loc, fn, 4,
							    dest, src, len, size));
	    }
	}
      else
	maxlen = len;

      /* The copy could overflow: keep the checking variant.  */
      if (! tree_int_cst_lt (maxlen, size))
	return NULL_TREE;
    }

  /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available.  */
  fn = built_in_decls[fcode == BUILT_IN_STPCPY_CHK
		      ? BUILT_IN_STPCPY : BUILT_IN_STRCPY];
  if (!fn)
    return NULL_TREE;

  return build_call_expr_loc (loc, fn, 2, dest, src);
}
12688
12689 /* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
12690 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12691 length passed as third argument. */
12692
12693 tree
12694 fold_builtin_strncpy_chk (location_t loc, tree dest, tree src,
12695 tree len, tree size, tree maxlen)
12696 {
12697 tree fn;
12698
12699 if (!validate_arg (dest, POINTER_TYPE)
12700 || !validate_arg (src, POINTER_TYPE)
12701 || !validate_arg (len, INTEGER_TYPE)
12702 || !validate_arg (size, INTEGER_TYPE))
12703 return NULL_TREE;
12704
12705 if (! host_integerp (size, 1))
12706 return NULL_TREE;
12707
12708 if (! integer_all_onesp (size))
12709 {
12710 if (! host_integerp (len, 1))
12711 {
12712 /* If LEN is not constant, try MAXLEN too.
12713 For MAXLEN only allow optimizing into non-_ocs function
12714 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12715 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12716 return NULL_TREE;
12717 }
12718 else
12719 maxlen = len;
12720
12721 if (tree_int_cst_lt (size, maxlen))
12722 return NULL_TREE;
12723 }
12724
12725 /* If __builtin_strncpy_chk is used, assume strncpy is available. */
12726 fn = built_in_decls[BUILT_IN_STRNCPY];
12727 if (!fn)
12728 return NULL_TREE;
12729
12730 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12731 }
12732
12733 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12734 are the arguments to the call. */
12735
12736 static tree
12737 fold_builtin_strcat_chk (location_t loc, tree fndecl, tree dest,
12738 tree src, tree size)
12739 {
12740 tree fn;
12741 const char *p;
12742
12743 if (!validate_arg (dest, POINTER_TYPE)
12744 || !validate_arg (src, POINTER_TYPE)
12745 || !validate_arg (size, INTEGER_TYPE))
12746 return NULL_TREE;
12747
12748 p = c_getstr (src);
12749 /* If the SRC parameter is "", return DEST. */
12750 if (p && *p == '\0')
12751 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12752
12753 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
12754 return NULL_TREE;
12755
12756 /* If __builtin_strcat_chk is used, assume strcat is available. */
12757 fn = built_in_decls[BUILT_IN_STRCAT];
12758 if (!fn)
12759 return NULL_TREE;
12760
12761 return build_call_expr_loc (loc, fn, 2, dest, src);
12762 }
12763
12764 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12765 LEN, and SIZE. */
12766
12767 static tree
12768 fold_builtin_strncat_chk (location_t loc, tree fndecl,
12769 tree dest, tree src, tree len, tree size)
12770 {
12771 tree fn;
12772 const char *p;
12773
12774 if (!validate_arg (dest, POINTER_TYPE)
12775 || !validate_arg (src, POINTER_TYPE)
12776 || !validate_arg (size, INTEGER_TYPE)
12777 || !validate_arg (size, INTEGER_TYPE))
12778 return NULL_TREE;
12779
12780 p = c_getstr (src);
12781 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
12782 if (p && *p == '\0')
12783 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12784 else if (integer_zerop (len))
12785 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12786
12787 if (! host_integerp (size, 1))
12788 return NULL_TREE;
12789
12790 if (! integer_all_onesp (size))
12791 {
12792 tree src_len = c_strlen (src, 1);
12793 if (src_len
12794 && host_integerp (src_len, 1)
12795 && host_integerp (len, 1)
12796 && ! tree_int_cst_lt (len, src_len))
12797 {
12798 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
12799 fn = built_in_decls[BUILT_IN_STRCAT_CHK];
12800 if (!fn)
12801 return NULL_TREE;
12802
12803 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12804 }
12805 return NULL_TREE;
12806 }
12807
12808 /* If __builtin_strncat_chk is used, assume strncat is available. */
12809 fn = built_in_decls[BUILT_IN_STRNCAT];
12810 if (!fn)
12811 return NULL_TREE;
12812
12813 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12814 }
12815
12816 /* Fold a call EXP to __{,v}sprintf_chk.  Return NULL_TREE if
12817    a normal call should be emitted rather than expanding the function
12818    inline.  FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK.  */
12819 
12820 static tree
12821 fold_builtin_sprintf_chk (location_t loc, tree exp,
12822 			  enum built_in_function fcode)
12823 {
12824   tree dest, size, len, fn, fmt, flag;
12825   const char *fmt_str;
12826   int nargs = call_expr_nargs (exp);
12827 
12828   /* Verify the required arguments in the original call:
     __sprintf_chk (dest, flag, size, fmt, ...).  */
12829   if (nargs < 4)
12830     return NULL_TREE;
12831   dest = CALL_EXPR_ARG (exp, 0);
12832   if (!validate_arg (dest, POINTER_TYPE))
12833     return NULL_TREE;
12834   flag = CALL_EXPR_ARG (exp, 1);
12835   if (!validate_arg (flag, INTEGER_TYPE))
12836     return NULL_TREE;
12837   size = CALL_EXPR_ARG (exp, 2);
12838   if (!validate_arg (size, INTEGER_TYPE))
12839     return NULL_TREE;
12840   fmt = CALL_EXPR_ARG (exp, 3);
12841   if (!validate_arg (fmt, POINTER_TYPE))
12842     return NULL_TREE;
12843 
  /* The destination object size must be a compile-time constant.  */
12844   if (! host_integerp (size, 1))
12845     return NULL_TREE;
12846 
  /* LEN, when determinable below, is the known output length.  */
12847   len = NULL_TREE;
12848 
12849   if (!init_target_chars ())
12850     return NULL_TREE;
12851 
12852   /* Check whether the format is a literal string constant.  */
12853   fmt_str = c_getstr (fmt);
12854   if (fmt_str != NULL)
12855     {
12856       /* If the format doesn't contain % args or %%, we know the size.  */
12857       if (strchr (fmt_str, target_percent) == 0)
12858 	{
12859 	  if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
12860 	    len = build_int_cstu (size_type_node, strlen (fmt_str));
12861 	}
12862       /* If the format is "%s" and first ... argument is a string literal,
12863 	 we know the size too.  */
12864       else if (fcode == BUILT_IN_SPRINTF_CHK
12865 	       && strcmp (fmt_str, target_percent_s) == 0)
12866 	{
12867 	  tree arg;
12868 
12869 	  if (nargs == 5)
12870 	    {
12871 	      arg = CALL_EXPR_ARG (exp, 4);
12872 	      if (validate_arg (arg, POINTER_TYPE))
12873 		{
12874 		  len = c_strlen (arg, 1);
12875 		  if (! len || ! host_integerp (len, 1))
12876 		    len = NULL_TREE;
12877 		}
12878 	    }
12879 	}
12880     }
12881 
  /* Unless SIZE is the "unknown object size" marker (all ones), only
     fold when the known output length fits: LEN < SIZE.  */
12882   if (! integer_all_onesp (size))
12883     {
12884       if (! len || ! tree_int_cst_lt (len, size))
12885 	return NULL_TREE;
12886     }
12887 
12888   /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
12889      or if format doesn't contain % chars or is "%s".  */
12890   if (! integer_zerop (flag))
12891     {
12892       if (fmt_str == NULL)
12893 	return NULL_TREE;
12894       if (strchr (fmt_str, target_percent) != NULL
12895 	  && strcmp (fmt_str, target_percent_s))
12896 	return NULL_TREE;
12897     }
12898 
12899   /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available.  */
12900   fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
12901 		      ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
12902   if (!fn)
12903     return NULL_TREE;
12904 
  /* Drop the four _chk arguments and pass (dest, fmt) plus any
     remaining varargs to the non-checking function.  */
12905   return rewrite_call_expr (loc, exp, 4, fn, 2, dest, fmt);
12906 }
12907
12908 /* Fold a call EXP to __{,v}snprintf_chk.  Return NULL_TREE if
12909    a normal call should be emitted rather than expanding the function
12910    inline.  FCODE is either BUILT_IN_SNPRINTF_CHK or
12911    BUILT_IN_VSNPRINTF_CHK.  If MAXLEN is not NULL, it is maximum length
12912    passed as second argument.  */
12913 
12914 tree
12915 fold_builtin_snprintf_chk (location_t loc, tree exp, tree maxlen,
12916 			   enum built_in_function fcode)
12917 {
12918   tree dest, size, len, fn, fmt, flag;
12919   const char *fmt_str;
12920 
12921   /* Verify the required arguments in the original call:
     __snprintf_chk (dest, len, flag, size, fmt, ...).  */
12922   if (call_expr_nargs (exp) < 5)
12923     return NULL_TREE;
12924   dest = CALL_EXPR_ARG (exp, 0);
12925   if (!validate_arg (dest, POINTER_TYPE))
12926     return NULL_TREE;
12927   len = CALL_EXPR_ARG (exp, 1);
12928   if (!validate_arg (len, INTEGER_TYPE))
12929     return NULL_TREE;
12930   flag = CALL_EXPR_ARG (exp, 2);
12931   if (!validate_arg (flag, INTEGER_TYPE))
12932     return NULL_TREE;
12933   size = CALL_EXPR_ARG (exp, 3);
12934   if (!validate_arg (size, INTEGER_TYPE))
12935     return NULL_TREE;
12936   fmt = CALL_EXPR_ARG (exp, 4);
12937   if (!validate_arg (fmt, POINTER_TYPE))
12938     return NULL_TREE;
12939 
  /* The destination object size must be a compile-time constant.  */
12940   if (! host_integerp (size, 1))
12941     return NULL_TREE;
12942 
  /* Unless SIZE is the "unknown object size" marker (all ones),
     prove LEN (or its fallback bound MAXLEN) fits in SIZE.  */
12943   if (! integer_all_onesp (size))
12944     {
12945       if (! host_integerp (len, 1))
12946 	{
12947 	  /* If LEN is not constant, try MAXLEN too.
12948 	     For MAXLEN only allow optimizing into non-_ocs function
12949 	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
12950 	  if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12951 	    return NULL_TREE;
12952 	}
12953       else
12954 	maxlen = len;
12955 
12956       if (tree_int_cst_lt (size, maxlen))
12957 	return NULL_TREE;
12958     }
12959 
12960   if (!init_target_chars ())
12961     return NULL_TREE;
12962 
12963   /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
12964      or if format doesn't contain % chars or is "%s".  */
12965   if (! integer_zerop (flag))
12966     {
12967       fmt_str = c_getstr (fmt);
12968       if (fmt_str == NULL)
12969 	return NULL_TREE;
12970       if (strchr (fmt_str, target_percent) != NULL
12971 	  && strcmp (fmt_str, target_percent_s))
12972 	return NULL_TREE;
12973     }
12974 
12975   /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
12976      available.  */
12977   fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
12978 		      ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
12979   if (!fn)
12980     return NULL_TREE;
12981 
  /* Drop FLAG and SIZE: pass (dest, len, fmt) plus any remaining
     varargs through to the non-checking function.  */
12982   return rewrite_call_expr (loc, exp, 5, fn, 3, dest, len, fmt);
12983 }
12984
12985 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
12986    FMT and ARG are the arguments to the call; we don't fold cases with
12987    more than 2 arguments, and ARG may be null if this is a 1-argument case.
12988 
12989    Return NULL_TREE if no simplification was possible, otherwise return the
12990    simplified form of the call as a tree.  FCODE is the BUILT_IN_*
12991    code of the function to be simplified.  */
12992 
12993 static tree
12994 fold_builtin_printf (location_t loc, tree fndecl, tree fmt,
12995 		     tree arg, bool ignore,
12996 		     enum built_in_function fcode)
12997 {
12998   tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
12999   const char *fmt_str = NULL;
13000 
13001   /* If the return value is used, don't do the transformation.  */
13002   if (! ignore)
13003     return NULL_TREE;
13004 
13005   /* Verify the required arguments in the original call.  */
13006   if (!validate_arg (fmt, POINTER_TYPE))
13007     return NULL_TREE;
13008 
13009   /* Check whether the format is a literal string constant.  */
13010   fmt_str = c_getstr (fmt);
13011   if (fmt_str == NULL)
13012     return NULL_TREE;
13013 
13014   if (fcode == BUILT_IN_PRINTF_UNLOCKED)
13015     {
13016       /* If we're using an unlocked function, assume the other
13017 	 unlocked functions exist explicitly.  */
13018       fn_putchar = built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED];
13019       fn_puts = built_in_decls[BUILT_IN_PUTS_UNLOCKED];
13020     }
13021   else
13022     {
13023       fn_putchar = implicit_built_in_decls[BUILT_IN_PUTCHAR];
13024       fn_puts = implicit_built_in_decls[BUILT_IN_PUTS];
13025     }
13026 
13027   if (!init_target_chars ())
13028     return NULL_TREE;
13029 
  /* Case 1: the format is exactly "%s" with a constant string argument,
     or the format contains no '%' at all.  */
13030   if (strcmp (fmt_str, target_percent_s) == 0
13031       || strchr (fmt_str, target_percent) == NULL)
13032     {
13033       const char *str;
13034 
13035       if (strcmp (fmt_str, target_percent_s) == 0)
13036 	{
13037 	  if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
13038 	    return NULL_TREE;
13039 
13040 	  if (!arg || !validate_arg (arg, POINTER_TYPE))
13041 	    return NULL_TREE;
13042 
13043 	  str = c_getstr (arg);
13044 	  if (str == NULL)
13045 	    return NULL_TREE;
13046 	}
13047       else
13048 	{
13049 	  /* The format specifier doesn't contain any '%' characters.  */
13050 	  if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
13051 	      && arg)
13052 	    return NULL_TREE;
13053 	  str = fmt_str;
13054 	}
13055 
13056       /* If the string was "", printf does nothing.  */
13057       if (str[0] == '\0')
13058 	return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
13059 
13060       /* If the string has length of 1, call putchar.  */
13061       if (str[1] == '\0')
13062 	{
13063 	  /* Given printf("c"), (where c is any one character,)
13064 	     convert "c"[0] to an int and pass that to the replacement
13065 	     function.  */
13066 	  newarg = build_int_cst (NULL_TREE, str[0]);
13067 	  if (fn_putchar)
13068 	    call = build_call_expr_loc (loc, fn_putchar, 1, newarg);
13069 	}
13070       else
13071 	{
13072 	  /* If the string was "string\n", call puts("string").  */
13073 	  size_t len = strlen (str);
13074 	  if ((unsigned char)str[len - 1] == target_newline)
13075 	    {
13076 	      /* Create a NUL-terminated string that's one char shorter
13077 		 than the original, stripping off the trailing '\n'.  */
13078 	      char *newstr = XALLOCAVEC (char, len);
13079 	      memcpy (newstr, str, len - 1);
13080 	      newstr[len - 1] = 0;
13081 
13082 	      newarg = build_string_literal (len, newstr);
13083 	      if (fn_puts)
13084 		call = build_call_expr_loc (loc, fn_puts, 1, newarg);
13085 	    }
13086 	  else
13087 	    /* We'd like to arrange to call fputs(string,stdout) here,
13088 	       but we need stdout and don't have a way to get it yet.  */
13089 	    return NULL_TREE;
13090 	}
13091     }
13092 
13093   /* The other optimizations can be done only on the non-va_list variants.  */
13094   else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
13095     return NULL_TREE;
13096 
13097   /* If the format specifier was "%s\n", call __builtin_puts(arg).  */
13098   else if (strcmp (fmt_str, target_percent_s_newline) == 0)
13099     {
13100       if (!arg || !validate_arg (arg, POINTER_TYPE))
13101 	return NULL_TREE;
13102       if (fn_puts)
13103 	call = build_call_expr_loc (loc, fn_puts, 1, arg);
13104     }
13105 
13106   /* If the format specifier was "%c", call __builtin_putchar(arg).  */
13107   else if (strcmp (fmt_str, target_percent_c) == 0)
13108     {
13109       if (!arg || !validate_arg (arg, INTEGER_TYPE))
13110 	return NULL_TREE;
13111       if (fn_putchar)
13112 	call = build_call_expr_loc (loc, fn_putchar, 1, arg);
13113     }
13114 
  /* CALL stays NULL when the putchar/puts decl was unavailable; fall
     back to emitting the original call.  */
13115   if (!call)
13116     return NULL_TREE;
13117 
13118   return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
13119 }
13120
13121 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
13122    FP, FMT, and ARG are the arguments to the call.  We don't fold calls with
13123    more than 3 arguments, and ARG may be null in the 2-argument case.
13124 
13125    Return NULL_TREE if no simplification was possible, otherwise return the
13126    simplified form of the call as a tree.  FCODE is the BUILT_IN_*
13127    code of the function to be simplified.  */
13128 
13129 static tree
13130 fold_builtin_fprintf (location_t loc, tree fndecl, tree fp,
13131 		      tree fmt, tree arg, bool ignore,
13132 		      enum built_in_function fcode)
13133 {
13134   tree fn_fputc, fn_fputs, call = NULL_TREE;
13135   const char *fmt_str = NULL;
13136 
13137   /* If the return value is used, don't do the transformation.  */
13138   if (! ignore)
13139     return NULL_TREE;
13140 
13141   /* Verify the required arguments in the original call.  */
13142   if (!validate_arg (fp, POINTER_TYPE))
13143     return NULL_TREE;
13144   if (!validate_arg (fmt, POINTER_TYPE))
13145     return NULL_TREE;
13146 
13147   /* Check whether the format is a literal string constant.  */
13148   fmt_str = c_getstr (fmt);
13149   if (fmt_str == NULL)
13150     return NULL_TREE;
13151 
13152   if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
13153     {
13154       /* If we're using an unlocked function, assume the other
13155 	 unlocked functions exist explicitly.  */
13156       fn_fputc = built_in_decls[BUILT_IN_FPUTC_UNLOCKED];
13157       fn_fputs = built_in_decls[BUILT_IN_FPUTS_UNLOCKED];
13158     }
13159   else
13160     {
13161       fn_fputc = implicit_built_in_decls[BUILT_IN_FPUTC];
13162       fn_fputs = implicit_built_in_decls[BUILT_IN_FPUTS];
13163     }
13164 
13165   if (!init_target_chars ())
13166     return NULL_TREE;
13167 
13168   /* If the format doesn't contain % args or %%, use strcpy.  */
13169   if (strchr (fmt_str, target_percent) == NULL)
13170     {
13171       if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
13172 	  && arg)
13173 	return NULL_TREE;
13174 
13175       /* If the format specifier was "", fprintf does nothing.  */
13176       if (fmt_str[0] == '\0')
13177 	{
13178 	  /* If FP has side-effects, just wait until gimplification is
13179 	     done.  */
13180 	  if (TREE_SIDE_EFFECTS (fp))
13181 	    return NULL_TREE;
13182 
13183 	  return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
13184 	}
13185 
13186       /* When "string" doesn't contain %, replace all cases of
13187 	 fprintf (fp, string) with fputs (string, fp).  The fputs
13188 	 builtin will take care of special cases like length == 1.  */
13189       if (fn_fputs)
13190 	call = build_call_expr_loc (loc, fn_fputs, 2, fmt, fp);
13191     }
13192 
13193   /* The other optimizations can be done only on the non-va_list variants.  */
13194   else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
13195     return NULL_TREE;
13196 
13197   /* If the format specifier was "%s", call __builtin_fputs (arg, fp).  */
13198   else if (strcmp (fmt_str, target_percent_s) == 0)
13199     {
13200       if (!arg || !validate_arg (arg, POINTER_TYPE))
13201 	return NULL_TREE;
13202       if (fn_fputs)
13203 	call = build_call_expr_loc (loc, fn_fputs, 2, arg, fp);
13204     }
13205 
13206   /* If the format specifier was "%c", call __builtin_fputc (arg, fp).  */
13207   else if (strcmp (fmt_str, target_percent_c) == 0)
13208     {
13209       if (!arg || !validate_arg (arg, INTEGER_TYPE))
13210 	return NULL_TREE;
13211       if (fn_fputc)
13212 	call = build_call_expr_loc (loc, fn_fputc, 2, arg, fp);
13213     }
13214 
  /* CALL stays NULL when the fputc/fputs decl was unavailable; fall
     back to emitting the original call.  */
13215   if (!call)
13216     return NULL_TREE;
13217   return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
13218 }
13219
13220 /* Initialize format string characters in the target charset. */
13221
13222 static bool
13223 init_target_chars (void)
13224 {
13225 static bool init;
13226 if (!init)
13227 {
13228 target_newline = lang_hooks.to_target_charset ('\n');
13229 target_percent = lang_hooks.to_target_charset ('%');
13230 target_c = lang_hooks.to_target_charset ('c');
13231 target_s = lang_hooks.to_target_charset ('s');
13232 if (target_newline == 0 || target_percent == 0 || target_c == 0
13233 || target_s == 0)
13234 return false;
13235
13236 target_percent_c[0] = target_percent;
13237 target_percent_c[1] = target_c;
13238 target_percent_c[2] = '\0';
13239
13240 target_percent_s[0] = target_percent;
13241 target_percent_s[1] = target_s;
13242 target_percent_s[2] = '\0';
13243
13244 target_percent_s_newline[0] = target_percent;
13245 target_percent_s_newline[1] = target_s;
13246 target_percent_s_newline[2] = target_newline;
13247 target_percent_s_newline[3] = '\0';
13248
13249 init = true;
13250 }
13251 return true;
13252 }
13253
13254 /* Helper function for do_mpfr_arg*().  Ensure M is a normal number
13255    and no overflow/underflow occurred.  INEXACT is true if M was not
13256    exactly calculated.  TYPE is the tree type for the result.  This
13257    function assumes that you cleared the MPFR flags and then
13258    calculated M to see if anything subsequently set a flag prior to
13259    entering this function.  Return NULL_TREE if any checks fail.  */
13260 
13261 static tree
13262 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
13263 {
13264   /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13265      overflow/underflow occurred.  If -frounding-math, proceed iff the
13266      result of calling the mpfr function was exact.  */
13267   if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
13268       && (!flag_rounding_math || !inexact))
13269     {
13270       REAL_VALUE_TYPE rr;
13271 
13272       real_from_mpfr (&rr, m, type, GMP_RNDN);
13273       /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
13274 	 check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
13275 	 but the mpfr_t is not, then we underflowed in the
13276 	 conversion.  */
13277       if (real_isfinite (&rr)
13278 	  && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
13279         {
13280 	  REAL_VALUE_TYPE rmode;
13281 
13282 	  real_convert (&rmode, TYPE_MODE (type), &rr);
13283 	  /* Proceed iff the specified mode can hold the value.  */
13284 	  if (real_identical (&rmode, &rr))
13285 	    return build_real (type, rmode);
13286 	}
13287     }
13288   return NULL_TREE;
13289 }
13290
13291 #ifdef HAVE_mpc
13292 /* Helper function for do_mpc_arg*().  Ensure M is a normal complex
13293    number and no overflow/underflow occurred.  INEXACT is true if M
13294    was not exactly calculated.  TYPE is the tree type for the result.
13295    This function assumes that you cleared the MPFR flags and then
13296    calculated M to see if anything subsequently set a flag prior to
13297    entering this function.  Return NULL_TREE if any checks fail.  */
13298 
13299 static tree
13300 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact)
13301 {
13302   /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13303      overflow/underflow occurred.  If -frounding-math, proceed iff the
13304      result of calling the mpc function was exact.  */
13305   if (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
13306       && !mpfr_overflow_p () && !mpfr_underflow_p ()
13307       && (!flag_rounding_math || !inexact))
13308     {
13309       REAL_VALUE_TYPE re, im;
13310 
13311       real_from_mpfr (&re, mpc_realref (m), type, GMP_RNDN);
13312       real_from_mpfr (&im, mpc_imagref (m), type, GMP_RNDN);
13313       /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
13314 	 check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
13315 	 but the mpfr_t is not, then we underflowed in the
13316 	 conversion.  */
13317       if (real_isfinite (&re) && real_isfinite (&im)
13318 	  && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
13319 	  && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0))
13320         {
13321 	  REAL_VALUE_TYPE re_mode, im_mode;
13322 
	  /* TYPE is a complex type; convert each part in the mode of
	     its element type.  */
13323 	  real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
13324 	  real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
13325 	  /* Proceed iff the specified mode can hold the value.  */
13326 	  if (real_identical (&re_mode, &re) && real_identical (&im_mode, &im))
13327 	    return build_complex (type, build_real (TREE_TYPE (type), re_mode),
13328 				  build_real (TREE_TYPE (type), im_mode));
13329 	}
13330     }
13331   return NULL_TREE;
13332 }
13333 #endif /* HAVE_mpc */
13334
13335 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
13336 FUNC on it and return the resulting value as a tree with type TYPE.
13337 If MIN and/or MAX are not NULL, then the supplied ARG must be
13338 within those bounds. If INCLUSIVE is true, then MIN/MAX are
13339 acceptable values, otherwise they are not. The mpfr precision is
13340 set to the precision of TYPE. We assume that function FUNC returns
13341 zero if the result could be calculated exactly within the requested
13342 precision. */
13343
13344 static tree
13345 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
13346 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
13347 bool inclusive)
13348 {
13349 tree result = NULL_TREE;
13350
13351 STRIP_NOPS (arg);
13352
13353 /* To proceed, MPFR must exactly represent the target floating point
13354 format, which only happens when the target base equals two. */
13355 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13356 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
13357 {
13358 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13359
13360 if (real_isfinite (ra)
13361 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
13362 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
13363 {
13364 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13365 const int prec = fmt->p;
13366 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13367 int inexact;
13368 mpfr_t m;
13369
13370 mpfr_init2 (m, prec);
13371 mpfr_from_real (m, ra, GMP_RNDN);
13372 mpfr_clear_flags ();
13373 inexact = func (m, m, rnd);
13374 result = do_mpfr_ckconv (m, type, inexact);
13375 mpfr_clear (m);
13376 }
13377 }
13378
13379 return result;
13380 }
13381
13382 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
13383 FUNC on it and return the resulting value as a tree with type TYPE.
13384 The mpfr precision is set to the precision of TYPE. We assume that
13385 function FUNC returns zero if the result could be calculated
13386 exactly within the requested precision. */
13387
13388 static tree
13389 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
13390 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13391 {
13392 tree result = NULL_TREE;
13393
13394 STRIP_NOPS (arg1);
13395 STRIP_NOPS (arg2);
13396
13397 /* To proceed, MPFR must exactly represent the target floating point
13398 format, which only happens when the target base equals two. */
13399 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13400 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13401 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13402 {
13403 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13404 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13405
13406 if (real_isfinite (ra1) && real_isfinite (ra2))
13407 {
13408 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13409 const int prec = fmt->p;
13410 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13411 int inexact;
13412 mpfr_t m1, m2;
13413
13414 mpfr_inits2 (prec, m1, m2, NULL);
13415 mpfr_from_real (m1, ra1, GMP_RNDN);
13416 mpfr_from_real (m2, ra2, GMP_RNDN);
13417 mpfr_clear_flags ();
13418 inexact = func (m1, m1, m2, rnd);
13419 result = do_mpfr_ckconv (m1, type, inexact);
13420 mpfr_clears (m1, m2, NULL);
13421 }
13422 }
13423
13424 return result;
13425 }
13426
13427 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
13428 FUNC on it and return the resulting value as a tree with type TYPE.
13429 The mpfr precision is set to the precision of TYPE. We assume that
13430 function FUNC returns zero if the result could be calculated
13431 exactly within the requested precision. */
13432
13433 static tree
13434 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
13435 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13436 {
13437 tree result = NULL_TREE;
13438
13439 STRIP_NOPS (arg1);
13440 STRIP_NOPS (arg2);
13441 STRIP_NOPS (arg3);
13442
13443 /* To proceed, MPFR must exactly represent the target floating point
13444 format, which only happens when the target base equals two. */
13445 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13446 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13447 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
13448 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
13449 {
13450 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13451 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13452 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
13453
13454 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
13455 {
13456 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13457 const int prec = fmt->p;
13458 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13459 int inexact;
13460 mpfr_t m1, m2, m3;
13461
13462 mpfr_inits2 (prec, m1, m2, m3, NULL);
13463 mpfr_from_real (m1, ra1, GMP_RNDN);
13464 mpfr_from_real (m2, ra2, GMP_RNDN);
13465 mpfr_from_real (m3, ra3, GMP_RNDN);
13466 mpfr_clear_flags ();
13467 inexact = func (m1, m1, m2, m3, rnd);
13468 result = do_mpfr_ckconv (m1, type, inexact);
13469 mpfr_clears (m1, m2, m3, NULL);
13470 }
13471 }
13472
13473 return result;
13474 }
13475
13476 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
13477    the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
13478    If ARG_SINP and ARG_COSP are NULL then the result is returned
13479    as a complex value.
13480    The type is taken from the type of ARG and is used for setting the
13481    precision of the calculation and results.  */
13482 
13483 static tree
13484 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
13485 {
13486   tree const type = TREE_TYPE (arg);
13487   tree result = NULL_TREE;
13488 
13489   STRIP_NOPS (arg);
13490 
13491   /* To proceed, MPFR must exactly represent the target floating point
13492      format, which only happens when the target base equals two.  */
13493   if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13494       && TREE_CODE (arg) == REAL_CST
13495       && !TREE_OVERFLOW (arg))
13496     {
13497       const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13498 
13499       if (real_isfinite (ra))
13500 	{
13501 	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13502 	  const int prec = fmt->p;
13503 	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13504 	  tree result_s, result_c;
13505 	  int inexact;
13506 	  mpfr_t m, ms, mc;
13507 
	  /* Compute sin and cos simultaneously at TYPE's precision;
	     both must convert exactly back into TYPE.  */
13508 	  mpfr_inits2 (prec, m, ms, mc, NULL);
13509 	  mpfr_from_real (m, ra, GMP_RNDN);
13510 	  mpfr_clear_flags ();
13511 	  inexact = mpfr_sin_cos (ms, mc, m, rnd);
13512 	  result_s = do_mpfr_ckconv (ms, type, inexact);
13513 	  result_c = do_mpfr_ckconv (mc, type, inexact);
13514 	  mpfr_clears (m, ms, mc, NULL);
13515 	  if (result_s && result_c)
13516 	    {
13517 	      /* If we are to return in a complex value do so.  */
	      /* NOTE(review): the arguments here place RESULT_C first,
		 i.e. cos in the real part and sin in the imaginary
		 part (cexpi ordering) -- confirm against callers.  */
13518 	      if (!arg_sinp && !arg_cosp)
13519 		return build_complex (build_complex_type (type),
13520 				      result_c, result_s);
13521 
13522 	      /* Dereference the sin/cos pointer arguments.  */
13523 	      arg_sinp = build_fold_indirect_ref (arg_sinp);
13524 	      arg_cosp = build_fold_indirect_ref (arg_cosp);
13525 	      /* Proceed if valid pointer type were passed in.  */
13526 	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
13527 		  && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
13528 		{
13529 		  /* Set the values.  */
13530 		  result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
13531 					  result_s);
13532 		  TREE_SIDE_EFFECTS (result_s) = 1;
13533 		  result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
13534 					  result_c);
13535 		  TREE_SIDE_EFFECTS (result_c) = 1;
13536 		  /* Combine the assignments into a compound expr.  */
13537 		  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13538 						    result_s, result_c));
13539 		}
13540 	    }
13541 	}
13542     }
13543   return result;
13544 }
13545
13546 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
13547 two-argument mpfr order N Bessel function FUNC on them and return
13548 the resulting value as a tree with type TYPE. The mpfr precision
13549 is set to the precision of TYPE. We assume that function FUNC
13550 returns zero if the result could be calculated exactly within the
13551 requested precision. */
13552 static tree
13553 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
13554 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
13555 const REAL_VALUE_TYPE *min, bool inclusive)
13556 {
13557 tree result = NULL_TREE;
13558
13559 STRIP_NOPS (arg1);
13560 STRIP_NOPS (arg2);
13561
13562 /* To proceed, MPFR must exactly represent the target floating point
13563 format, which only happens when the target base equals two. */
13564 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13565 && host_integerp (arg1, 0)
13566 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13567 {
13568 const HOST_WIDE_INT n = tree_low_cst(arg1, 0);
13569 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
13570
13571 if (n == (long)n
13572 && real_isfinite (ra)
13573 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
13574 {
13575 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13576 const int prec = fmt->p;
13577 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13578 int inexact;
13579 mpfr_t m;
13580
13581 mpfr_init2 (m, prec);
13582 mpfr_from_real (m, ra, GMP_RNDN);
13583 mpfr_clear_flags ();
13584 inexact = func (m, n, m, rnd);
13585 result = do_mpfr_ckconv (m, type, inexact);
13586 mpfr_clear (m);
13587 }
13588 }
13589
13590 return result;
13591 }
13592
13593 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
13594    the pointer *(ARG_QUO) and return the result.  The type is taken
13595    from the type of ARG0 and is used for setting the precision of the
13596    calculation and results.  On success the returned tree is a
   compound expression that stores the quotient through ARG_QUO and
   yields the remainder; NULL_TREE on failure.  */
13597 
13598 static tree
13599 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
13600 {
13601   tree const type = TREE_TYPE (arg0);
13602   tree result = NULL_TREE;
13603 
13604   STRIP_NOPS (arg0);
13605   STRIP_NOPS (arg1);
13606 
13607   /* To proceed, MPFR must exactly represent the target floating point
13608      format, which only happens when the target base equals two.  */
13609   if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13610       && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
13611       && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
13612     {
13613       const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
13614       const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
13615 
13616       if (real_isfinite (ra0) && real_isfinite (ra1))
13617 	{
13618 	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13619 	  const int prec = fmt->p;
13620 	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13621 	  tree result_rem;
13622 	  long integer_quo;
13623 	  mpfr_t m0, m1;
13624 
13625 	  mpfr_inits2 (prec, m0, m1, NULL);
13626 	  mpfr_from_real (m0, ra0, GMP_RNDN);
13627 	  mpfr_from_real (m1, ra1, GMP_RNDN);
13628 	  mpfr_clear_flags ();
13629 	  mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
13630 	  /* Remquo is independent of the rounding mode, so pass
13631 	     inexact=0 to do_mpfr_ckconv().  */
13632 	  result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
13633 	  mpfr_clears (m0, m1, NULL);
13634 	  if (result_rem)
13635 	    {
13636 	      /* MPFR calculates quo in the host's long so it may
13637 		 return more bits in quo than the target int can hold
13638 		 if sizeof(host long) > sizeof(target int).  This can
13639 		 happen even for native compilers in LP64 mode.  In
13640 		 these cases, modulo the quo value with the largest
13641 		 number that the target int can hold while leaving one
13642 		 bit for the sign.  */
13643 	      if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
13644 		integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
13645 
13646 	      /* Dereference the quo pointer argument.  */
13647 	      arg_quo = build_fold_indirect_ref (arg_quo);
13648 	      /* Proceed iff a valid pointer type was passed in.  */
13649 	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
13650 		{
13651 		  /* Set the value.  */
13652 		  tree result_quo = fold_build2 (MODIFY_EXPR,
13653 						 TREE_TYPE (arg_quo), arg_quo,
13654 						 build_int_cst (NULL, integer_quo));
13655 		  TREE_SIDE_EFFECTS (result_quo) = 1;
13656 		  /* Combine the quo assignment with the rem.  */
13657 		  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13658 						    result_quo, result_rem));
13659 		}
13660 	    }
13661 	}
13662     }
13663   return result;
13664 }
13665
13666 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
13667 resulting value as a tree with type TYPE. The mpfr precision is
13668 set to the precision of TYPE. We assume that this mpfr function
13669 returns zero if the result could be calculated exactly within the
13670 requested precision. In addition, the integer pointer represented
13671 by ARG_SG will be dereferenced and set to the appropriate signgam
13672 (-1,1) value. */
13673
static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
	 negative integer.  */
      if (real_isfinite (ra)
	  && ra->cl != rvc_zero
	  && !(real_isneg(ra) && real_isinteger(ra, TYPE_MODE (type))))
        {
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  int inexact, sg;
	  mpfr_t m;
	  tree result_lg;

	  mpfr_init2 (m, prec);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  mpfr_clear_flags ();
	  /* Computes lgamma in M (in place) and the sign of gamma(ARG)
	     in SG.  */
	  inexact = mpfr_lgamma (m, &sg, m, rnd);
	  result_lg = do_mpfr_ckconv (m, type, inexact);
	  mpfr_clear (m);
	  if (result_lg)
	    {
	      tree result_sg;

	      /* Dereference the arg_sg pointer argument.  */
	      arg_sg = build_fold_indirect_ref (arg_sg);
	      /* Assign the signgam value into *arg_sg.  */
	      result_sg = fold_build2 (MODIFY_EXPR,
				       TREE_TYPE (arg_sg), arg_sg,
				       build_int_cst (NULL, sg));
	      /* Keep the assignment alive inside the COMPOUND_EXPR.  */
	      TREE_SIDE_EFFECTS (result_sg) = 1;
	      /* Combine the signgam assignment with the lgamma result.  */
	      result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						result_sg, result_lg));
	    }
	}
    }

  return result;
}
13730
13731 #ifdef HAVE_mpc
13732 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
13733 function FUNC on it and return the resulting value as a tree with
13734 type TYPE. The mpfr precision is set to the precision of TYPE. We
13735 assume that function FUNC returns zero if the result could be
13736 calculated exactly within the requested precision. */
13737
static tree
do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  The
     argument must be a COMPLEX_CST whose element type is a REAL_TYPE.  */
  if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
      const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));

      /* Fold only when both parts are finite; NaN/Inf are left for
	 runtime evaluation.  */
      if (real_isfinite (re) && real_isfinite (im))
	{
	  /* Precision and rounding mode come from the element type of
	     the result TYPE.  */
	  const struct real_format *const fmt =
	    REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
	  int inexact;
	  mpc_t m;

	  mpc_init2 (m, prec);
	  mpfr_from_real (mpc_realref(m), re, rnd);
	  mpfr_from_real (mpc_imagref(m), im, rnd);
	  mpfr_clear_flags ();
	  /* Evaluate FUNC in place: M receives the result.  */
	  inexact = func (m, m, crnd);
	  result = do_mpc_ckconv (m, type, inexact);
	  mpc_clear (m);
	}
    }

  return result;
}
13776
13777 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
13778 mpc function FUNC on it and return the resulting value as a tree
13779 with type TYPE. The mpfr precision is set to the precision of
13780 TYPE. We assume that function FUNC returns zero if the result
13781 could be calculated exactly within the requested precision. */
13782
13783 #ifdef HAVE_mpc
tree
do_mpc_arg2 (tree arg0, tree arg1, tree type,
	     int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.
     NOTE(review): only ARG0's real format base is checked here;
     presumably ARG1 always shares the same format — confirm with
     callers.  */
  if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
      && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
      const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
      const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
      const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));

      /* Fold only when all four parts are finite.  */
      if (real_isfinite (re0) && real_isfinite (im0)
	  && real_isfinite (re1) && real_isfinite (im1))
	{
	  /* Precision and rounding mode come from the element type of
	     the result TYPE.  */
	  const struct real_format *const fmt =
	    REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
	  int inexact;
	  mpc_t m0, m1;

	  mpc_init2 (m0, prec);
	  mpc_init2 (m1, prec);
	  mpfr_from_real (mpc_realref(m0), re0, rnd);
	  mpfr_from_real (mpc_imagref(m0), im0, rnd);
	  mpfr_from_real (mpc_realref(m1), re1, rnd);
	  mpfr_from_real (mpc_imagref(m1), im1, rnd);
	  mpfr_clear_flags ();
	  /* Evaluate FUNC with the result placed in M0 (in place).  */
	  inexact = func (m0, m0, m1, crnd);
	  result = do_mpc_ckconv (m0, type, inexact);
	  mpc_clear (m0);
	  mpc_clear (m1);
	}
    }

  return result;
}
13833 # endif
13834 #endif /* HAVE_mpc */
13835
13836 /* FIXME tuples.
13837 The functions below provide an alternate interface for folding
13838 builtin function calls presented as GIMPLE_CALL statements rather
13839 than as CALL_EXPRs. The folded result is still expressed as a
13840 tree. There is too much code duplication in the handling of
13841 varargs functions, and a more intrusive re-factoring would permit
13842 better sharing of code between the tree and statement-based
13843 versions of these functions. */
13844
13845 /* Construct a new CALL_EXPR using the tail of the argument list of STMT
13846 along with N new arguments specified as the "..." parameters. SKIP
13847 is the number of arguments in STMT to be omitted. This function is used
13848 to do varargs-to-varargs transformations. */
13849
13850 static tree
13851 gimple_rewrite_call_expr (gimple stmt, int skip, tree fndecl, int n, ...)
13852 {
13853 int oldnargs = gimple_call_num_args (stmt);
13854 int nargs = oldnargs - skip + n;
13855 tree fntype = TREE_TYPE (fndecl);
13856 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
13857 tree *buffer;
13858 int i, j;
13859 va_list ap;
13860 location_t loc = gimple_location (stmt);
13861
13862 buffer = XALLOCAVEC (tree, nargs);
13863 va_start (ap, n);
13864 for (i = 0; i < n; i++)
13865 buffer[i] = va_arg (ap, tree);
13866 va_end (ap);
13867 for (j = skip; j < oldnargs; j++, i++)
13868 buffer[i] = gimple_call_arg (stmt, j);
13869
13870 return fold (build_call_array_loc (loc, TREE_TYPE (fntype), fn, nargs, buffer));
13871 }
13872
13873 /* Fold a call STMT to __{,v}sprintf_chk. Return NULL_TREE if
13874 a normal call should be emitted rather than expanding the function
13875 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
13876
static tree
gimple_fold_builtin_sprintf_chk (gimple stmt, enum built_in_function fcode)
{
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;
  int nargs = gimple_call_num_args (stmt);

  /* Verify the required arguments in the original call:
     __sprintf_chk (dest, flag, size, fmt, ...).  */
  if (nargs < 4)
    return NULL_TREE;
  dest = gimple_call_arg (stmt, 0);
  if (!validate_arg (dest, POINTER_TYPE))
    return NULL_TREE;
  flag = gimple_call_arg (stmt, 1);
  if (!validate_arg (flag, INTEGER_TYPE))
    return NULL_TREE;
  size = gimple_call_arg (stmt, 2);
  if (!validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;
  fmt = gimple_call_arg (stmt, 3);
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  /* SIZE (the object size) must be a known constant.  */
  if (! host_integerp (size, 1))
    return NULL_TREE;

  /* LEN is the statically-known output length, if determinable.  */
  len = NULL_TREE;

  if (!init_target_chars ())
    return NULL_TREE;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str != NULL)
    {
      /* If the format doesn't contain % args or %%, we know the size.  */
      if (strchr (fmt_str, target_percent) == 0)
	{
	  /* For sprintf_chk with extra args a %-free format would be
	     suspect, so only accept exactly 4 arguments there.  */
	  if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
	    len = build_int_cstu (size_type_node, strlen (fmt_str));
	}
      /* If the format is "%s" and first ... argument is a string literal,
	 we know the size too.  */
      else if (fcode == BUILT_IN_SPRINTF_CHK
	       && strcmp (fmt_str, target_percent_s) == 0)
	{
	  tree arg;

	  if (nargs == 5)
	    {
	      arg = gimple_call_arg (stmt, 4);
	      if (validate_arg (arg, POINTER_TYPE))
		{
		  len = c_strlen (arg, 1);
		  if (! len || ! host_integerp (len, 1))
		    len = NULL_TREE;
		}
	    }
	}
    }

  /* Unless SIZE is (size_t)-1 (i.e. "unknown/unlimited"), require a
     known LEN that provably fits.  */
  if (! integer_all_onesp (size))
    {
      if (! len || ! tree_int_cst_lt (len, size))
	return NULL_TREE;
    }

  /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      if (fmt_str == NULL)
	return NULL_TREE;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return NULL_TREE;
    }

  /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available.  */
  fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
		      ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
  if (!fn)
    return NULL_TREE;

  /* Drop the flag and size arguments, keeping dest, fmt and the rest.  */
  return gimple_rewrite_call_expr (stmt, 4, fn, 2, dest, fmt);
}
13963
13964 /* Fold a call STMT to {,v}snprintf. Return NULL_TREE if
13965 a normal call should be emitted rather than expanding the function
13966 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13967 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13968 passed as second argument. */
13969
tree
gimple_fold_builtin_snprintf_chk (gimple stmt, tree maxlen,
                                  enum built_in_function fcode)
{
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;

  /* Verify the required arguments in the original call:
     __snprintf_chk (dest, len, flag, size, fmt, ...).  */
  if (gimple_call_num_args (stmt) < 5)
    return NULL_TREE;
  dest = gimple_call_arg (stmt, 0);
  if (!validate_arg (dest, POINTER_TYPE))
    return NULL_TREE;
  len = gimple_call_arg (stmt, 1);
  if (!validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;
  flag = gimple_call_arg (stmt, 2);
  if (!validate_arg (flag, INTEGER_TYPE))
    return NULL_TREE;
  size = gimple_call_arg (stmt, 3);
  if (!validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;
  fmt = gimple_call_arg (stmt, 4);
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  /* The object size must be a known constant.  */
  if (! host_integerp (size, 1))
    return NULL_TREE;

  /* Unless SIZE is (size_t)-1 ("unknown"), require SIZE >= LEN (or
     MAXLEN) so the bounded write provably fits.  */
  if (! integer_all_onesp (size))
    {
      if (! host_integerp (len, 1))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
	    return NULL_TREE;
	}
      else
	maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
	return NULL_TREE;
    }

  if (!init_target_chars ())
    return NULL_TREE;

  /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      fmt_str = c_getstr (fmt);
      if (fmt_str == NULL)
	return NULL_TREE;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return NULL_TREE;
    }

  /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
     available.  */
  fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
		      ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
  if (!fn)
    return NULL_TREE;

  /* Drop the flag and size arguments, keeping dest, len and fmt.  */
  return gimple_rewrite_call_expr (stmt, 5, fn, 3, dest, len, fmt);
}
14040
14041 /* Builtins with folding operations that operate on "..." arguments
14042 need special handling; we need to store the arguments in a convenient
14043 data structure before attempting any folding. Fortunately there are
14044 only a few builtins that fall into this category. FNDECL is the
14045 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
14046 result of the function call is ignored. */
14047
14048 static tree
14049 gimple_fold_builtin_varargs (tree fndecl, gimple stmt,
14050 bool ignore ATTRIBUTE_UNUSED)
14051 {
14052 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
14053 tree ret = NULL_TREE;
14054
14055 switch (fcode)
14056 {
14057 case BUILT_IN_SPRINTF_CHK:
14058 case BUILT_IN_VSPRINTF_CHK:
14059 ret = gimple_fold_builtin_sprintf_chk (stmt, fcode);
14060 break;
14061
14062 case BUILT_IN_SNPRINTF_CHK:
14063 case BUILT_IN_VSNPRINTF_CHK:
14064 ret = gimple_fold_builtin_snprintf_chk (stmt, NULL_TREE, fcode);
14065
14066 default:
14067 break;
14068 }
14069 if (ret)
14070 {
14071 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
14072 TREE_NO_WARNING (ret) = 1;
14073 return ret;
14074 }
14075 return NULL_TREE;
14076 }
14077
14078 /* A wrapper function for builtin folding that prevents warnings for
14079 "statement without effect" and the like, caused by removing the
14080 call node earlier than the warning is generated. */
14081
tree
fold_call_stmt (gimple stmt, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = gimple_call_fndecl (stmt);
  location_t loc = gimple_location (stmt);
  /* Only attempt folding for direct calls to builtins that do not
     use __builtin_va_arg_pack.  */
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      && !gimple_call_va_arg_pack_p (stmt))
    {
      int nargs = gimple_call_num_args (stmt);

      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;
      /* FIXME: Don't use a list in this interface.  */
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	{
	  /* Machine-dependent builtins are delegated to the target
	     hook, which takes a TREE_LIST of arguments (built here in
	     reverse so the list ends up in call order).  */
	  tree arglist = NULL_TREE;
	  int i;
	  for (i = nargs - 1; i >= 0; i--)
	    arglist = tree_cons (NULL_TREE, gimple_call_arg (stmt, i), arglist);
	  return targetm.fold_builtin (fndecl, arglist, ignore);
	}
      else
	{
	  /* Generic builtins: first try the fixed-arity folder, then
	     the varargs folder.  */
	  if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
	    {
	      tree args[MAX_ARGS_TO_FOLD_BUILTIN];
	      int i;
	      for (i = 0; i < nargs; i++)
		args[i] = gimple_call_arg (stmt, i);
	      ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
	    }
	  if (!ret)
	    ret = gimple_fold_builtin_varargs (fndecl, stmt, ignore);
	  if (ret)
	    {
	      /* Propagate location information from original call to
		 expansion of builtin.  Otherwise things like
		 maybe_emit_chk_warning, that operate on the expansion
		 of a builtin, will use the wrong location information.  */
	      if (gimple_has_location (stmt))
		{
		  tree realret = ret;
		  /* Look through the NOP_EXPR wrapper added by
		     gimple_fold_builtin_varargs, if any.  */
		  if (TREE_CODE (ret) == NOP_EXPR)
		    realret = TREE_OPERAND (ret, 0);
		  if (CAN_HAVE_LOCATION_P (realret)
		      && !EXPR_HAS_LOCATION (realret))
		    SET_EXPR_LOCATION (realret, loc);
		  return realret;
		}
	      return ret;
	    }
	}
    }
  return NULL_TREE;
}