re PR middle-end/42505 (loop canonicalization causes a lot of unnecessary temporary...
[gcc.git] / gcc / builtins.c
1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
4 Free Software Foundation, Inc.
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "rtl.h"
28 #include "tree.h"
29 #include "realmpfr.h"
30 #include "gimple.h"
31 #include "flags.h"
32 #include "regs.h"
33 #include "hard-reg-set.h"
34 #include "except.h"
35 #include "function.h"
36 #include "insn-config.h"
37 #include "expr.h"
38 #include "optabs.h"
39 #include "libfuncs.h"
40 #include "recog.h"
41 #include "output.h"
42 #include "typeclass.h"
43 #include "toplev.h"
44 #include "predict.h"
45 #include "tm_p.h"
46 #include "target.h"
47 #include "langhooks.h"
48 #include "basic-block.h"
49 #include "tree-mudflap.h"
50 #include "tree-flow.h"
51 #include "value-prof.h"
52 #include "diagnostic-core.h"
53
54 #ifndef SLOW_UNALIGNED_ACCESS
55 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
56 #endif
57
58 #ifndef PAD_VARARGS_DOWN
59 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
60 #endif
61 static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));
62
/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[4]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

/* Printable name of each builtin, indexed by enum built_in_function.
   The table is generated by expanding DEF_BUILTIN over builtins.def,
   stringizing the first macro argument.  */
#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN

/* Setup an array of _DECL trees, make sure each element is
   initialized to NULL_TREE.  */
tree built_in_decls[(int) END_BUILTINS];
/* Declarations used when constructing the builtin implicitly in the compiler.
   It may be NULL_TREE when this is invalid (for instance runtime is not
   required to implement the function call in all cases).  */
tree implicit_built_in_decls[(int) END_BUILTINS];
81
82 static const char *c_getstr (tree);
83 static rtx c_readstr (const char *, enum machine_mode);
84 static int target_char_cast (tree, char *);
85 static rtx get_memory_rtx (tree, tree);
86 static int apply_args_size (void);
87 static int apply_result_size (void);
88 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
89 static rtx result_vector (int, rtx);
90 #endif
91 static void expand_builtin_update_setjmp_buf (rtx);
92 static void expand_builtin_prefetch (tree);
93 static rtx expand_builtin_apply_args (void);
94 static rtx expand_builtin_apply_args_1 (void);
95 static rtx expand_builtin_apply (rtx, rtx, rtx);
96 static void expand_builtin_return (rtx);
97 static enum type_class type_to_class (tree);
98 static rtx expand_builtin_classify_type (tree);
99 static void expand_errno_check (tree, rtx);
100 static rtx expand_builtin_mathfn (tree, rtx, rtx);
101 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
102 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
103 static rtx expand_builtin_interclass_mathfn (tree, rtx, rtx);
104 static rtx expand_builtin_sincos (tree);
105 static rtx expand_builtin_cexpi (tree, rtx, rtx);
106 static rtx expand_builtin_int_roundingfn (tree, rtx);
107 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
108 static rtx expand_builtin_args_info (tree);
109 static rtx expand_builtin_next_arg (void);
110 static rtx expand_builtin_va_start (tree);
111 static rtx expand_builtin_va_end (tree);
112 static rtx expand_builtin_va_copy (tree);
113 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
114 static rtx expand_builtin_strcmp (tree, rtx);
115 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
116 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
117 static rtx expand_builtin_memcpy (tree, rtx);
118 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
119 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
120 enum machine_mode, int);
121 static rtx expand_builtin_strcpy (tree, rtx);
122 static rtx expand_builtin_strcpy_args (tree, tree, rtx);
123 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
124 static rtx expand_builtin_strncpy (tree, rtx);
125 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
126 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
127 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
128 static rtx expand_builtin_bzero (tree);
129 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
130 static rtx expand_builtin_alloca (tree, rtx);
131 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
132 static rtx expand_builtin_frame_address (tree, tree);
133 static tree stabilize_va_list_loc (location_t, tree, int);
134 static rtx expand_builtin_expect (tree, rtx);
135 static tree fold_builtin_constant_p (tree);
136 static tree fold_builtin_expect (location_t, tree, tree);
137 static tree fold_builtin_classify_type (tree);
138 static tree fold_builtin_strlen (location_t, tree, tree);
139 static tree fold_builtin_inf (location_t, tree, int);
140 static tree fold_builtin_nan (tree, tree, int);
141 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
142 static bool validate_arg (const_tree, enum tree_code code);
143 static bool integer_valued_real_p (tree);
144 static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
145 static bool readonly_data_expr (tree);
146 static rtx expand_builtin_fabs (tree, rtx, rtx);
147 static rtx expand_builtin_signbit (tree, rtx);
148 static tree fold_builtin_sqrt (location_t, tree, tree);
149 static tree fold_builtin_cbrt (location_t, tree, tree);
150 static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
151 static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
152 static tree fold_builtin_cos (location_t, tree, tree, tree);
153 static tree fold_builtin_cosh (location_t, tree, tree, tree);
154 static tree fold_builtin_tan (tree, tree);
155 static tree fold_builtin_trunc (location_t, tree, tree);
156 static tree fold_builtin_floor (location_t, tree, tree);
157 static tree fold_builtin_ceil (location_t, tree, tree);
158 static tree fold_builtin_round (location_t, tree, tree);
159 static tree fold_builtin_int_roundingfn (location_t, tree, tree);
160 static tree fold_builtin_bitop (tree, tree);
161 static tree fold_builtin_memory_op (location_t, tree, tree, tree, tree, bool, int);
162 static tree fold_builtin_strchr (location_t, tree, tree, tree);
163 static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
164 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
165 static tree fold_builtin_strcmp (location_t, tree, tree);
166 static tree fold_builtin_strncmp (location_t, tree, tree, tree);
167 static tree fold_builtin_signbit (location_t, tree, tree);
168 static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
169 static tree fold_builtin_isascii (location_t, tree);
170 static tree fold_builtin_toascii (location_t, tree);
171 static tree fold_builtin_isdigit (location_t, tree);
172 static tree fold_builtin_fabs (location_t, tree, tree);
173 static tree fold_builtin_abs (location_t, tree, tree);
174 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
175 enum tree_code);
176 static tree fold_builtin_n (location_t, tree, tree *, int, bool);
177 static tree fold_builtin_0 (location_t, tree, bool);
178 static tree fold_builtin_1 (location_t, tree, tree, bool);
179 static tree fold_builtin_2 (location_t, tree, tree, tree, bool);
180 static tree fold_builtin_3 (location_t, tree, tree, tree, tree, bool);
181 static tree fold_builtin_4 (location_t, tree, tree, tree, tree, tree, bool);
182 static tree fold_builtin_varargs (location_t, tree, tree, bool);
183
184 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
185 static tree fold_builtin_strstr (location_t, tree, tree, tree);
186 static tree fold_builtin_strrchr (location_t, tree, tree, tree);
187 static tree fold_builtin_strcat (location_t, tree, tree);
188 static tree fold_builtin_strncat (location_t, tree, tree, tree);
189 static tree fold_builtin_strspn (location_t, tree, tree);
190 static tree fold_builtin_strcspn (location_t, tree, tree);
191 static tree fold_builtin_sprintf (location_t, tree, tree, tree, int);
192
193 static rtx expand_builtin_object_size (tree);
194 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
195 enum built_in_function);
196 static void maybe_emit_chk_warning (tree, enum built_in_function);
197 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
198 static void maybe_emit_free_warning (tree);
199 static tree fold_builtin_object_size (tree, tree);
200 static tree fold_builtin_strcat_chk (location_t, tree, tree, tree, tree);
201 static tree fold_builtin_strncat_chk (location_t, tree, tree, tree, tree, tree);
202 static tree fold_builtin_sprintf_chk (location_t, tree, enum built_in_function);
203 static tree fold_builtin_printf (location_t, tree, tree, tree, bool, enum built_in_function);
204 static tree fold_builtin_fprintf (location_t, tree, tree, tree, tree, bool,
205 enum built_in_function);
206 static bool init_target_chars (void);
207
208 static unsigned HOST_WIDE_INT target_newline;
209 static unsigned HOST_WIDE_INT target_percent;
210 static unsigned HOST_WIDE_INT target_c;
211 static unsigned HOST_WIDE_INT target_s;
212 static char target_percent_c[3];
213 static char target_percent_s[3];
214 static char target_percent_s_newline[4];
215 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
216 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
217 static tree do_mpfr_arg2 (tree, tree, tree,
218 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
219 static tree do_mpfr_arg3 (tree, tree, tree, tree,
220 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
221 static tree do_mpfr_sincos (tree, tree, tree);
222 static tree do_mpfr_bessel_n (tree, tree, tree,
223 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
224 const REAL_VALUE_TYPE *, bool);
225 static tree do_mpfr_remquo (tree, tree, tree);
226 static tree do_mpfr_lgamma_r (tree, tree, tree);
227
/* Return true if NAME starts with __builtin_ or __sync_.  */

bool
is_builtin_name (const char *name)
{
  /* These are the two reserved prefixes the middle end recognizes;
     strlen of a literal folds to a constant, so this costs the same
     as hard-coding the lengths.  */
  return (strncmp (name, "__builtin_", strlen ("__builtin_")) == 0
	  || strncmp (name, "__sync_", strlen ("__sync_")) == 0);
}
239
240
241 /* Return true if DECL is a function symbol representing a built-in. */
242
243 bool
244 is_builtin_fn (tree decl)
245 {
246 return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
247 }
248
249
250 /* Return true if NODE should be considered for inline expansion regardless
251 of the optimization level. This means whenever a function is invoked with
252 its "internal" name, which normally contains the prefix "__builtin". */
253
254 static bool
255 called_as_built_in (tree node)
256 {
257 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
258 we want the name used to call the function, not the name it
259 will have. */
260 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
261 return is_builtin_name (name);
262 }
263
/* Return the alignment in bits of EXP, an object.
   Don't return more than MAX_ALIGN no matter what, ALIGN is the initial
   guessed alignment e.g. from type alignment.  */

int
get_object_alignment (tree exp, unsigned int align, unsigned int max_align)
{
  /* INNER is the alignment guaranteed by EXP's position within its
     containing object; it only ever decreases from MAX_ALIGN.  */
  unsigned int inner;

  inner = max_align;
  if (handled_component_p (exp))
    {
      HOST_WIDE_INT bitsize, bitpos;
      tree offset;
      enum machine_mode mode;
      int unsignedp, volatilep;

      /* Decompose the reference into a base object plus a constant bit
	 position BITPOS and a variable byte OFFSET expression.  */
      exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
				 &mode, &unsignedp, &volatilep, true);
      /* X & -X isolates the lowest set bit, i.e. the largest power of
	 two dividing X — the alignment that BITPOS guarantees.  */
      if (bitpos)
	inner = MIN (inner, (unsigned) (bitpos & -bitpos));
      /* Walk a (possibly nested PLUS_EXPR) offset expression, lowering
	 INNER for each term whose divisibility we can bound.  */
      while (offset)
	{
	  tree next_offset;

	  if (TREE_CODE (offset) == PLUS_EXPR)
	    {
	      next_offset = TREE_OPERAND (offset, 0);
	      offset = TREE_OPERAND (offset, 1);
	    }
	  else
	    next_offset = NULL;
	  if (host_integerp (offset, 1))
	    {
	      /* Any overflow in calculating offset_bits won't change
		 the alignment.  */
	      unsigned offset_bits
		= ((unsigned) tree_low_cst (offset, 1) * BITS_PER_UNIT);

	      if (offset_bits)
		inner = MIN (inner, (offset_bits & -offset_bits));
	    }
	  else if (TREE_CODE (offset) == MULT_EXPR
		   && host_integerp (TREE_OPERAND (offset, 1), 1))
	    {
	      /* Any overflow in calculating offset_factor won't change
		 the alignment.  */
	      unsigned offset_factor
		= ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
		   * BITS_PER_UNIT);

	      if (offset_factor)
		inner = MIN (inner, (offset_factor & -offset_factor));
	    }
	  else
	    {
	      /* Unknown offset form: assume only byte alignment.  */
	      inner = MIN (inner, BITS_PER_UNIT);
	      break;
	    }
	  offset = next_offset;
	}
    }
  /* A CONST_DECL stands for its initializer; look at that instead.  */
  if (TREE_CODE (exp) == CONST_DECL)
    exp = DECL_INITIAL (exp);
  if (DECL_P (exp)
      && TREE_CODE (exp) != LABEL_DECL)
    align = MIN (inner, DECL_ALIGN (exp));
#ifdef CONSTANT_ALIGNMENT
  else if (CONSTANT_CLASS_P (exp))
    align = MIN (inner, (unsigned)CONSTANT_ALIGNMENT (exp, align));
#endif
  else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR
	   || TREE_CODE (exp) == INDIRECT_REF)
    align = MIN (TYPE_ALIGN (TREE_TYPE (exp)), inner);
  else
    align = MIN (align, inner);
  /* Never exceed the caller-imposed cap.  */
  return MIN (align, max_align);
}
342
343 /* Returns true iff we can trust that alignment information has been
344 calculated properly. */
345
346 bool
347 can_trust_pointer_alignment (void)
348 {
349 /* We rely on TER to compute accurate alignment information. */
350 return (optimize && flag_tree_ter);
351 }
352
/* Return the alignment in bits of EXP, a pointer valued expression.
   But don't return more than MAX_ALIGN no matter what.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

int
get_pointer_alignment (tree exp, unsigned int max_align)
{
  unsigned int align, inner;

  if (!can_trust_pointer_alignment ())
    return 0;

  if (!POINTER_TYPE_P (TREE_TYPE (exp)))
    return 0;

  /* Start from the alignment of the pointed-to type, capped by the
     caller's limit.  */
  align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
  align = MIN (align, max_align);

  /* Strip conversions and pointer arithmetic, refining ALIGN; the
     default case and the early returns terminate the loop.  */
  while (1)
    {
      switch (TREE_CODE (exp))
	{
	CASE_CONVERT:
	  /* Look through the conversion and take the tighter of the
	     inner pointed-to type's alignment and MAX_ALIGN.  */
	  exp = TREE_OPERAND (exp, 0);
	  if (! POINTER_TYPE_P (TREE_TYPE (exp)))
	    return align;

	  inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
	  align = MIN (inner, max_align);
	  break;

	case POINTER_PLUS_EXPR:
	  /* If sum of pointer + int, restrict our maximum alignment to that
	     imposed by the integer.  If not, we can't do any better than
	     ALIGN.  */
	  if (! host_integerp (TREE_OPERAND (exp, 1), 1))
	    return align;

	  /* Halve MAX_ALIGN until the constant addend is a multiple of
	     MAX_ALIGN bytes.  */
	  while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
		  & (max_align / BITS_PER_UNIT - 1))
		 != 0)
	    max_align >>= 1;

	  exp = TREE_OPERAND (exp, 0);
	  break;

	case ADDR_EXPR:
	  /* See what we are pointing at and look at its alignment.  */
	  return get_object_alignment (TREE_OPERAND (exp, 0), align, max_align);

	default:
	  return align;
	}
    }
}
412
/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  tree offset_node;
  HOST_WIDE_INT offset;
  int max;
  const char *ptr;
  location_t loc;

  STRIP_NOPS (src);
  /* A conditional has a known length only when both arms agree; a
     side-effecting condition blocks this unless ONLY_VALUE says the
     result will not be emitted.  */
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  /* (e1, e2) has the length of e2, provided discarding e1 is safe.  */
  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  if (EXPR_HAS_LOCATION (src))
    loc = EXPR_LOCATION (src);
  else
    loc = input_location;

  src = string_constant (src, &offset_node);
  if (src == 0)
    return NULL_TREE;

  /* MAX is the largest valid index into the array (size minus one).  */
  max = TREE_STRING_LENGTH (src) - 1;
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      int i;

      for (i = 0; i < max; i++)
	if (ptr[i] == 0)
	  return NULL_TREE;

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  This would perhaps not be valid if we were dealing
	 with named arrays in addition to literal string constants.  */

      return size_diffop_loc (loc, size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (offset_node == 0)
    offset = 0;
  else if (! host_integerp (offset_node, 0))
    offset = -1;
  else
    offset = tree_low_cst (offset_node, 0);

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (! TREE_NO_WARNING (src))
	{
	  warning_at (loc, 0, "offset outside bounds of constant string");
	  TREE_NO_WARNING (src) = 1;
	}
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return ssize_int (strlen (ptr + offset));
}
517
518 /* Return a char pointer for a C string if it is a string constant
519 or sum of string constant and integer constant. */
520
521 static const char *
522 c_getstr (tree src)
523 {
524 tree offset_node;
525
526 src = string_constant (src, &offset_node);
527 if (src == 0)
528 return 0;
529
530 if (offset_node == 0)
531 return TREE_STRING_POINTER (src);
532 else if (!host_integerp (offset_node, 1)
533 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
534 return 0;
535
536 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
537 }
538
/* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, enum machine_mode mode)
{
  /* C[0] and C[1] accumulate the low and high host words of the
     resulting constant.  */
  HOST_WIDE_INT c[2];
  HOST_WIDE_INT ch;
  unsigned int i, j;

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);

  c[0] = 0;
  c[1] = 0;
  /* CH doubles as the current byte and as a "still inside the string"
     flag: once a NUL byte is read, CH stays zero, so every byte past
     the terminator contributes zero.  */
  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      /* Compute J, the bit position of source byte I in the target
	 value, taking byte order and word order into account.  */
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
	/* Mixed endianness: flip the byte's position within its word.  */
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;
      gcc_assert (j < 2 * HOST_BITS_PER_WIDE_INT);

      if (ch)
	ch = (unsigned char) str[i];
      c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }
  return immed_double_const (c[0], c[1], mode);
}
571
572 /* Cast a target constant CST to target CHAR and if that value fits into
573 host char type, return zero and put that value into variable pointed to by
574 P. */
575
576 static int
577 target_char_cast (tree cst, char *p)
578 {
579 unsigned HOST_WIDE_INT val, hostval;
580
581 if (!host_integerp (cst, 1)
582 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
583 return 1;
584
585 val = tree_low_cst (cst, 1);
586 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
587 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
588
589 hostval = val;
590 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
591 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
592
593 if (val != hostval)
594 return 1;
595
596 *p = hostval;
597 return 0;
598 }
599
600 /* Similar to save_expr, but assumes that arbitrary code is not executed
601 in between the multiple evaluations. In particular, we assume that a
602 non-addressable local variable will not be modified. */
603
604 static tree
605 builtin_save_expr (tree exp)
606 {
607 if (TREE_ADDRESSABLE (exp) == 0
608 && (TREE_CODE (exp) == PARM_DECL
609 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp))))
610 return exp;
611
612 return save_expr (exp);
613 }
614
/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;

#ifdef INITIAL_FRAME_ADDRESS_RTX
  /* The target supplies the starting frame address directly.  */
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
#else
  rtx tem;

  /* For a zero count with __builtin_return_address, we don't care what
     frame address we return, because target-specific definitions will
     override us.  Therefore frame pointer elimination is OK, and using
     the soft frame pointer is OK.

     For a nonzero count, or a zero count with __builtin_frame_address,
     we require a stable offset from the current frame pointer to the
     previous one, so we must use the hard frame pointer, and
     we must disable frame pointer elimination.  */
  if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    tem = frame_pointer_rtx;
  else
    {
      tem = hard_frame_pointer_rtx;

      /* Tell reload not to eliminate the frame pointer.  */
      crtl->accesses_prior_frames = 1;
    }
#endif

  /* Some machines need special handling before we can access
     arbitrary frames.  For example, on the SPARC, we must first flush
     all register windows to the stack.  */
#ifdef SETUP_FRAME_ADDRESSES
  if (count > 0)
    SETUP_FRAME_ADDRESSES ();
#endif

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
  if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;
#endif

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
#ifdef DYNAMIC_CHAIN_ADDRESS
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
#ifdef FRAME_ADDR_RTX
    return FRAME_ADDR_RTX (tem);
#else
    return tem;
#endif

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  /* By default, read the word one pointer-size past the frame address.  */
  tem = memory_address (Pmode,
			plus_constant (tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
698
/* Alias set used for setjmp buffer.  Allocated lazily on first use;
   -1 means "not yet created".  */
static alias_set_type setjmp_alias_set = -1;
701
702 /* Construct the leading half of a __builtin_setjmp call. Control will
703 return to RECEIVER_LABEL. This is also called directly by the SJLJ
704 exception handling code. */
705
706 void
707 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
708 {
709 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
710 rtx stack_save;
711 rtx mem;
712
713 if (setjmp_alias_set == -1)
714 setjmp_alias_set = new_alias_set ();
715
716 buf_addr = convert_memory_address (Pmode, buf_addr);
717
718 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
719
720 /* We store the frame pointer and the address of receiver_label in
721 the buffer and use the rest of it for the stack save area, which
722 is machine-dependent. */
723
724 mem = gen_rtx_MEM (Pmode, buf_addr);
725 set_mem_alias_set (mem, setjmp_alias_set);
726 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
727
728 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
729 set_mem_alias_set (mem, setjmp_alias_set);
730
731 emit_move_insn (validize_mem (mem),
732 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
733
734 stack_save = gen_rtx_MEM (sa_mode,
735 plus_constant (buf_addr,
736 2 * GET_MODE_SIZE (Pmode)));
737 set_mem_alias_set (stack_save, setjmp_alias_set);
738 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
739
740 /* If there is further processing to do, do it. */
741 #ifdef HAVE_builtin_setjmp_setup
742 if (HAVE_builtin_setjmp_setup)
743 emit_insn (gen_builtin_setjmp_setup (buf_addr));
744 #endif
745
746 /* Tell optimize_save_area_alloca that extra work is going to
747 need to go on during alloca. */
748 cfun->calls_setjmp = 1;
749
750 /* We have a nonlocal label. */
751 cfun->has_nonlocal_label = 1;
752 }
753
/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.

   RECEIVER_LABEL is only referenced when the target provides a
   builtin_setjmp_receiver pattern, hence ATTRIBUTE_UNUSED.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
{
  rtx chain;

  /* Clobber the FP when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = targetm.calls.static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
#ifdef HAVE_nonlocal_goto
  if (! HAVE_nonlocal_goto)
#endif
    {
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
      /* This might change the hard frame pointer in ways that aren't
	 apparent to early optimization passes, so force a clobber.  */
      emit_clobber (hard_frame_pointer_rtx);
    }

#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      /* If the target eliminates the arg pointer into the hard frame
	 pointer, no explicit restore of the arg pointer is needed.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }
#endif

  /* Give the target a chance to emit its own receiver code; the
     SJLJ nonlocal-goto receiver is the fallback.  */
#ifdef HAVE_builtin_setjmp_receiver
  if (HAVE_builtin_setjmp_receiver)
    emit_insn (gen_builtin_setjmp_receiver (receiver_label));
  else
#endif
#ifdef HAVE_nonlocal_goto_receiver
  if (HAVE_nonlocal_goto_receiver)
    emit_insn (gen_nonlocal_goto_receiver ());
  else
#endif
    { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}
824
/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.

   BUF_ADDR is the jmp_buf filled in by expand_builtin_setjmp_setup;
   VALUE must be const1_rtx (asserted below).  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack, insn, last;
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to current
     function  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
#ifdef HAVE_builtin_longjmp
  if (HAVE_builtin_longjmp)
    emit_insn (gen_builtin_longjmp (buf_addr));
  else
#endif
    {
      /* The buffer layout mirrors expand_builtin_setjmp_setup: frame
	 pointer, receiver label, then the stack save area.  */
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
#ifdef HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
      else
#endif
	{
	  lab = copy_to_reg (lab);

	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  /* Restore the saved frame pointer and stack pointer, then
	     jump indirectly to the receiver label.  */
	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}
912
913 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
914 and the address of the save area. */
915
static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp, insn;

  /* Expect exactly two pointer arguments: the target label address and
     the save area address; otherwise decline to expand inline.  */
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was based
     on the frame pointer.   */
  r_save_area = copy_to_reg (r_save_area);
  /* Save area layout: word 0 = saved frame pointer, word 1 = saved
     stack pointer (in the target's nonlocal save-area mode).  */
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

#ifdef HAVE_nonlocal_goto
  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (HAVE_nonlocal_goto)
    emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
#endif
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.
	 This sets the actual hard register used for the frame pointer
	 to the location of the function's incoming static chain info.
	 The non-local goto handler will then adjust it to contain the
	 proper value and reload the argument pointer, if needed.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp, NULL_RTX);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}
997
998 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
999 (not all will be used on all machines) that was passed to __builtin_setjmp.
1000 It updates the stack pointer in that block to correspond to the current
1001 stack pointer. */
1002
static void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  enum machine_mode sa_mode = Pmode;
  rtx stack_save;


  /* Determine the mode used to save the stack pointer.  The insn_data
     lookup below is overridden by STACK_SAVEAREA_MODE when that macro
     is defined, so the second #ifdef intentionally wins.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    sa_mode = insn_data[(int) CODE_FOR_save_stack_nonlocal].operand[0].mode;
#endif
#ifdef STACK_SAVEAREA_MODE
  sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
#endif

  /* The stack pointer lives in word 2 of the setjmp buffer (words 0
     and 1 hold the frame pointer and resume label).  */
  stack_save
    = gen_rtx_MEM (sa_mode,
		   memory_address
		   (sa_mode,
		    plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));

#ifdef HAVE_setjmp
  if (HAVE_setjmp)
    emit_insn (gen_setjmp ());
#endif

  emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
}
1031
1032 /* Expand a call to __builtin_prefetch. For a target that does not support
1033 data prefetch, evaluate the memory address argument in case it has side
1034 effects. */
1035
static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = build_int_cst (NULL_TREE, 3);

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.
     On a bad value we diagnose but keep going with the default so that
     expansion still produces something.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

#ifdef HAVE_prefetch
  if (HAVE_prefetch)
    {
      /* Legitimize the address only if the prefetch pattern's operand
	 predicate rejects it or it is not already in Pmode.  */
      if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
	     (op0,
	      insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
	  || (GET_MODE (op0) != Pmode))
	{
	  op0 = convert_memory_address (Pmode, op0);
	  op0 = force_reg (Pmode, op0);
	}
      emit_insn (gen_prefetch (op0, op1, op2));
    }
#endif

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
1113
1114 /* Get a MEM rtx for expression EXP which is the address of an operand
1115 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1116 the maximum length of the block of memory that might be accessed or
1117 NULL if unknown. */
1118
static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;
  HOST_WIDE_INT off;

  /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
     from its expression, for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  /* Note: ORIG_EXP (not the possibly-stripped EXP) is what we expand.  */
  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     If it is an ADDR_EXPR, use the operand.  Otherwise, dereference it if
     we can.  First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* OFF records a positive constant byte offset peeled off a
     POINTER_PLUS_EXPR; it is re-applied to MEM further down.  */
  off = 0;
  if (TREE_CODE (exp) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
      && host_integerp (TREE_OPERAND (exp, 1), 0)
      && (off = tree_low_cst (TREE_OPERAND (exp, 1), 0)) > 0)
    exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  else if (TREE_CODE (exp) == ADDR_EXPR)
    exp = TREE_OPERAND (exp, 0);
  else if (POINTER_TYPE_P (TREE_TYPE (exp)))
    exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
  else
    exp = NULL;

  /* Honor attributes derived from exp, except for the alias set
     (as builtin stringops may alias with anything) and the size
     (as stringops may access multiple array elements).  */
  if (exp)
    {
      set_mem_attributes (mem, exp, 0);

      if (off)
	mem = adjust_automodify_address_nv (mem, BLKmode, NULL, off);

      /* Allow the string and memory builtins to overflow from one
	 field into another, see http://gcc.gnu.org/PR23561.
	 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
	 memory accessed by the string or memory builtin will fit
	 within the field.  */
      if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
	{
	  tree mem_expr = MEM_EXPR (mem);
	  /* OFFSET/LENGTH use -1 as "unknown".  */
	  HOST_WIDE_INT offset = -1, length = -1;
	  tree inner = exp;

	  while (TREE_CODE (inner) == ARRAY_REF
		 || CONVERT_EXPR_P (inner)
		 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
		 || TREE_CODE (inner) == SAVE_EXPR)
	    inner = TREE_OPERAND (inner, 0);

	  gcc_assert (TREE_CODE (inner) == COMPONENT_REF);

	  if (MEM_OFFSET (mem)
	      && CONST_INT_P (MEM_OFFSET (mem)))
	    offset = INTVAL (MEM_OFFSET (mem));

	  if (offset >= 0 && len && host_integerp (len, 0))
	    length = tree_low_cst (len, 0);

	  /* Walk outward through nested COMPONENT_REFs, keeping the
	     reference only if the access provably fits in a field.  */
	  while (TREE_CODE (inner) == COMPONENT_REF)
	    {
	      tree field = TREE_OPERAND (inner, 1);
	      gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
	      gcc_assert (field == TREE_OPERAND (mem_expr, 1));

	      /* Bitfields are generally not byte-addressable.  */
	      gcc_assert (!DECL_BIT_FIELD (field)
			  || ((tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
			       % BITS_PER_UNIT) == 0
			      && host_integerp (DECL_SIZE (field), 0)
			      && (TREE_INT_CST_LOW (DECL_SIZE (field))
				  % BITS_PER_UNIT) == 0));

	      /* If we can prove that the memory starting at XEXP (mem, 0) and
		 ending at XEXP (mem, 0) + LENGTH will fit into this field, we
		 can keep the COMPONENT_REF in MEM_EXPR.  But be careful with
		 fields without DECL_SIZE_UNIT like flexible array members.  */
	      if (length >= 0
		  && DECL_SIZE_UNIT (field)
		  && host_integerp (DECL_SIZE_UNIT (field), 0))
		{
		  HOST_WIDE_INT size
		    = TREE_INT_CST_LOW (DECL_SIZE_UNIT (field));
		  if (offset <= size
		      && length <= size
		      && offset + length <= size)
		    break;
		}

	      if (offset >= 0
		  && host_integerp (DECL_FIELD_OFFSET (field), 0))
		offset += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
			  + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
			    / BITS_PER_UNIT;
	      else
		{
		  offset = -1;
		  length = -1;
		}

	      mem_expr = TREE_OPERAND (mem_expr, 0);
	      inner = TREE_OPERAND (inner, 0);
	    }

	  if (mem_expr == NULL)
	    offset = -1;
	  if (mem_expr != MEM_EXPR (mem))
	    {
	      set_mem_expr (mem, mem_expr);
	      set_mem_offset (mem, offset >= 0 ? GEN_INT (offset) : NULL_RTX);
	    }
	}
      set_mem_alias_set (mem, 0);
      set_mem_size (mem, NULL_RTX);
    }

  return mem;
}
1249 \f
1250 /* Built-in functions to perform an untyped call and return. */
1251
1252 /* For each register that may be used for calling a function, this
1253 gives a mode used to copy the register's value. VOIDmode indicates
1254 the register is not used for calling a function. If the machine
1255 has register windows, this gives only the outbound registers.
1256 INCOMING_REGNO gives the corresponding inbound register. */
1257 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
1258
1259 /* For each register that may be used for returning values, this gives
1260 a mode used to copy the register's value. VOIDmode indicates the
1261 register is not used for returning values. If the machine has
1262 register windows, this gives only the outbound registers.
1263 INCOMING_REGNO gives the corresponding inbound register. */
1264 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
1265
1266 /* Return the size required for the block returned by __builtin_apply_args,
1267 and initialize apply_args_mode. */
1268
1269 static int
1270 apply_args_size (void)
1271 {
1272 static int size = -1;
1273 int align;
1274 unsigned int regno;
1275 enum machine_mode mode;
1276
1277 /* The values computed by this function never change. */
1278 if (size < 0)
1279 {
1280 /* The first value is the incoming arg-pointer. */
1281 size = GET_MODE_SIZE (Pmode);
1282
1283 /* The second value is the structure value address unless this is
1284 passed as an "invisible" first argument. */
1285 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1286 size += GET_MODE_SIZE (Pmode);
1287
1288 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1289 if (FUNCTION_ARG_REGNO_P (regno))
1290 {
1291 mode = reg_raw_mode[regno];
1292
1293 gcc_assert (mode != VOIDmode);
1294
1295 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1296 if (size % align != 0)
1297 size = CEIL (size, align) * align;
1298 size += GET_MODE_SIZE (mode);
1299 apply_args_mode[regno] = mode;
1300 }
1301 else
1302 {
1303 apply_args_mode[regno] = VOIDmode;
1304 }
1305 }
1306 return size;
1307 }
1308
1309 /* Return the size required for the block returned by __builtin_apply,
1310 and initialize apply_result_mode. */
1311
1312 static int
1313 apply_result_size (void)
1314 {
1315 static int size = -1;
1316 int align, regno;
1317 enum machine_mode mode;
1318
1319 /* The values computed by this function never change. */
1320 if (size < 0)
1321 {
1322 size = 0;
1323
1324 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1325 if (targetm.calls.function_value_regno_p (regno))
1326 {
1327 mode = reg_raw_mode[regno];
1328
1329 gcc_assert (mode != VOIDmode);
1330
1331 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1332 if (size % align != 0)
1333 size = CEIL (size, align) * align;
1334 size += GET_MODE_SIZE (mode);
1335 apply_result_mode[regno] = mode;
1336 }
1337 else
1338 apply_result_mode[regno] = VOIDmode;
1339
1340 /* Allow targets that use untyped_call and untyped_return to override
1341 the size so that machine-specific information can be stored here. */
1342 #ifdef APPLY_RESULT_SIZE
1343 size = APPLY_RESULT_SIZE;
1344 #endif
1345 }
1346 return size;
1347 }
1348
1349 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1350 /* Create a vector describing the result block RESULT. If SAVEP is true,
1351 the result block is used to save the values; otherwise it is used to
1352 restore the values. */
1353
1354 static rtx
1355 result_vector (int savep, rtx result)
1356 {
1357 int regno, size, align, nelts;
1358 enum machine_mode mode;
1359 rtx reg, mem;
1360 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1361
1362 size = nelts = 0;
1363 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1364 if ((mode = apply_result_mode[regno]) != VOIDmode)
1365 {
1366 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1367 if (size % align != 0)
1368 size = CEIL (size, align) * align;
1369 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1370 mem = adjust_address (result, mode, size);
1371 savevec[nelts++] = (savep
1372 ? gen_rtx_SET (VOIDmode, mem, reg)
1373 : gen_rtx_SET (VOIDmode, reg, mem));
1374 size += GET_MODE_SIZE (mode);
1375 }
1376 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1377 }
1378 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1379
1380 /* Save the state required to perform an untyped call with the same
1381 arguments as were passed to the current function. */
1382
static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  enum machine_mode mode;
  rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.
     The layout here must match apply_args_size exactly.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
#ifdef STACK_GROWS_DOWNWARD
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  tem
    = force_operand (plus_constant (tem, crtl->args.pretend_args_size),
		     NULL_RTX);
#endif
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
		      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}
1440
1441 /* __builtin_apply_args returns block of memory allocated on
1442 the stack into which is stored the arg pointer, structure
1443 value address, static chain, and all the registers that might
1444 possibly be used in performing a function call. The code is
1445 moved to the start of the function so the incoming values are
1446 saved. */
1447
static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;
    rtx seq;

    /* Expand into a detached sequence so the insns can be moved.  */
    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
	&& REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}
1485
1486 /* Perform an untyped call and save the state required to perform an
1487 untyped return of whatever value was returned by the given function. */
1488
static rtx
expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
{
  int size, align, regno;
  enum machine_mode mode;
  rtx incoming_args, result, reg, dest, src, call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;
  rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);

  arguments = convert_memory_address (Pmode, arguments);

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
#ifndef STACK_GROWS_DOWNWARD
  incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
				       incoming_args, 0, OPTAB_LIB_WIDEN);
#endif

  /* Push a new argument block and copy the arguments.  Do not allow
     the (potential) memcpy call below to interfere with our stack
     manipulations.  */
  do_pending_stack_adjust ();
  NO_DEFER_POP;

  /* Save the stack with nonlocal if available.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
  else
#endif
    emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);

  /* Allocate a block of memory onto the stack and copy the memory
     arguments to the outgoing arguments address.  */
  allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);

  /* Set DRAP flag to true, even though allocate_dynamic_stack_space
     may have already set current_function_calls_alloca to true.
     current_function_calls_alloca won't be set if argsize is zero,
     so we have to guarantee need_drap is true here.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  dest = virtual_outgoing_args_rtx;
#ifndef STACK_GROWS_DOWNWARD
  if (CONST_INT_P (argsize))
    dest = plus_constant (dest, -INTVAL (argsize));
  else
    dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
#endif
  dest = gen_rtx_MEM (BLKmode, dest);
  set_mem_align (dest, PARM_BOUNDARY);
  src = gen_rtx_MEM (BLKmode, incoming_args);
  set_mem_align (src, PARM_BOUNDARY);
  emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);

  /* Refer to the argument block.  Note: apply_args_size is called here
     for its side effect of initializing apply_args_mode.  */
  apply_args_size ();
  arguments = gen_rtx_MEM (BLKmode, arguments);
  set_mem_align (arguments, PARM_BOUNDARY);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, regno);
	emit_move_insn (reg, adjust_address (arguments, mode, size));
	use_reg (&call_fusage, reg);
	size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value, adjust_address (arguments, Pmode, size));
      emit_move_insn (struct_value, value);
      if (REG_P (struct_value))
	use_reg (&call_fusage, struct_value);
      size += GET_MODE_SIZE (Pmode);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
#ifdef HAVE_untyped_call
  if (HAVE_untyped_call)
    emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
				      result, result_vector (1, result)));
  else
#endif
#ifdef HAVE_call_value
  if (HAVE_call_value)
    {
      rtx valreg = 0;

      /* Locate the unique return register.  It is not possible to
	 express a call that sets more than one return register using
	 call_value; use untyped_call for that.  In fact, untyped_call
	 only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if ((mode = apply_result_mode[regno]) != VOIDmode)
	  {
	    gcc_assert (!valreg); /* HAVE_untyped_call required.  */

	    valreg = gen_rtx_REG (mode, regno);
	  }

      emit_call_insn (GEN_CALL_VALUE (valreg,
				      gen_rtx_MEM (FUNCTION_MODE, function),
				      const0_rtx, NULL_RTX, const0_rtx));

      emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
    }
  else
#endif
    gcc_unreachable ();

  /* Find the CALL insn we just emitted, and attach the register usage
     information.  */
  call_insn = last_call_insn ();
  add_function_usage_to (call_insn, call_fusage);

  /* Restore the stack.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
  else
#endif
    emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);

  OK_DEFER_POP;

  /* Return the address of the result block.  */
  result = copy_addr_to_reg (XEXP (result, 0));
  return convert_memory_address (ptr_mode, result);
}
1649
1650 /* Perform an untyped return. */
1651
static void
expand_builtin_return (rtx result)
{
  int size, align, regno;
  enum machine_mode mode;
  rtx reg;
  rtx call_fusage = 0;

  result = convert_memory_address (Pmode, result);

  /* Called for its side effect of initializing apply_result_mode.  */
  apply_result_size ();
  result = gen_rtx_MEM (BLKmode, result);

#ifdef HAVE_untyped_return
  if (HAVE_untyped_return)
    {
      emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
      emit_barrier ();
      return;
    }
#endif

  /* Restore the return value and note that each value is used.  */
  size = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
	emit_move_insn (reg, adjust_address (result, mode, size));

	/* Accumulate the USE insns in a separate sequence so they can
	   all be emitted together after the moves.  */
	push_to_sequence (call_fusage);
	emit_use (reg);
	call_fusage = get_insns ();
	end_sequence ();
	size += GET_MODE_SIZE (mode);
      }

  /* Put the USE insns before the return.  */
  emit_insn (call_fusage);

  /* Return whatever values was restored by jumping directly to the end
     of the function.  */
  expand_naked_return ();
}
1699
1700 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1701
1702 static enum type_class
1703 type_to_class (tree type)
1704 {
1705 switch (TREE_CODE (type))
1706 {
1707 case VOID_TYPE: return void_type_class;
1708 case INTEGER_TYPE: return integer_type_class;
1709 case ENUMERAL_TYPE: return enumeral_type_class;
1710 case BOOLEAN_TYPE: return boolean_type_class;
1711 case POINTER_TYPE: return pointer_type_class;
1712 case REFERENCE_TYPE: return reference_type_class;
1713 case OFFSET_TYPE: return offset_type_class;
1714 case REAL_TYPE: return real_type_class;
1715 case COMPLEX_TYPE: return complex_type_class;
1716 case FUNCTION_TYPE: return function_type_class;
1717 case METHOD_TYPE: return method_type_class;
1718 case RECORD_TYPE: return record_type_class;
1719 case UNION_TYPE:
1720 case QUAL_UNION_TYPE: return union_type_class;
1721 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1722 ? string_type_class : array_type_class);
1723 case LANG_TYPE: return lang_type_class;
1724 default: return no_type_class;
1725 }
1726 }
1727
1728 /* Expand a call EXP to __builtin_classify_type. */
1729
1730 static rtx
1731 expand_builtin_classify_type (tree exp)
1732 {
1733 if (call_expr_nargs (exp))
1734 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1735 return GEN_INT (no_type_class);
1736 }
1737
1738 /* This helper macro, meant to be used in mathfn_built_in below,
1739 determines which among a set of three builtin math functions is
1740 appropriate for a given type mode. The `F' and `L' cases are
1741 automatically generated from the `double' case. */
1742 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1743 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1744 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1745 fcodel = BUILT_IN_MATHFN##L ; break;
1746 /* Similar to above, but appends _R after any F/L suffix. */
1747 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1748 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1749 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1750 fcodel = BUILT_IN_MATHFN##L_R ; break;
1751
1752 /* Return mathematic function equivalent to FN but operating directly
1753 on TYPE, if available. If IMPLICIT is true find the function in
1754 implicit_built_in_decls[], otherwise use built_in_decls[]. If we
1755 can't do the conversion, return zero. */
1756
static tree
mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit)
{
  tree const *const fn_arr
    = implicit ? implicit_built_in_decls : built_in_decls;
  enum built_in_function fcode, fcodef, fcodel;

  /* Each CASE_MATHFN expands to the three cases (double/float/long
     double) for FN and sets fcode/fcodef/fcodel accordingly.  */
  switch (fn)
    {
    CASE_MATHFN (BUILT_IN_ACOS)
    CASE_MATHFN (BUILT_IN_ACOSH)
    CASE_MATHFN (BUILT_IN_ASIN)
    CASE_MATHFN (BUILT_IN_ASINH)
    CASE_MATHFN (BUILT_IN_ATAN)
    CASE_MATHFN (BUILT_IN_ATAN2)
    CASE_MATHFN (BUILT_IN_ATANH)
    CASE_MATHFN (BUILT_IN_CBRT)
    CASE_MATHFN (BUILT_IN_CEIL)
    CASE_MATHFN (BUILT_IN_CEXPI)
    CASE_MATHFN (BUILT_IN_COPYSIGN)
    CASE_MATHFN (BUILT_IN_COS)
    CASE_MATHFN (BUILT_IN_COSH)
    CASE_MATHFN (BUILT_IN_DREM)
    CASE_MATHFN (BUILT_IN_ERF)
    CASE_MATHFN (BUILT_IN_ERFC)
    CASE_MATHFN (BUILT_IN_EXP)
    CASE_MATHFN (BUILT_IN_EXP10)
    CASE_MATHFN (BUILT_IN_EXP2)
    CASE_MATHFN (BUILT_IN_EXPM1)
    CASE_MATHFN (BUILT_IN_FABS)
    CASE_MATHFN (BUILT_IN_FDIM)
    CASE_MATHFN (BUILT_IN_FLOOR)
    CASE_MATHFN (BUILT_IN_FMA)
    CASE_MATHFN (BUILT_IN_FMAX)
    CASE_MATHFN (BUILT_IN_FMIN)
    CASE_MATHFN (BUILT_IN_FMOD)
    CASE_MATHFN (BUILT_IN_FREXP)
    CASE_MATHFN (BUILT_IN_GAMMA)
    CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
    CASE_MATHFN (BUILT_IN_HUGE_VAL)
    CASE_MATHFN (BUILT_IN_HYPOT)
    CASE_MATHFN (BUILT_IN_ILOGB)
    CASE_MATHFN (BUILT_IN_INF)
    CASE_MATHFN (BUILT_IN_ISINF)
    CASE_MATHFN (BUILT_IN_J0)
    CASE_MATHFN (BUILT_IN_J1)
    CASE_MATHFN (BUILT_IN_JN)
    CASE_MATHFN (BUILT_IN_LCEIL)
    CASE_MATHFN (BUILT_IN_LDEXP)
    CASE_MATHFN (BUILT_IN_LFLOOR)
    CASE_MATHFN (BUILT_IN_LGAMMA)
    CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
    CASE_MATHFN (BUILT_IN_LLCEIL)
    CASE_MATHFN (BUILT_IN_LLFLOOR)
    CASE_MATHFN (BUILT_IN_LLRINT)
    CASE_MATHFN (BUILT_IN_LLROUND)
    CASE_MATHFN (BUILT_IN_LOG)
    CASE_MATHFN (BUILT_IN_LOG10)
    CASE_MATHFN (BUILT_IN_LOG1P)
    CASE_MATHFN (BUILT_IN_LOG2)
    CASE_MATHFN (BUILT_IN_LOGB)
    CASE_MATHFN (BUILT_IN_LRINT)
    CASE_MATHFN (BUILT_IN_LROUND)
    CASE_MATHFN (BUILT_IN_MODF)
    CASE_MATHFN (BUILT_IN_NAN)
    CASE_MATHFN (BUILT_IN_NANS)
    CASE_MATHFN (BUILT_IN_NEARBYINT)
    CASE_MATHFN (BUILT_IN_NEXTAFTER)
    CASE_MATHFN (BUILT_IN_NEXTTOWARD)
    CASE_MATHFN (BUILT_IN_POW)
    CASE_MATHFN (BUILT_IN_POWI)
    CASE_MATHFN (BUILT_IN_POW10)
    CASE_MATHFN (BUILT_IN_REMAINDER)
    CASE_MATHFN (BUILT_IN_REMQUO)
    CASE_MATHFN (BUILT_IN_RINT)
    CASE_MATHFN (BUILT_IN_ROUND)
    CASE_MATHFN (BUILT_IN_SCALB)
    CASE_MATHFN (BUILT_IN_SCALBLN)
    CASE_MATHFN (BUILT_IN_SCALBN)
    CASE_MATHFN (BUILT_IN_SIGNBIT)
    CASE_MATHFN (BUILT_IN_SIGNIFICAND)
    CASE_MATHFN (BUILT_IN_SIN)
    CASE_MATHFN (BUILT_IN_SINCOS)
    CASE_MATHFN (BUILT_IN_SINH)
    CASE_MATHFN (BUILT_IN_SQRT)
    CASE_MATHFN (BUILT_IN_TAN)
    CASE_MATHFN (BUILT_IN_TANH)
    CASE_MATHFN (BUILT_IN_TGAMMA)
    CASE_MATHFN (BUILT_IN_TRUNC)
    CASE_MATHFN (BUILT_IN_Y0)
    CASE_MATHFN (BUILT_IN_Y1)
    CASE_MATHFN (BUILT_IN_YN)

    default:
      return NULL_TREE;
    }

  /* Pick the suffix variant matching TYPE; types other than the three
     standard float types have no math builtin equivalent.  */
  if (TYPE_MAIN_VARIANT (type) == double_type_node)
    return fn_arr[fcode];
  else if (TYPE_MAIN_VARIANT (type) == float_type_node)
    return fn_arr[fcodef];
  else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
    return fn_arr[fcodel];
  else
    return NULL_TREE;
}
1863
1864 /* Like mathfn_built_in_1(), but always use the implicit array. */
1865
1866 tree
1867 mathfn_built_in (tree type, enum built_in_function fn)
1868 {
1869 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1870 }
1871
/* If errno must be maintained, expand the RTL to check if the result,
   TARGET, of a built-in function call, EXP, is NaN, and if so set
   errno to EDOM.  */

static void
expand_errno_check (tree exp, rtx target)
{
  rtx lab = gen_label_rtx ();

  /* Test the result; if it is NaN, set errno=EDOM because
     the argument was not in the domain.  A self-comparison with EQ
     is only false for NaN, so the jump to LAB (skipping the errno
     store) is taken for every non-NaN result.  */
  do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
			   NULL_RTX, NULL_RTX, lab,
			   /* The jump is very likely.  */
			   REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));

#ifdef TARGET_EDOM
  /* If this built-in doesn't throw an exception, set errno directly.
     Only possible when the target defines the EDOM value; otherwise we
     fall through to the library-call path below.  */
  if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
    {
#ifdef GEN_ERRNO_RTX
      rtx errno_rtx = GEN_ERRNO_RTX;
#else
      /* Default: errno is a word-mode global named "errno".  */
      rtx errno_rtx
	  = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
#endif
      emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
      emit_label (lab);
      return;
    }
#endif

  /* Make sure the library call isn't expanded as a tail call.  */
  CALL_EXPR_TAILCALL (exp) = 0;

  /* We can't set errno=EDOM directly; let the library call do it.
     Pop the arguments right away in case the call gets deleted.  */
  NO_DEFER_POP;
  expand_call (exp, target, 0);
  OK_DEFER_POP;
  emit_label (lab);
}
1914
/* Expand a call to one of the builtin math functions (sqrt, exp, or log).
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, insns;
  tree fndecl = get_callee_fndecl (exp);
  enum machine_mode mode;
  bool errno_set = false;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  /* Pick the optab for this builtin and note whether a NaN result
     would require errno to be set to EDOM.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SQRT):
      /* sqrt of a provably nonnegative argument cannot fail, so the
	 errno check can be skipped in that case.  */
      errno_set = ! tree_expr_nonnegative_p (arg);
      builtin_optab = sqrt_optab;
      break;
    CASE_FLT_FN (BUILT_IN_EXP):
      errno_set = true; builtin_optab = exp_optab; break;
    CASE_FLT_FN (BUILT_IN_EXP10):
    CASE_FLT_FN (BUILT_IN_POW10):
      errno_set = true; builtin_optab = exp10_optab; break;
    CASE_FLT_FN (BUILT_IN_EXP2):
      errno_set = true; builtin_optab = exp2_optab; break;
    CASE_FLT_FN (BUILT_IN_EXPM1):
      errno_set = true; builtin_optab = expm1_optab; break;
    CASE_FLT_FN (BUILT_IN_LOGB):
      errno_set = true; builtin_optab = logb_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG):
      errno_set = true; builtin_optab = log_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG10):
      errno_set = true; builtin_optab = log10_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG2):
      errno_set = true; builtin_optab = log2_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG1P):
      errno_set = true; builtin_optab = log1p_optab; break;
    CASE_FLT_FN (BUILT_IN_ASIN):
      builtin_optab = asin_optab; break;
    CASE_FLT_FN (BUILT_IN_ACOS):
      builtin_optab = acos_optab; break;
    CASE_FLT_FN (BUILT_IN_TAN):
      builtin_optab = tan_optab; break;
    CASE_FLT_FN (BUILT_IN_ATAN):
      builtin_optab = atan_optab; break;
    CASE_FLT_FN (BUILT_IN_FLOOR):
      builtin_optab = floor_optab; break;
    CASE_FLT_FN (BUILT_IN_CEIL):
      builtin_optab = ceil_optab; break;
    CASE_FLT_FN (BUILT_IN_TRUNC):
      builtin_optab = btrunc_optab; break;
    CASE_FLT_FN (BUILT_IN_ROUND):
      builtin_optab = round_optab; break;
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
      builtin_optab = nearbyint_optab;
      if (flag_trapping_math)
	break;
      /* Else fallthrough and expand as rint.  */
    CASE_FLT_FN (BUILT_IN_RINT):
      builtin_optab = rint_optab; break;
    CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
      builtin_optab = significand_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Errno handling is only needed when the user asked for it and the
     mode can actually represent a NaN.  */
  if (! flag_errno_math || ! HONOR_NANS (mode))
    errno_set = false;

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
    {
      target = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more the once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      /* Compute into TARGET.
	 Set TARGET to wherever the result comes back.  */
      target = expand_unop (mode, builtin_optab, op0, target, 0);

      if (target != 0)
	{
	  if (errno_set)
	    expand_errno_check (exp, target);

	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return target;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call to the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  /* No usable optab: emit a plain library call on the (possibly
     stabilized) argument list.  */
  return expand_call (exp, target, target == const0_rtx);
}
2034
/* Expand a call to the builtin binary math functions (pow and atan2).
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, op1, insns;
  int op1_type = REAL_TYPE;
  tree fndecl = get_callee_fndecl (exp);
  tree arg0, arg1;
  enum machine_mode mode;
  bool errno_set = true;

  /* Most of these builtins take two REAL_TYPE arguments; the
     scalbn/scalbln/ldexp family takes an integer second argument.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
    CASE_FLT_FN (BUILT_IN_LDEXP):
      op1_type = INTEGER_TYPE;
      /* FALLTHRU */
    default:
      break;
    }

  if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_POW):
      builtin_optab = pow_optab; break;
    CASE_FLT_FN (BUILT_IN_ATAN2):
      builtin_optab = atan2_optab; break;
    CASE_FLT_FN (BUILT_IN_SCALB):
      /* scalb is only expandable when the radix is 2.  */
      if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
	return 0;
      builtin_optab = scalb_optab; break;
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
      if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
	return 0;
    /* Fall through... */
    CASE_FLT_FN (BUILT_IN_LDEXP):
      builtin_optab = ldexp_optab; break;
    CASE_FLT_FN (BUILT_IN_FMOD):
      builtin_optab = fmod_optab; break;
    CASE_FLT_FN (BUILT_IN_REMAINDER):
    CASE_FLT_FN (BUILT_IN_DREM):
      builtin_optab = remainder_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  target = gen_reg_rtx (mode);

  if (! flag_errno_math || ! HONOR_NANS (mode))
    errno_set = false;

  /* Always stabilize the argument list.  */
  CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
  CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);

  op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
  op1 = expand_normal (arg1);

  start_sequence ();

  /* Compute into TARGET.
     Set TARGET to wherever the result comes back.  */
  target = expand_binop (mode, builtin_optab, op0, op1,
			 target, 0, OPTAB_DIRECT);

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call to the library function
     with the stabilized argument list.  */
  if (target == 0)
    {
      end_sequence ();
      return expand_call (exp, target, target == const0_rtx);
    }

  if (errno_set)
    expand_errno_check (exp, target);

  /* Output the entire sequence.  */
  insns = get_insns ();
  end_sequence ();
  emit_insn (insns);

  return target;
}
2140
/* Expand a call to the builtin sin and cos math functions.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, insns;
  tree fndecl = get_callee_fndecl (exp);
  enum machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  /* Both sin and cos can be produced by the combined sincos insn.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_COS):
      builtin_optab = sincos_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Check if sincos insn is available, otherwise fallback
     to sin or cos insn.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_FLT_FN (BUILT_IN_SIN):
	builtin_optab = sin_optab; break;
      CASE_FLT_FN (BUILT_IN_COS):
	builtin_optab = cos_optab; break;
      default:
	gcc_unreachable ();
      }

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
    {
      target = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more the once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      /* Compute into TARGET.
	 Set TARGET to wherever the result comes back.  */
      if (builtin_optab == sincos_optab)
	{
	  int result;

	  /* The sincos insn produces two values; pass TARGET in the
	     slot for the value we want and 0 for the one we discard.  */
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    CASE_FLT_FN (BUILT_IN_SIN):
	      result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_COS):
	      result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
	      break;
	    default:
	      gcc_unreachable ();
	    }
	  gcc_assert (result);
	}
      else
	{
	  target = expand_unop (mode, builtin_optab, op0, target, 0);
	}

      if (target != 0)
	{
	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return target;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call to the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  target = expand_call (exp, target, target == const0_rtx);

  return target;
}
2244
2245 /* Given an interclass math builtin decl FNDECL and it's argument ARG
2246 return an RTL instruction code that implements the functionality.
2247 If that isn't possible or available return CODE_FOR_nothing. */
2248
2249 static enum insn_code
2250 interclass_mathfn_icode (tree arg, tree fndecl)
2251 {
2252 bool errno_set = false;
2253 optab builtin_optab = 0;
2254 enum machine_mode mode;
2255
2256 switch (DECL_FUNCTION_CODE (fndecl))
2257 {
2258 CASE_FLT_FN (BUILT_IN_ILOGB):
2259 errno_set = true; builtin_optab = ilogb_optab; break;
2260 CASE_FLT_FN (BUILT_IN_ISINF):
2261 builtin_optab = isinf_optab; break;
2262 case BUILT_IN_ISNORMAL:
2263 case BUILT_IN_ISFINITE:
2264 CASE_FLT_FN (BUILT_IN_FINITE):
2265 case BUILT_IN_FINITED32:
2266 case BUILT_IN_FINITED64:
2267 case BUILT_IN_FINITED128:
2268 case BUILT_IN_ISINFD32:
2269 case BUILT_IN_ISINFD64:
2270 case BUILT_IN_ISINFD128:
2271 /* These builtins have no optabs (yet). */
2272 break;
2273 default:
2274 gcc_unreachable ();
2275 }
2276
2277 /* There's no easy way to detect the case we need to set EDOM. */
2278 if (flag_errno_math && errno_set)
2279 return CODE_FOR_nothing;
2280
2281 /* Optab mode depends on the mode of the input argument. */
2282 mode = TYPE_MODE (TREE_TYPE (arg));
2283
2284 if (builtin_optab)
2285 return optab_handler (builtin_optab, mode);
2286 return CODE_FOR_nothing;
2287 }
2288
/* Expand a call to one of the builtin math functions that operate on
   floating point argument and output an integer result (ilogb, isinf,
   isnan, etc).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_interclass_mathfn (tree exp, rtx target, rtx subtarget)
{
  enum insn_code icode = CODE_FOR_nothing;
  rtx op0;
  tree fndecl = get_callee_fndecl (exp);
  enum machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  icode = interclass_mathfn_icode (arg, fndecl);
  mode = TYPE_MODE (TREE_TYPE (arg));

  if (icode != CODE_FOR_nothing)
    {
      /* Remember where we are so everything emitted can be rolled
	 back if the insn pattern rejects the operands.  */
      rtx last = get_last_insn ();
      tree orig_arg = arg;
      /* Make a suitable register to place result in.  */
      if (!target
	  || GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp))
	  || !insn_data[icode].operand[0].predicate (target, GET_MODE (target)))
	target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

      gcc_assert (insn_data[icode].operand[0].predicate
		  (target, GET_MODE (target)));

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more the once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      if (mode != GET_MODE (op0))
	op0 = convert_to_mode (mode, op0, 0);

      /* Compute into TARGET.
	 Set TARGET to wherever the result comes back.  */
      if (maybe_emit_unop_insn (icode, target, op0, UNKNOWN))
	return target;
      /* Expansion failed: discard the insns emitted so far and undo
	 the argument stabilization before falling back to a call.  */
      delete_insns_since (last);
      CALL_EXPR_ARG (exp, 0) = orig_arg;
    }

  return NULL_RTX;
}
2346
2347 /* Expand a call to the builtin sincos math function.
2348 Return NULL_RTX if a normal call should be emitted rather than expanding the
2349 function in-line. EXP is the expression that is a call to the builtin
2350 function. */
2351
2352 static rtx
2353 expand_builtin_sincos (tree exp)
2354 {
2355 rtx op0, op1, op2, target1, target2;
2356 enum machine_mode mode;
2357 tree arg, sinp, cosp;
2358 int result;
2359 location_t loc = EXPR_LOCATION (exp);
2360
2361 if (!validate_arglist (exp, REAL_TYPE,
2362 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2363 return NULL_RTX;
2364
2365 arg = CALL_EXPR_ARG (exp, 0);
2366 sinp = CALL_EXPR_ARG (exp, 1);
2367 cosp = CALL_EXPR_ARG (exp, 2);
2368
2369 /* Make a suitable register to place result in. */
2370 mode = TYPE_MODE (TREE_TYPE (arg));
2371
2372 /* Check if sincos insn is available, otherwise emit the call. */
2373 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2374 return NULL_RTX;
2375
2376 target1 = gen_reg_rtx (mode);
2377 target2 = gen_reg_rtx (mode);
2378
2379 op0 = expand_normal (arg);
2380 op1 = expand_normal (build_fold_indirect_ref_loc (loc, sinp));
2381 op2 = expand_normal (build_fold_indirect_ref_loc (loc, cosp));
2382
2383 /* Compute into target1 and target2.
2384 Set TARGET to wherever the result comes back. */
2385 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2386 gcc_assert (result);
2387
2388 /* Move target1 and target2 to the memory locations indicated
2389 by op1 and op2. */
2390 emit_move_insn (op1, target1);
2391 emit_move_insn (op2, target2);
2392
2393 return const0_rtx;
2394 }
2395
/* Expand a call to the internal cexpi builtin to the sincos math function.
   EXP is the expression that is a call to the builtin function; if convenient,
   the result should be placed in TARGET.  SUBTARGET may be used as the target
   for computing one of EXP's operands.  */

static rtx
expand_builtin_cexpi (tree exp, rtx target, rtx subtarget)
{
  tree fndecl = get_callee_fndecl (exp);
  tree arg, type;
  enum machine_mode mode;
  rtx op0, op1, op2;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Try expanding via a sincos optab, fall back to emitting a libcall
     to sincos or cexp.  We are sure we have sincos or cexp because cexpi
     is only generated from sincos, cexp or if we have either of them.  */
  if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
    {
      /* op1 will receive the sin result, op2 the cos result.  */
      op1 = gen_reg_rtx (mode);
      op2 = gen_reg_rtx (mode);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      /* Compute into op1 and op2.  */
      expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
    }
  else if (TARGET_HAS_SINCOS)
    {
      /* Emit a call to sincos with two stack temporaries to receive
	 the results; their addresses are passed as the pointer args.  */
      tree call, fn = NULL_TREE;
      tree top1, top2;
      rtx op1a, op2a;

      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	fn = built_in_decls[BUILT_IN_SINCOSF];
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	fn = built_in_decls[BUILT_IN_SINCOS];
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	fn = built_in_decls[BUILT_IN_SINCOSL];
      else
	gcc_unreachable ();

      op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
      op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
      op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
      op2a = copy_to_mode_reg (Pmode, XEXP (op2, 0));
      top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
      top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);

      /* Make sure not to fold the sincos call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
				      call, 3, arg, top1, top2));
    }
  else
    {
      /* Last resort: call cexp (0 + arg*i) and return its value.  */
      tree call, fn = NULL_TREE, narg;
      tree ctype = build_complex_type (type);

      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	fn = built_in_decls[BUILT_IN_CEXPF];
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	fn = built_in_decls[BUILT_IN_CEXP];
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	fn = built_in_decls[BUILT_IN_CEXPL];
      else
	gcc_unreachable ();

      /* If we don't have a decl for cexp create one.  This is the
	 friendliest fallback if the user calls __builtin_cexpi
	 without full target C99 function support.  */
      if (fn == NULL_TREE)
	{
	  tree fntype;
	  const char *name = NULL;

	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	    name = "cexpf";
	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	    name = "cexp";
	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	    name = "cexpl";

	  fntype = build_function_type_list (ctype, ctype, NULL_TREE);
	  fn = build_fn_decl (name, fntype);
	}

      /* Build the complex argument 0 + arg*i.  */
      narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
			      build_real (type, dconst0), arg);

      /* Make sure not to fold the cexp call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      return expand_expr (build_call_nary (ctype, call, 1, narg),
			  target, VOIDmode, EXPAND_NORMAL);
    }

  /* Now build the proper return type: cos(arg) + sin(arg)*i.  */
  return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
			      make_tree (TREE_TYPE (arg), op2),
			      make_tree (TREE_TYPE (arg), op1)),
		      target, VOIDmode, EXPAND_NORMAL);
}
2505
2506 /* Conveniently construct a function call expression. FNDECL names the
2507 function to be called, N is the number of arguments, and the "..."
2508 parameters are the argument expressions. Unlike build_call_exr
2509 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2510
2511 static tree
2512 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2513 {
2514 va_list ap;
2515 tree fntype = TREE_TYPE (fndecl);
2516 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2517
2518 va_start (ap, n);
2519 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2520 va_end (ap);
2521 SET_EXPR_LOCATION (fn, loc);
2522 return fn;
2523 }
2524
/* Expand a call to one of the builtin rounding functions gcc defines
   as an extension (lfloor and lceil).  As these are gcc extensions we
   do not need to worry about setting errno to EDOM.
   If expanding via optab fails, lower expression to (int)(floor(x)).
   EXP is the expression that is a call to the builtin function;
   if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0, insns, tmp;
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fallback_fn;
  tree fallback_fndecl;
  enum machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    gcc_unreachable ();

  arg = CALL_EXPR_ARG (exp, 0);

  /* Select both the direct conversion optab and the floating point
     rounding builtin to fall back on.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_LCEIL):
    CASE_FLT_FN (BUILT_IN_LLCEIL):
      builtin_optab = lceil_optab;
      fallback_fn = BUILT_IN_CEIL;
      break;

    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      builtin_optab = lfloor_optab;
      fallback_fn = BUILT_IN_FLOOR;
      break;

    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  target = gen_reg_rtx (mode);

  /* Wrap the computation of the argument in a SAVE_EXPR, as we may
     need to expand the argument again.  This way, we will not perform
     side-effects more the once.  */
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

  op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

  start_sequence ();

  /* Compute into TARGET.  */
  if (expand_sfix_optab (target, op0, builtin_optab))
    {
      /* Output the entire sequence.  */
      insns = get_insns ();
      end_sequence ();
      emit_insn (insns);
      return target;
    }

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns).  */
  end_sequence ();

  /* Fall back to floating point rounding optab.  */
  fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);

  /* For non-C99 targets we may end up without a fallback fndecl here
     if the user called __builtin_lfloor directly.  In this case emit
     a call to the floor/ceil variants nevertheless.  This should result
     in the best user experience for not full C99 targets.  */
  if (fallback_fndecl == NULL_TREE)
    {
      tree fntype;
      const char *name = NULL;

      /* Map the builtin code to the library function name for the
	 matching floating point type.  */
      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_LCEIL:
	case BUILT_IN_LLCEIL:
	  name = "ceil";
	  break;
	case BUILT_IN_LCEILF:
	case BUILT_IN_LLCEILF:
	  name = "ceilf";
	  break;
	case BUILT_IN_LCEILL:
	case BUILT_IN_LLCEILL:
	  name = "ceill";
	  break;
	case BUILT_IN_LFLOOR:
	case BUILT_IN_LLFLOOR:
	  name = "floor";
	  break;
	case BUILT_IN_LFLOORF:
	case BUILT_IN_LLFLOORF:
	  name = "floorf";
	  break;
	case BUILT_IN_LFLOORL:
	case BUILT_IN_LLFLOORL:
	  name = "floorl";
	  break;
	default:
	  gcc_unreachable ();
	}

      fntype = build_function_type_list (TREE_TYPE (arg),
					 TREE_TYPE (arg), NULL_TREE);
      fallback_fndecl = build_fn_decl (name, fntype);
    }

  /* Rewrite the call as floor/ceil of the stabilized argument.  */
  exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);

  tmp = expand_normal (exp);

  /* Truncate the result of floating point optab to integer
     via expand_fix ().  */
  target = gen_reg_rtx (mode);
  expand_fix (target, tmp, 0);

  return target;
}
2652
/* Expand a call to one of the builtin math functions doing integer
   conversion (lrint).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn_2 (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0, insns;
  tree fndecl = get_callee_fndecl (exp);
  tree arg;
  enum machine_mode mode;

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math)
    return NULL_RTX;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    gcc_unreachable ();

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_LLRINT):
      builtin_optab = lrint_optab; break;
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_LLROUND):
      builtin_optab = lround_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  target = gen_reg_rtx (mode);

  /* Wrap the computation of the argument in a SAVE_EXPR, as we may
     need to expand the argument again.  This way, we will not perform
     side-effects more the once.  */
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

  op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

  start_sequence ();

  if (expand_sfix_optab (target, op0, builtin_optab))
    {
      /* Output the entire sequence.  */
      insns = get_insns ();
      end_sequence ();
      emit_insn (insns);
      return target;
    }

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call to the library function
     with the stabilized argument list.  */
  end_sequence ();

  target = expand_call (exp, target, target == const0_rtx);

  return target;
}
2721
/* To evaluate powi(x,n), the floating point value x raised to the
   constant integer exponent n, we use a hybrid algorithm that
   combines the "window method" with look-up tables.  For an
   introduction to exponentiation algorithms and "addition chains",
   see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
   "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
   3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
   Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998.  */

/* Provide a default value for POWI_MAX_MULTS, the maximum number of
   multiplications to inline before calling the system library's pow
   function.  powi(x,n) requires at worst 2*bits(n)-2 multiplications,
   so this default never requires calling pow, powf or powl.  */

#ifndef POWI_MAX_MULTS
#define POWI_MAX_MULTS  (2*HOST_BITS_PER_WIDE_INT-2)
#endif

/* The size of the "optimal power tree" lookup table.  All
   exponents less than this value are simply looked up in the
   powi_table below.  This threshold is also used to size the
   cache of pseudo registers that hold intermediate results.  */
#define POWI_TABLE_SIZE 256

/* The size, in bits of the window, used in the "window method"
   exponentiation algorithm.  This is equivalent to a radix of
   (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method".  */
#define POWI_WINDOW_SIZE 3
2750
/* The following table is an efficient representation of an
   "optimal power tree".  For each value, i, the corresponding
   value, j, in the table states than an optimal evaluation
   sequence for calculating pow(x,i) can be found by evaluating
   pow(x,j)*pow(x,i-j).  An optimal power tree for the first
   100 integers is given in Knuth's "Seminumerical algorithms".  */

static const unsigned char powi_table[POWI_TABLE_SIZE] =
  {
      0,   1,   1,   2,   2,   3,   3,   4,  /*   0 -   7 */
      4,   6,   5,   6,   6,  10,   7,   9,  /*   8 -  15 */
      8,  16,   9,  16,  10,  12,  11,  13,  /*  16 -  23 */
     12,  17,  13,  18,  14,  24,  15,  26,  /*  24 -  31 */
     16,  17,  17,  19,  18,  33,  19,  26,  /*  32 -  39 */
     20,  25,  21,  40,  22,  27,  23,  44,  /*  40 -  47 */
     24,  32,  25,  34,  26,  29,  27,  44,  /*  48 -  55 */
     28,  31,  29,  34,  30,  60,  31,  36,  /*  56 -  63 */
     32,  64,  33,  34,  34,  46,  35,  37,  /*  64 -  71 */
     36,  65,  37,  50,  38,  48,  39,  69,  /*  72 -  79 */
     40,  49,  41,  43,  42,  51,  43,  58,  /*  80 -  87 */
     44,  64,  45,  47,  46,  59,  47,  76,  /*  88 -  95 */
     48,  65,  49,  66,  50,  67,  51,  66,  /*  96 - 103 */
     52,  70,  53,  74,  54, 104,  55,  74,  /* 104 - 111 */
     56,  64,  57,  69,  58,  78,  59,  68,  /* 112 - 119 */
     60,  61,  61,  80,  62,  75,  63,  68,  /* 120 - 127 */
     64,  65,  65, 128,  66, 129,  67,  90,  /* 128 - 135 */
     68,  73,  69, 131,  70,  94,  71,  88,  /* 136 - 143 */
     72, 128,  73,  98,  74, 132,  75, 121,  /* 144 - 151 */
     76, 102,  77, 124,  78, 132,  79, 106,  /* 152 - 159 */
     80,  97,  81, 160,  82,  99,  83, 134,  /* 160 - 167 */
     84,  86,  85,  95,  86, 160,  87, 100,  /* 168 - 175 */
     88, 113,  89,  98,  90, 107,  91, 122,  /* 176 - 183 */
     92, 111,  93, 102,  94, 126,  95, 150,  /* 184 - 191 */
     96, 128,  97, 130,  98, 133,  99, 195,  /* 192 - 199 */
    100, 128, 101, 123, 102, 164, 103, 138,  /* 200 - 207 */
    104, 145, 105, 146, 106, 109, 107, 149,  /* 208 - 215 */
    108, 200, 109, 146, 110, 170, 111, 157,  /* 216 - 223 */
    112, 128, 113, 130, 114, 182, 115, 132,  /* 224 - 231 */
    116, 200, 117, 132, 118, 158, 119, 206,  /* 232 - 239 */
    120, 240, 121, 162, 122, 147, 123, 152,  /* 240 - 247 */
    124, 166, 125, 214, 126, 138, 127, 153,  /* 248 - 255 */
  };
2793
2794
2795 /* Return the number of multiplications required to calculate
2796 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2797 subroutine of powi_cost. CACHE is an array indicating
2798 which exponents have already been calculated. */
2799
2800 static int
2801 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
2802 {
2803 /* If we've already calculated this exponent, then this evaluation
2804 doesn't require any additional multiplications. */
2805 if (cache[n])
2806 return 0;
2807
2808 cache[n] = true;
2809 return powi_lookup_cost (n - powi_table[n], cache)
2810 + powi_lookup_cost (powi_table[n], cache) + 1;
2811 }
2812
2813 /* Return the number of multiplications required to calculate
2814 powi(x,n) for an arbitrary x, given the exponent N. This
2815 function needs to be kept in sync with expand_powi below. */
2816
2817 static int
2818 powi_cost (HOST_WIDE_INT n)
2819 {
2820 bool cache[POWI_TABLE_SIZE];
2821 unsigned HOST_WIDE_INT digit;
2822 unsigned HOST_WIDE_INT val;
2823 int result;
2824
2825 if (n == 0)
2826 return 0;
2827
2828 /* Ignore the reciprocal when calculating the cost. */
2829 val = (n < 0) ? -n : n;
2830
2831 /* Initialize the exponent cache. */
2832 memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
2833 cache[1] = true;
2834
2835 result = 0;
2836
2837 while (val >= POWI_TABLE_SIZE)
2838 {
2839 if (val & 1)
2840 {
2841 digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2842 result += powi_lookup_cost (digit, cache)
2843 + POWI_WINDOW_SIZE + 1;
2844 val >>= POWI_WINDOW_SIZE;
2845 }
2846 else
2847 {
2848 val >>= 1;
2849 result++;
2850 }
2851 }
2852
2853 return result + powi_lookup_cost (val, cache);
2854 }
2855
2856 /* Recursive subroutine of expand_powi. This function takes the array,
2857 CACHE, of already calculated exponents and an exponent N and returns
2858 an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE. */
2859
static rtx
expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
{
  unsigned HOST_WIDE_INT digit;
  rtx target, result;
  rtx op0, op1;

  /* Small exponents: follow the optimal power tree.  Each exponent's
     register is memoized in CACHE so it is expanded at most once.  */
  if (n < POWI_TABLE_SIZE)
    {
      if (cache[n])
	return cache[n];

      /* Store the register in CACHE before recursing.  The table
	 splits are always strictly smaller than N, so the recursive
	 calls never read this slot before it is computed.  */
      target = gen_reg_rtx (mode);
      cache[n] = target;

      op0 = expand_powi_1 (mode, n - powi_table[n], cache);
      op1 = expand_powi_1 (mode, powi_table[n], cache);
    }
  else if (n & 1)
    {
      /* Large odd exponent: peel off a POWI_WINDOW_SIZE-bit window,
	 matching the accounting in powi_cost above.  */
      target = gen_reg_rtx (mode);
      digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
      op0 = expand_powi_1 (mode, n - digit, cache);
      op1 = expand_powi_1 (mode, digit, cache);
    }
  else
    {
      /* Large even exponent: square x**(n/2).  */
      target = gen_reg_rtx (mode);
      op0 = expand_powi_1 (mode, n >> 1, cache);
      op1 = op0;
    }

  /* Combine the two sub-results with one multiplication, forcing the
     value into TARGET so the cached rtx stays valid.  */
  result = expand_mult (mode, op0, op1, target, 0);
  if (result != target)
    emit_move_insn (target, result);
  return target;
}
2897
2898 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
2899 floating point operand in mode MODE, and N is the exponent. This
2900 function needs to be kept in sync with powi_cost above. */
2901
2902 static rtx
2903 expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
2904 {
2905 rtx cache[POWI_TABLE_SIZE];
2906 rtx result;
2907
2908 if (n == 0)
2909 return CONST1_RTX (mode);
2910
2911 memset (cache, 0, sizeof (cache));
2912 cache[1] = x;
2913
2914 result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
2915
2916 /* If the original exponent was negative, reciprocate the result. */
2917 if (n < 0)
2918 result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2919 result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2920
2921 return result;
2922 }
2923
2924 /* Fold a builtin function call to pow, powf, or powl into a series of sqrts or
2925 cbrts. Return NULL_RTX if no simplification can be made or expand the tree
2926 if we can simplify it. */
2927 static rtx
2928 expand_builtin_pow_root (location_t loc, tree arg0, tree arg1, tree type,
2929 rtx subtarget)
2930 {
2931 if (TREE_CODE (arg1) == REAL_CST
2932 && !TREE_OVERFLOW (arg1)
2933 && flag_unsafe_math_optimizations)
2934 {
2935 enum machine_mode mode = TYPE_MODE (type);
2936 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
2937 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
2938 REAL_VALUE_TYPE c = TREE_REAL_CST (arg1);
2939 tree op = NULL_TREE;
2940
2941 if (sqrtfn)
2942 {
2943 /* Optimize pow (x, 0.5) into sqrt. */
2944 if (REAL_VALUES_EQUAL (c, dconsthalf))
2945 op = build_call_nofold_loc (loc, sqrtfn, 1, arg0);
2946
2947 else
2948 {
2949 REAL_VALUE_TYPE dconst1_4 = dconst1;
2950 REAL_VALUE_TYPE dconst3_4;
2951 SET_REAL_EXP (&dconst1_4, REAL_EXP (&dconst1_4) - 2);
2952
2953 real_from_integer (&dconst3_4, VOIDmode, 3, 0, 0);
2954 SET_REAL_EXP (&dconst3_4, REAL_EXP (&dconst3_4) - 2);
2955
2956 /* Optimize pow (x, 0.25) into sqrt (sqrt (x)). Assume on most
2957 machines that a builtin sqrt instruction is smaller than a
2958 call to pow with 0.25, so do this optimization even if
2959 -Os. */
2960 if (REAL_VALUES_EQUAL (c, dconst1_4))
2961 {
2962 op = build_call_nofold_loc (loc, sqrtfn, 1, arg0);
2963 op = build_call_nofold_loc (loc, sqrtfn, 1, op);
2964 }
2965
2966 /* Optimize pow (x, 0.75) = sqrt (x) * sqrt (sqrt (x)) unless we
2967 are optimizing for space. */
2968 else if (optimize_insn_for_speed_p ()
2969 && !TREE_SIDE_EFFECTS (arg0)
2970 && REAL_VALUES_EQUAL (c, dconst3_4))
2971 {
2972 tree sqrt1 = build_call_expr_loc (loc, sqrtfn, 1, arg0);
2973 tree sqrt2 = builtin_save_expr (sqrt1);
2974 tree sqrt3 = build_call_expr_loc (loc, sqrtfn, 1, sqrt1);
2975 op = fold_build2_loc (loc, MULT_EXPR, type, sqrt2, sqrt3);
2976 }
2977 }
2978 }
2979
2980 /* Check whether we can do cbrt insstead of pow (x, 1./3.) and
2981 cbrt/sqrts instead of pow (x, 1./6.). */
2982 if (cbrtfn && ! op
2983 && (tree_expr_nonnegative_p (arg0) || !HONOR_NANS (mode)))
2984 {
2985 /* First try 1/3. */
2986 REAL_VALUE_TYPE dconst1_3
2987 = real_value_truncate (mode, dconst_third ());
2988
2989 if (REAL_VALUES_EQUAL (c, dconst1_3))
2990 op = build_call_nofold_loc (loc, cbrtfn, 1, arg0);
2991
2992 /* Now try 1/6. */
2993 else if (optimize_insn_for_speed_p ())
2994 {
2995 REAL_VALUE_TYPE dconst1_6 = dconst1_3;
2996 SET_REAL_EXP (&dconst1_6, REAL_EXP (&dconst1_6) - 1);
2997
2998 if (REAL_VALUES_EQUAL (c, dconst1_6))
2999 {
3000 op = build_call_nofold_loc (loc, sqrtfn, 1, arg0);
3001 op = build_call_nofold_loc (loc, cbrtfn, 1, op);
3002 }
3003 }
3004 }
3005
3006 if (op)
3007 return expand_expr (op, subtarget, mode, EXPAND_NORMAL);
3008 }
3009
3010 return NULL_RTX;
3011 }
3012
3013 /* Expand a call to the pow built-in mathematical function. Return NULL_RTX if
3014 a normal call should be emitted rather than expanding the function
3015 in-line. EXP is the expression that is a call to the builtin
3016 function; if convenient, the result should be placed in TARGET. */
3017
static rtx
expand_builtin_pow (tree exp, rtx target, rtx subtarget)
{
  tree arg0, arg1;
  tree fn, narg0;
  tree type = TREE_TYPE (exp);
  REAL_VALUE_TYPE cint, c, c2;
  HOST_WIDE_INT n;
  rtx op, op2;
  enum machine_mode mode = TYPE_MODE (type);

  if (! validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);

  /* Non-constant exponent: nothing clever to do, expand via the
     generic two-argument math-function path.  */
  if (TREE_CODE (arg1) != REAL_CST
      || TREE_OVERFLOW (arg1))
    return expand_builtin_mathfn_2 (exp, target, subtarget);

  /* Handle constant exponents.  */

  /* For integer valued exponents we can expand to an optimal multiplication
     sequence using expand_powi.  */
  c = TREE_REAL_CST (arg1);
  n = real_to_integer (&c);
  real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
  /* real_identical with the round-tripped value proves the exponent is
     exactly integral.  -1..2 are always exact; larger powers need
     unsafe math and a cheap-enough multiply chain.  */
  if (real_identical (&c, &cint)
      && ((n >= -1 && n <= 2)
	  || (flag_unsafe_math_optimizations
	      && optimize_insn_for_speed_p ()
	      && powi_cost (n) <= POWI_MAX_MULTS)))
    {
      op = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
      if (n != 1)
	{
	  op = force_reg (mode, op);
	  op = expand_powi (op, mode, n);
	}
      return op;
    }

  /* ARG0 may be used twice below (once for sqrt/cbrt, once for the
     powi part), so wrap it in a save_expr.  */
  narg0 = builtin_save_expr (arg0);

  /* If the exponent is not integer valued, check if it is half of an integer.
     In this case we can expand to sqrt (x) * x**(n/2).  */
  fn = mathfn_built_in (type, BUILT_IN_SQRT);
  if (fn != NULL_TREE)
    {
      /* N below is the integer 2*c, i.e. c == n/2.  */
      real_arithmetic (&c2, MULT_EXPR, &c, &dconst2);
      n = real_to_integer (&c2);
      real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
      if (real_identical (&c2, &cint)
	  && ((flag_unsafe_math_optimizations
	       && optimize_insn_for_speed_p ()
	       && powi_cost (n/2) <= POWI_MAX_MULTS)
	      /* Even the c == 0.5 case cannot be done unconditionally
		 when we need to preserve signed zeros, as
		 pow (-0, 0.5) is +0, while sqrt(-0) is -0.  */
	      || (!HONOR_SIGNED_ZEROS (mode) && n == 1)
	      /* For c == 1.5 we can assume that x * sqrt (x) is always
		 smaller than pow (x, 1.5) if sqrt will not be expanded
		 as a call.  */
	      || (n == 3
		  && optab_handler (sqrt_optab, mode) != CODE_FOR_nothing)))
	{
	  tree call_expr = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 1,
						  narg0);
	  /* Use expand_expr in case the newly built call expression
	     was folded to a non-call.  */
	  op = expand_expr (call_expr, subtarget, mode, EXPAND_NORMAL);
	  if (n != 1)
	    {
	      /* Multiply sqrt(x) by x**|n/2|.  */
	      op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
	      op2 = force_reg (mode, op2);
	      op2 = expand_powi (op2, mode, abs (n / 2));
	      op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
					0, OPTAB_LIB_WIDEN);
	      /* If the original exponent was negative, reciprocate the
		 result.  */
	      if (n < 0)
		op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
				   op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
	    }
	  return op;
	}
    }

  /* Check whether we can do a series of sqrt or cbrt's instead of the pow
     call.  */
  op = expand_builtin_pow_root (EXPR_LOCATION (exp), arg0, arg1, type,
				subtarget);
  if (op)
    return op;

  /* Try if the exponent is a third of an integer.  In this case
     we can expand to x**(n/3) * cbrt(x)**(n%3).  As cbrt (x) is
     different from pow (x, 1./3.) due to rounding and behavior
     with negative x we need to constrain this transformation to
     unsafe math and positive x or finite math.  */
  fn = mathfn_built_in (type, BUILT_IN_CBRT);
  if (fn != NULL_TREE
      && flag_unsafe_math_optimizations
      && (tree_expr_nonnegative_p (arg0)
	  || !HONOR_NANS (mode)))
    {
      /* Round 3*c to the nearest integer N, then check that N/3,
	 converted back to MODE, reproduces c exactly.  */
      REAL_VALUE_TYPE dconst3;
      real_from_integer (&dconst3, VOIDmode, 3, 0, 0);
      real_arithmetic (&c2, MULT_EXPR, &c, &dconst3);
      real_round (&c2, mode, &c2);
      n = real_to_integer (&c2);
      real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
      real_arithmetic (&c2, RDIV_EXPR, &cint, &dconst3);
      real_convert (&c2, mode, &c2);
      if (real_identical (&c2, &c)
	  && ((optimize_insn_for_speed_p ()
	       && powi_cost (n/3) <= POWI_MAX_MULTS)
	      || n == 1))
	{
	  tree call_expr = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 1,
						  narg0);
	  op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
	  /* cbrt(x)**2 when |n| mod 3 == 2; squaring handles it.  */
	  if (abs (n) % 3 == 2)
	    op = expand_simple_binop (mode, MULT, op, op, op,
				      0, OPTAB_LIB_WIDEN);
	  if (n != 1)
	    {
	      op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
	      op2 = force_reg (mode, op2);
	      op2 = expand_powi (op2, mode, abs (n / 3));
	      op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
					0, OPTAB_LIB_WIDEN);
	      /* If the original exponent was negative, reciprocate the
		 result.  */
	      if (n < 0)
		op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
				   op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
	    }
	  return op;
	}
    }

  /* Fall back to optab expansion.  */
  return expand_builtin_mathfn_2 (exp, target, subtarget);
}
3164
3165 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
3166 a normal call should be emitted rather than expanding the function
3167 in-line. EXP is the expression that is a call to the builtin
3168 function; if convenient, the result should be placed in TARGET. */
3169
static rtx
expand_builtin_powi (tree exp, rtx target, rtx subtarget)
{
  tree arg0, arg1;
  rtx op0, op1;
  enum machine_mode mode;
  enum machine_mode mode2;

  if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Handle constant power.  */

  if (TREE_CODE (arg1) == INTEGER_CST
      && !TREE_OVERFLOW (arg1))
    {
      HOST_WIDE_INT n = TREE_INT_CST_LOW (arg1);

      /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
	 Otherwise, check the number of multiplications required.
	 The HIGH-word test ensures the constant actually fits in a
	 HOST_WIDE_INT (0 for small positive, -1 for sign-extended
	 negative values).  */
      if ((TREE_INT_CST_HIGH (arg1) == 0
	   || TREE_INT_CST_HIGH (arg1) == -1)
	  && ((n >= -1 && n <= 2)
	      || (optimize_insn_for_speed_p ()
		  && powi_cost (n) <= POWI_MAX_MULTS)))
	{
	  op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
	  op0 = force_reg (mode, op0);
	  return expand_powi (op0, mode, n);
	}
    }

  /* Emit a libcall to libgcc.  */

  /* Mode of the 2nd argument must match that of an int.  */
  mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);

  if (target == NULL_RTX)
    target = gen_reg_rtx (mode);

  /* Coerce both operands into the modes the __powiXX2 libcall
     expects before emitting it.  */
  op0 = expand_expr (arg0, subtarget, mode, EXPAND_NORMAL);
  if (GET_MODE (op0) != mode)
    op0 = convert_to_mode (mode, op0, 0);
  op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
  if (GET_MODE (op1) != mode2)
    op1 = convert_to_mode (mode2, op1, 0);

  /* LCT_CONST: the libcall has no side effects and reads no memory.  */
  target = emit_library_call_value (optab_libfunc (powi_optab, mode),
				    target, LCT_CONST, mode, 2,
				    op0, mode, op1, mode2);

  return target;
}
3227
3228 /* Expand expression EXP which is a call to the strlen builtin. Return
3229 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3230 try to get the result in TARGET, if convenient. */
3231
static rtx
expand_builtin_strlen (tree exp, rtx target,
		       enum machine_mode target_mode)
{
  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      rtx pat;
      tree len;
      tree src = CALL_EXPR_ARG (exp, 0);
      rtx result, src_reg, char_rtx, before_strlen;
      enum machine_mode insn_mode = target_mode, char_mode;
      enum insn_code icode = CODE_FOR_nothing;
      int align;

      /* If the length can be computed at compile-time, return it.  */
      len = c_strlen (src, 0);
      if (len)
	return expand_expr (len, target, target_mode, EXPAND_NORMAL);

      /* If the length can be computed at compile-time and is constant
	 integer, but there are side-effects in src, evaluate
	 src for side-effects, then return len.
	 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
	 can be optimized into: i++; x = 3;  */
      len = c_strlen (src, 1);
      if (len && TREE_CODE (len) == INTEGER_CST)
	{
	  expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
	  return expand_expr (len, target, target_mode, EXPAND_NORMAL);
	}

      align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;

      /* If SRC is not a pointer type, don't do this operation inline.  */
      if (align == 0)
	return NULL_RTX;

      /* Bail out if we can't compute strlen in the right mode; try
	 successively wider integer modes until an insn exists.  */
      while (insn_mode != VOIDmode)
	{
	  icode = optab_handler (strlen_optab, insn_mode);
	  if (icode != CODE_FOR_nothing)
	    break;

	  insn_mode = GET_MODE_WIDER_MODE (insn_mode);
	}
      if (insn_mode == VOIDmode)
	return NULL_RTX;

      /* Make a place to write the result of the instruction.  */
      result = target;
      if (! (result != 0
	     && REG_P (result)
	     && GET_MODE (result) == insn_mode
	     && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	result = gen_reg_rtx (insn_mode);

      /* Make a place to hold the source address.  We will not expand
	 the actual source until we are sure that the expansion will
	 not fail -- there are trees that cannot be expanded twice.  */
      src_reg = gen_reg_rtx (Pmode);

      /* Mark the beginning of the strlen sequence so we can emit the
	 source operand later.  */
      before_strlen = get_last_insn ();

      /* The search character is NUL; coerce it into whatever
	 mode/register operand 2 of the strlen insn demands.  */
      char_rtx = const0_rtx;
      char_mode = insn_data[(int) icode].operand[2].mode;
      if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
							    char_mode))
	char_rtx = copy_to_mode_reg (char_mode, char_rtx);

      pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
			     char_rtx, GEN_INT (align));
      if (! pat)
	return NULL_RTX;
      emit_insn (pat);

      /* Now that we are assured of success, expand the source.  */
      start_sequence ();
      pat = expand_expr (src, src_reg, ptr_mode, EXPAND_NORMAL);
      if (pat != src_reg)
	emit_move_insn (src_reg, pat);
      pat = get_insns ();
      end_sequence ();

      /* Splice the source-address computation in ahead of the strlen
	 insn emitted above.  */
      if (before_strlen)
	emit_insn_after (pat, before_strlen);
      else
	emit_insn_before (pat, get_insns ());

      /* Return the value in the proper mode for this function.  */
      if (GET_MODE (result) == target_mode)
	target = result;
      else if (target != 0)
	convert_move (target, result, 0);
      else
	target = convert_to_mode (target_mode, result, 0);

      return target;
    }
}
3336
3337 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3338 bytes from constant string DATA + OFFSET and return it as target
3339 constant. */
3340
3341 static rtx
3342 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3343 enum machine_mode mode)
3344 {
3345 const char *str = (const char *) data;
3346
3347 gcc_assert (offset >= 0
3348 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3349 <= strlen (str) + 1));
3350
3351 return c_readstr (str + offset, mode);
3352 }
3353
3354 /* Expand a call EXP to the memcpy builtin.
3355 Return NULL_RTX if we failed, the caller should emit a normal call,
3356 otherwise try to get the result in TARGET, if convenient (and in
3357 mode MODE if that's convenient). */
3358
static rtx
expand_builtin_memcpy (tree exp, rtx target)
{
  if (!validate_arglist (exp,
 			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      tree len = CALL_EXPR_ARG (exp, 2);
      const char *src_str;
      unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
      unsigned int dest_align
	= get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
      rtx dest_mem, src_mem, dest_addr, len_rtx;
      /* Defaults mean "no profile data available".  */
      HOST_WIDE_INT expected_size = -1;
      unsigned int expected_align = 0;

      /* If DEST is not a pointer type, call the normal function.  */
      if (dest_align == 0)
	return NULL_RTX;

      /* If either SRC is not a pointer type, don't do this
	 operation in-line.  */
      if (src_align == 0)
	return NULL_RTX;

      /* Pull expected block size/alignment from value profiling, if
	 the expander is currently working on a gimple statement.  */
      if (currently_expanding_gimple_stmt)
        stringop_block_profile (currently_expanding_gimple_stmt,
				&expected_align, &expected_size);

      if (expected_align < dest_align)
	expected_align = dest_align;
      dest_mem = get_memory_rtx (dest, len);
      set_mem_align (dest_mem, dest_align);
      len_rtx = expand_normal (len);
      src_str = c_getstr (src);

      /* If SRC is a string constant and block move would be done
	 by pieces, we can avoid loading the string from memory
	 and only store the computed constants.  */
      if (src_str
	  && CONST_INT_P (len_rtx)
	  && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
	  && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
				  CONST_CAST (char *, src_str),
				  dest_align, false))
	{
	  dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
				      builtin_memcpy_read_str,
				      CONST_CAST (char *, src_str),
				      dest_align, false, 0);
	  dest_mem = force_operand (XEXP (dest_mem, 0), target);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}

      src_mem = get_memory_rtx (src, len);
      set_mem_align (src_mem, src_align);

      /* Copy word part most expediently.  */
      dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
				         CALL_EXPR_TAILCALL (exp)
					 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
					 expected_align, expected_size);

      /* memcpy returns DEST; materialize its address if the block move
	 did not already produce it.  */
      if (dest_addr == 0)
	{
	  dest_addr = force_operand (XEXP (dest_mem, 0), target);
	  dest_addr = convert_memory_address (ptr_mode, dest_addr);
	}
      return dest_addr;
    }
}
3434
3435 /* Expand a call EXP to the mempcpy builtin.
3436 Return NULL_RTX if we failed; the caller should emit a normal call,
3437 otherwise try to get the result in TARGET, if convenient (and in
3438 mode MODE if that's convenient). If ENDP is 0 return the
3439 destination pointer, if ENDP is 1 return the end pointer ala
3440 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3441 stpcpy. */
3442
3443 static rtx
3444 expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3445 {
3446 if (!validate_arglist (exp,
3447 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3448 return NULL_RTX;
3449 else
3450 {
3451 tree dest = CALL_EXPR_ARG (exp, 0);
3452 tree src = CALL_EXPR_ARG (exp, 1);
3453 tree len = CALL_EXPR_ARG (exp, 2);
3454 return expand_builtin_mempcpy_args (dest, src, len,
3455 target, mode, /*endp=*/ 1);
3456 }
3457 }
3458
3459 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3460 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3461 so that this can also be called without constructing an actual CALL_EXPR.
3462 The other arguments and return value are the same as for
3463 expand_builtin_mempcpy. */
3464
static rtx
expand_builtin_mempcpy_args (tree dest, tree src, tree len,
			     rtx target, enum machine_mode mode, int endp)
{
  /* If return value is ignored, transform mempcpy into memcpy.  */
  if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_MEMCPY])
    {
      tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
      tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
					   dest, src, len);
      return expand_expr (result, target, mode, EXPAND_NORMAL);
    }
  else
    {
      const char *src_str;
      unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
      unsigned int dest_align
	= get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
      rtx dest_mem, src_mem, len_rtx;

      /* If either SRC or DEST is not a pointer type, don't do this
	 operation in-line.  */
      if (dest_align == 0 || src_align == 0)
	return NULL_RTX;

      /* If LEN is not constant, call the normal function.  */
      if (! host_integerp (len, 1))
	return NULL_RTX;

      len_rtx = expand_normal (len);
      src_str = c_getstr (src);

      /* If SRC is a string constant and block move would be done
	 by pieces, we can avoid loading the string from memory
	 and only store the computed constants.  ENDP is forwarded so
	 store_by_pieces returns the proper end pointer.  */
      if (src_str
	  && CONST_INT_P (len_rtx)
	  && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
	  && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
				  CONST_CAST (char *, src_str),
				  dest_align, false))
	{
	  dest_mem = get_memory_rtx (dest, len);
	  set_mem_align (dest_mem, dest_align);
	  dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
				      builtin_memcpy_read_str,
				      CONST_CAST (char *, src_str),
				      dest_align, false, endp);
	  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}

      /* Otherwise, inline the copy only when a piecewise move is
	 profitable for this constant length and alignment.  */
      if (CONST_INT_P (len_rtx)
	  && can_move_by_pieces (INTVAL (len_rtx),
				 MIN (dest_align, src_align)))
	{
	  dest_mem = get_memory_rtx (dest, len);
	  set_mem_align (dest_mem, dest_align);
	  src_mem = get_memory_rtx (src, len);
	  set_mem_align (src_mem, src_align);
	  dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
				     MIN (dest_align, src_align), endp);
	  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}

      return NULL_RTX;
    }
}
3536
3537 #ifndef HAVE_movstr
3538 # define HAVE_movstr 0
3539 # define CODE_FOR_movstr CODE_FOR_nothing
3540 #endif
3541
3542 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3543 we failed, the caller should emit a normal call, otherwise try to
3544 get the result in TARGET, if convenient. If ENDP is 0 return the
3545 destination pointer, if ENDP is 1 return the end pointer ala
3546 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3547 stpcpy. */
3548
static rtx
expand_movstr (tree dest, tree src, rtx target, int endp)
{
  rtx end;
  rtx dest_mem;
  rtx src_mem;
  rtx insn;
  const struct insn_data_d * data;

  /* Without a movstr insn there is nothing to expand to.  */
  if (!HAVE_movstr)
    return NULL_RTX;

  dest_mem = get_memory_rtx (dest, NULL);
  src_mem = get_memory_rtx (src, NULL);
  data = insn_data + CODE_FOR_movstr;
  if (!endp)
    {
      /* strcpy-style return: the destination pointer itself, so keep
	 it in TARGET and let the insn write the end address into a
	 scratch register.  */
      target = force_reg (Pmode, XEXP (dest_mem, 0));
      dest_mem = replace_equiv_address (dest_mem, target);
      end = gen_reg_rtx (Pmode);
    }
  else
    {
      /* End-pointer return: reuse TARGET for the insn's output when
	 possible.  TARGET == const0_rtx means the value is ignored.  */
      if (target == 0
	  || target == const0_rtx
	  || ! (*data->operand[0].predicate) (target, Pmode))
	{
	  end = gen_reg_rtx (Pmode);
	  if (target != const0_rtx)
	    target = end;
	}
      else
	end = target;
    }

  /* Honor the mode the insn pattern declares for operand 0.  */
  if (data->operand[0].mode != VOIDmode)
    end = gen_lowpart (data->operand[0].mode, end);

  insn = data->genfun (end, dest_mem, src_mem);

  gcc_assert (insn);

  emit_insn (insn);

  /* movstr is supposed to set end to the address of the NUL
     terminator.  If the caller requested a mempcpy-like return value,
     adjust it.  */
  if (endp == 1 && target != const0_rtx)
    {
      rtx tem = plus_constant (gen_lowpart (GET_MODE (target), end), 1);
      emit_move_insn (target, force_operand (tem, NULL_RTX));
    }

  return target;
}
3604
3605 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3606 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3607 try to get the result in TARGET, if convenient (and in mode MODE if that's
3608 convenient). */
3609
3610 static rtx
3611 expand_builtin_strcpy (tree exp, rtx target)
3612 {
3613 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3614 {
3615 tree dest = CALL_EXPR_ARG (exp, 0);
3616 tree src = CALL_EXPR_ARG (exp, 1);
3617 return expand_builtin_strcpy_args (dest, src, target);
3618 }
3619 return NULL_RTX;
3620 }
3621
3622 /* Helper function to do the actual work for expand_builtin_strcpy. The
3623 arguments to the builtin_strcpy call DEST and SRC are broken out
3624 so that this can also be called without constructing an actual CALL_EXPR.
3625 The other arguments and return value are the same as for
3626 expand_builtin_strcpy. */
3627
3628 static rtx
3629 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3630 {
3631 return expand_movstr (dest, src, target, /*endp=*/0);
3632 }
3633
3634 /* Expand a call EXP to the stpcpy builtin.
3635 Return NULL_RTX if we failed; the caller should emit a normal call,
3636 otherwise try to get the result in TARGET, if convenient (and in
3637 mode MODE if that's convenient). */
3638
static rtx
expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
{
  tree dst, src;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  /* If return value is ignored, transform stpcpy into strcpy.  */
  if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_STRCPY])
    {
      tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
      tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
      return expand_expr (result, target, mode, EXPAND_NORMAL);
    }
  else
    {
      tree len, lenp1;
      rtx ret;

      /* Ensure we get an actual string whose length can be evaluated at
	 compile-time, not an expression containing a string.  This is
	 because the latter will potentially produce pessimized code
	 when used to produce the return value.  */
      if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
	return expand_movstr (dst, src, target, /*endp=*/2);

      /* stpcpy copies strlen (SRC) + 1 bytes and returns DST + strlen,
	 which is exactly mempcpy with endp == 2.  */
      lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
      ret = expand_builtin_mempcpy_args (dst, src, lenp1,
					 target, mode, /*endp=*/2);

      if (ret)
	return ret;

      /* Fall back to strcpy plus an explicit DST + LEN computation
	 when the length is a compile-time constant.  */
      if (TREE_CODE (len) == INTEGER_CST)
	{
	  rtx len_rtx = expand_normal (len);

	  if (CONST_INT_P (len_rtx))
	    {
	      ret = expand_builtin_strcpy_args (dst, src, target);

	      if (ret)
		{
		  if (! target)
		    {
		      if (mode != VOIDmode)
			target = gen_reg_rtx (mode);
		      else
			target = gen_reg_rtx (GET_MODE (ret));
		    }
		  if (GET_MODE (target) != GET_MODE (ret))
		    ret = gen_lowpart (GET_MODE (target), ret);

		  /* The return value is the copied pointer advanced by
		     the string length.  */
		  ret = plus_constant (ret, INTVAL (len_rtx));
		  ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
		  gcc_assert (ret);

		  return target;
		}
	    }
	}

      return expand_movstr (dst, src, target, /*endp=*/2);
    }
}
3709
3710 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3711 bytes from constant string DATA + OFFSET and return it as target
3712 constant. */
3713
3714 rtx
3715 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3716 enum machine_mode mode)
3717 {
3718 const char *str = (const char *) data;
3719
3720 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3721 return const0_rtx;
3722
3723 return c_readstr (str + offset, mode);
3724 }
3725
3726 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3727 NULL_RTX if we failed; the caller should emit a normal call.  */
3728
static rtx
expand_builtin_strncpy (tree exp, rtx target)
{
  location_t loc = EXPR_LOCATION (exp);

  if (validate_arglist (exp,
			POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      tree len = CALL_EXPR_ARG (exp, 2);
      tree slen = c_strlen (src, 1);

      /* We must be passed a constant len and src parameter.  */
      if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
	return NULL_RTX;

      /* SLEN becomes strlen (SRC) + 1, the number of meaningful bytes
	 including the terminating NUL.  */
      slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

      /* We're required to pad with trailing zeros if the requested
	 len is greater than strlen(s2)+1.  In that case try to
	 use store_by_pieces, if it fails, punt.  */
      if (tree_int_cst_lt (slen, len))
	{
	  unsigned int dest_align
	    = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
	  const char *p = c_getstr (src);
	  rtx dest_mem;

	  if (!p || dest_align == 0 || !host_integerp (len, 1)
	      || !can_store_by_pieces (tree_low_cst (len, 1),
				       builtin_strncpy_read_str,
				       CONST_CAST (char *, p),
				       dest_align, false))
	    return NULL_RTX;

	  /* builtin_strncpy_read_str supplies zeros past the NUL, so
	     the store covers both the copy and the required padding.  */
	  dest_mem = get_memory_rtx (dest, len);
	  store_by_pieces (dest_mem, tree_low_cst (len, 1),
			   builtin_strncpy_read_str,
			   CONST_CAST (char *, p), dest_align, false, 0);
	  dest_mem = force_operand (XEXP (dest_mem, 0), target);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}
    }
  return NULL_RTX;
}
3776
3777 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3778 bytes from constant string DATA + OFFSET and return it as target
3779 constant. */
3780
3781 rtx
3782 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3783 enum machine_mode mode)
3784 {
3785 const char *c = (const char *) data;
3786 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3787
3788 memset (p, *c, GET_MODE_SIZE (mode));
3789
3790 return c_readstr (p, mode);
3791 }
3792
3793 /* Callback routine for store_by_pieces. Return the RTL of a register
3794 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3795 char value given in the RTL register data. For example, if mode is
3796 4 bytes wide, return the RTL for 0x01010101*data. */
3797
3798 static rtx
3799 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3800 enum machine_mode mode)
3801 {
3802 rtx target, coeff;
3803 size_t size;
3804 char *p;
3805
3806 size = GET_MODE_SIZE (mode);
3807 if (size == 1)
3808 return (rtx) data;
3809
3810 p = XALLOCAVEC (char, size);
3811 memset (p, 1, size);
3812 coeff = c_readstr (p, mode);
3813
3814 target = convert_to_mode (mode, (rtx) data, 1);
3815 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3816 return force_reg (mode, target);
3817 }
3818
3819 /* Expand expression EXP, which is a call to the memset builtin. Return
3820 NULL_RTX if we failed the caller should emit a normal call, otherwise
3821 try to get the result in TARGET, if convenient (and in mode MODE if that's
3822 convenient). */
3823
3824 static rtx
3825 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3826 {
3827 if (!validate_arglist (exp,
3828 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3829 return NULL_RTX;
3830 else
3831 {
3832 tree dest = CALL_EXPR_ARG (exp, 0);
3833 tree val = CALL_EXPR_ARG (exp, 1);
3834 tree len = CALL_EXPR_ARG (exp, 2);
3835 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3836 }
3837 }
3838
/* Helper function to do the actual work for expand_builtin_memset.  The
   arguments to the builtin_memset call DEST, VAL, and LEN are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_memset.  ORIG_EXP is the original call, used only for
   its location, tail-call flag and callee when falling back to a library
   call.  */

static rtx
expand_builtin_memset_args (tree dest, tree val, tree len,
			    rtx target, enum machine_mode mode, tree orig_exp)
{
  tree fndecl, fn;
  enum built_in_function fcode;
  char c;
  unsigned int dest_align;
  rtx dest_mem, dest_addr, len_rtx;
  /* Profile-feedback hints for the block operation; -1/0 mean unknown.  */
  HOST_WIDE_INT expected_size = -1;
  unsigned int expected_align = 0;

  dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);

  /* If DEST is not a pointer type, don't do this operation in-line.  */
  if (dest_align == 0)
    return NULL_RTX;

  if (currently_expanding_gimple_stmt)
    stringop_block_profile (currently_expanding_gimple_stmt,
			    &expected_align, &expected_size);

  if (expected_align < dest_align)
    expected_align = dest_align;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      /* Evaluate and ignore VAL in case it has side-effects.  */
      expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
      return expand_expr (dest, target, mode, EXPAND_NORMAL);
    }

  /* Stabilize the arguments in case we fail.  The same trees are reused
     by the do_libcall fallback below, so they must not be re-evaluated.  */
  dest = builtin_save_expr (dest);
  val = builtin_save_expr (val);
  len = builtin_save_expr (len);

  len_rtx = expand_normal (len);
  dest_mem = get_memory_rtx (dest, len);

  /* Case 1: the fill value is not a compile-time constant.  */
  if (TREE_CODE (val) != INTEGER_CST)
    {
      rtx val_rtx;

      val_rtx = expand_normal (val);
      val_rtx = convert_to_mode (TYPE_MODE (unsigned_char_type_node),
				 val_rtx, 0);

      /* Assume that we can memset by pieces if we can store
       * the coefficients by pieces (in the required modes).
       * We can't pass builtin_memset_gen_str as that emits RTL.  */
      c = 1;
      if (host_integerp (len, 1)
	  && can_store_by_pieces (tree_low_cst (len, 1),
				  builtin_memset_read_str, &c, dest_align,
				  true))
	{
	  val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
			       val_rtx);
	  store_by_pieces (dest_mem, tree_low_cst (len, 1),
			   builtin_memset_gen_str, val_rtx, dest_align,
			   true, 0);
	}
      else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
					dest_align, expected_align,
					expected_size))
	goto do_libcall;

      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  if (target_char_cast (val, &c))
    goto do_libcall;

  /* Case 2: a constant non-zero fill byte.  */
  if (c)
    {
      if (host_integerp (len, 1)
	  && can_store_by_pieces (tree_low_cst (len, 1),
				  builtin_memset_read_str, &c, dest_align,
				  true))
	store_by_pieces (dest_mem, tree_low_cst (len, 1),
			 builtin_memset_read_str, &c, dest_align, true, 0);
      else if (!set_storage_via_setmem (dest_mem, len_rtx, GEN_INT (c),
					dest_align, expected_align,
					expected_size))
	goto do_libcall;

      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  /* Case 3: a constant zero fill byte -- clear the storage.  */
  set_mem_align (dest_mem, dest_align);
  dest_addr = clear_storage_hints (dest_mem, len_rtx,
				   CALL_EXPR_TAILCALL (orig_exp)
				   ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
				   expected_align, expected_size);

  if (dest_addr == 0)
    {
      dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_addr = convert_memory_address (ptr_mode, dest_addr);
    }

  return dest_addr;

 do_libcall:
  /* Inline expansion failed; emit an explicit call to memset or bzero,
     rebuilt from the stabilized argument trees.  */
  fndecl = get_callee_fndecl (orig_exp);
  fcode = DECL_FUNCTION_CODE (fndecl);
  if (fcode == BUILT_IN_MEMSET)
    fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
				dest, val, len);
  else if (fcode == BUILT_IN_BZERO)
    fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
				dest, len);
  else
    gcc_unreachable ();
  gcc_assert (TREE_CODE (fn) == CALL_EXPR);
  CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
  return expand_call (fn, target, target == const0_rtx);
}
3969
3970 /* Expand expression EXP, which is a call to the bzero builtin. Return
3971 NULL_RTX if we failed the caller should emit a normal call. */
3972
3973 static rtx
3974 expand_builtin_bzero (tree exp)
3975 {
3976 tree dest, size;
3977 location_t loc = EXPR_LOCATION (exp);
3978
3979 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3980 return NULL_RTX;
3981
3982 dest = CALL_EXPR_ARG (exp, 0);
3983 size = CALL_EXPR_ARG (exp, 1);
3984
3985 /* New argument list transforming bzero(ptr x, int y) to
3986 memset(ptr x, int 0, size_t y). This is done this way
3987 so that if it isn't expanded inline, we fallback to
3988 calling bzero instead of memset. */
3989
3990 return expand_builtin_memset_args (dest, integer_zero_node,
3991 fold_convert_loc (loc, sizetype, size),
3992 const0_rtx, VOIDmode, exp);
3993 }
3994
/* Expand expression EXP, which is a call to the memcmp built-in function.
   Return NULL_RTX if we failed and the
   caller should emit a normal call, otherwise try to get the result in
   TARGET, if convenient (and in mode MODE, if that's convenient).  */

static rtx
expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
		       ATTRIBUTE_UNUSED enum machine_mode mode)
{
  location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);

  if (!validate_arglist (exp,
 			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

#if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
  {
    rtx arg1_rtx, arg2_rtx, arg3_rtx;
    rtx result;
    rtx insn;
    tree arg1 = CALL_EXPR_ARG (exp, 0);
    tree arg2 = CALL_EXPR_ARG (exp, 1);
    tree len = CALL_EXPR_ARG (exp, 2);

    /* Alignment in bytes; 0 means the operand was not a pointer.  */
    int arg1_align
      = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
    int arg2_align
      = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
    enum machine_mode insn_mode;

    /* Prefer the target's cmpmem pattern; fall back to cmpstrn, and
       punt entirely if the target provides neither.  */
#ifdef HAVE_cmpmemsi
    if (HAVE_cmpmemsi)
      insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
    else
#endif
#ifdef HAVE_cmpstrnsi
    if (HAVE_cmpstrnsi)
      insn_mode = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
    else
#endif
      return NULL_RTX;

    /* If we don't have POINTER_TYPE, call the function.  */
    if (arg1_align == 0 || arg2_align == 0)
      return NULL_RTX;

    /* Make a place to write the result of the instruction.  */
    result = target;
    if (! (result != 0
	   && REG_P (result) && GET_MODE (result) == insn_mode
	   && REGNO (result) >= FIRST_PSEUDO_REGISTER))
      result = gen_reg_rtx (insn_mode);

    arg1_rtx = get_memory_rtx (arg1, len);
    arg2_rtx = get_memory_rtx (arg2, len);
    arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));

    /* Set MEM_SIZE as appropriate.  */
    if (CONST_INT_P (arg3_rtx))
      {
	set_mem_size (arg1_rtx, arg3_rtx);
	set_mem_size (arg2_rtx, arg3_rtx);
      }

#ifdef HAVE_cmpmemsi
    if (HAVE_cmpmemsi)
      insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
			   GEN_INT (MIN (arg1_align, arg2_align)));
    else
#endif
#ifdef HAVE_cmpstrnsi
    if (HAVE_cmpstrnsi)
      insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
			    GEN_INT (MIN (arg1_align, arg2_align)));
    else
#endif
      gcc_unreachable ();

    /* The pattern expander may return NULL to decline; in that case
       emit a library call to memcmp instead.  */
    if (insn)
      emit_insn (insn);
    else
      emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
			       TYPE_MODE (integer_type_node), 3,
			       XEXP (arg1_rtx, 0), Pmode,
			       XEXP (arg2_rtx, 0), Pmode,
			       convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
						TYPE_UNSIGNED (sizetype)),
			       TYPE_MODE (sizetype));

    /* Return the value in the proper mode for this function.  */
    mode = TYPE_MODE (TREE_TYPE (exp));
    if (GET_MODE (result) == mode)
      return result;
    else if (target != 0)
      {
	convert_move (target, result, 0);
	return target;
      }
    else
      return convert_to_mode (mode, result, 0);
  }
#endif

  return NULL_RTX;
}
4100
/* Expand expression EXP, which is a call to the strcmp builtin.  Return NULL_RTX
   if we failed; the caller should emit a normal call.  Otherwise try to get
   the result in TARGET, if convenient.  */

static rtx
expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
{
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

#if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
  if (direct_optab_handler (cmpstr_optab, SImode) != CODE_FOR_nothing
      || direct_optab_handler (cmpstrn_optab, SImode) != CODE_FOR_nothing)
    {
      rtx arg1_rtx, arg2_rtx;
      rtx result, insn = NULL_RTX;
      tree fndecl, fn;
      tree arg1 = CALL_EXPR_ARG (exp, 0);
      tree arg2 = CALL_EXPR_ARG (exp, 1);

      /* Alignment in bytes; 0 means the operand was not a pointer.  */
      int arg1_align
	= get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
      int arg2_align
	= get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;

      /* If we don't have POINTER_TYPE, call the function.  */
      if (arg1_align == 0 || arg2_align == 0)
	return NULL_RTX;

      /* Stabilize the arguments in case gen_cmpstr(n)si fail.  */
      arg1 = builtin_save_expr (arg1);
      arg2 = builtin_save_expr (arg2);

      arg1_rtx = get_memory_rtx (arg1, NULL);
      arg2_rtx = get_memory_rtx (arg2, NULL);

#ifdef HAVE_cmpstrsi
      /* Try to call cmpstrsi.  */
      if (HAVE_cmpstrsi)
	{
	  enum machine_mode insn_mode
	    = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;

	  /* Make a place to write the result of the instruction.  */
	  result = target;
	  if (! (result != 0
		 && REG_P (result) && GET_MODE (result) == insn_mode
		 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	    result = gen_reg_rtx (insn_mode);

	  insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
			       GEN_INT (MIN (arg1_align, arg2_align)));
	}
#endif
#ifdef HAVE_cmpstrnsi
      /* Try to determine at least one length and call cmpstrnsi.  */
      if (!insn && HAVE_cmpstrnsi)
	{
	  tree len;
	  rtx arg3_rtx;

	  enum machine_mode insn_mode
	    = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
	  tree len1 = c_strlen (arg1, 1);
	  tree len2 = c_strlen (arg2, 1);

	  /* +1 so the bound covers the terminating NUL as well.  */
	  if (len1)
	    len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
	  if (len2)
	    len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);

	  /* If we don't have a constant length for the first, use the length
	     of the second, if we know it.  We don't require a constant for
	     this case; some cost analysis could be done if both are available
	     but neither is constant.  For now, assume they're equally cheap,
	     unless one has side effects.  If both strings have constant lengths,
	     use the smaller.  */

	  if (!len1)
	    len = len2;
	  else if (!len2)
	    len = len1;
	  else if (TREE_SIDE_EFFECTS (len1))
	    len = len2;
	  else if (TREE_SIDE_EFFECTS (len2))
	    len = len1;
	  else if (TREE_CODE (len1) != INTEGER_CST)
	    len = len2;
	  else if (TREE_CODE (len2) != INTEGER_CST)
	    len = len1;
	  else if (tree_int_cst_lt (len1, len2))
	    len = len1;
	  else
	    len = len2;

	  /* If both arguments have side effects, we cannot optimize.  */
	  if (!len || TREE_SIDE_EFFECTS (len))
	    goto do_libcall;

	  arg3_rtx = expand_normal (len);

	  /* Make a place to write the result of the instruction.  */
	  result = target;
	  if (! (result != 0
		 && REG_P (result) && GET_MODE (result) == insn_mode
		 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	    result = gen_reg_rtx (insn_mode);

	  insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
				GEN_INT (MIN (arg1_align, arg2_align)));
	}
#endif

      if (insn)
	{
	  enum machine_mode mode;
	  emit_insn (insn);

	  /* Return the value in the proper mode for this function.  */
	  mode = TYPE_MODE (TREE_TYPE (exp));
	  if (GET_MODE (result) == mode)
	    return result;
	  if (target == 0)
	    return convert_to_mode (mode, result, 0);
	  convert_move (target, result, 0);
	  return target;
	}

      /* Expand the library call ourselves using a stabilized argument
	 list to avoid re-evaluating the function's arguments twice.  */
#ifdef HAVE_cmpstrnsi
    do_libcall:
#endif
      fndecl = get_callee_fndecl (exp);
      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_call (fn, target, target == const0_rtx);
    }
#endif
  return NULL_RTX;
}
4243
/* Expand expression EXP, which is a call to the strncmp builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call.  Otherwise
   try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
			ATTRIBUTE_UNUSED enum machine_mode mode)
{
  location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);

  if (!validate_arglist (exp,
 			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* If c_strlen can determine an expression for one of the string
     lengths, and it doesn't have side effects, then emit cmpstrnsi
     using length MIN(strlen(string)+1, arg3).  */
#ifdef HAVE_cmpstrnsi
  if (HAVE_cmpstrnsi)
    {
      tree len, len1, len2;
      rtx arg1_rtx, arg2_rtx, arg3_rtx;
      rtx result, insn;
      tree fndecl, fn;
      tree arg1 = CALL_EXPR_ARG (exp, 0);
      tree arg2 = CALL_EXPR_ARG (exp, 1);
      tree arg3 = CALL_EXPR_ARG (exp, 2);

      /* Alignment in bytes; 0 means the operand was not a pointer.  */
      int arg1_align
	= get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
      int arg2_align
	= get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
      enum machine_mode insn_mode
	= insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;

      len1 = c_strlen (arg1, 1);
      len2 = c_strlen (arg2, 1);

      /* +1 so the bound covers the terminating NUL as well.  */
      if (len1)
	len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
      if (len2)
	len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);

      /* If we don't have a constant length for the first, use the length
	 of the second, if we know it.  We don't require a constant for
	 this case; some cost analysis could be done if both are available
	 but neither is constant.  For now, assume they're equally cheap,
	 unless one has side effects.  If both strings have constant lengths,
	 use the smaller.  */

      if (!len1)
	len = len2;
      else if (!len2)
	len = len1;
      else if (TREE_SIDE_EFFECTS (len1))
	len = len2;
      else if (TREE_SIDE_EFFECTS (len2))
	len = len1;
      else if (TREE_CODE (len1) != INTEGER_CST)
	len = len2;
      else if (TREE_CODE (len2) != INTEGER_CST)
	len = len1;
      else if (tree_int_cst_lt (len1, len2))
	len = len1;
      else
	len = len2;

      /* If both arguments have side effects, we cannot optimize.  */
      if (!len || TREE_SIDE_EFFECTS (len))
	return NULL_RTX;

      /* The actual new length parameter is MIN(len,arg3).  */
      len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
			     fold_convert_loc (loc, TREE_TYPE (len), arg3));

      /* If we don't have POINTER_TYPE, call the function.  */
      if (arg1_align == 0 || arg2_align == 0)
	return NULL_RTX;

      /* Make a place to write the result of the instruction.  */
      result = target;
      if (! (result != 0
	     && REG_P (result) && GET_MODE (result) == insn_mode
	     && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	result = gen_reg_rtx (insn_mode);

      /* Stabilize the arguments in case gen_cmpstrnsi fails.  */
      arg1 = builtin_save_expr (arg1);
      arg2 = builtin_save_expr (arg2);
      len = builtin_save_expr (len);

      arg1_rtx = get_memory_rtx (arg1, len);
      arg2_rtx = get_memory_rtx (arg2, len);
      arg3_rtx = expand_normal (len);
      insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
			    GEN_INT (MIN (arg1_align, arg2_align)));
      if (insn)
	{
	  emit_insn (insn);

	  /* Return the value in the proper mode for this function.  */
	  mode = TYPE_MODE (TREE_TYPE (exp));
	  if (GET_MODE (result) == mode)
	    return result;
	  if (target == 0)
	    return convert_to_mode (mode, result, 0);
	  convert_move (target, result, 0);
	  return target;
	}

      /* Expand the library call ourselves using a stabilized argument
	 list to avoid re-evaluating the function's arguments twice.  */
      fndecl = get_callee_fndecl (exp);
      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
				  arg1, arg2, len);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_call (fn, target, target == const0_rtx);
    }
#endif
  return NULL_RTX;
}
4366
/* Expand a call to __builtin_saveregs, generating the result in TARGET,
   if that's convenient.  */

rtx
expand_builtin_saveregs (void)
{
  rtx val, seq;

  /* Don't do __builtin_saveregs more than once in a function.
     Save the result of the first call and reuse it.  */
  if (saveregs_value != 0)
    return saveregs_value;

  /* When this function is called, it means that registers must be
     saved on entry to this function.  So we migrate the call to the
     first insn of this function.  */

  /* Capture the target-specific register-save insns in a sequence so
     they can be relocated as a unit.  */
  start_sequence ();

  /* Do whatever the machine needs done in this case.  */
  val = targetm.calls.expand_builtin_saveregs ();

  seq = get_insns ();
  end_sequence ();

  saveregs_value = val;

  /* Put the insns after the NOTE that starts the function.  If this
     is inside a start_sequence, make the outer-level insn chain current, so
     the code is placed at the start of the function.  */
  push_topmost_sequence ();
  emit_insn_after (seq, entry_of_function ());
  pop_topmost_sequence ();

  return val;
}
4403
4404 /* __builtin_args_info (N) returns word N of the arg space info
4405 for the current function. The number and meanings of words
4406 is controlled by the definition of CUMULATIVE_ARGS. */
4407
4408 static rtx
4409 expand_builtin_args_info (tree exp)
4410 {
4411 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
4412 int *word_ptr = (int *) &crtl->args.info;
4413
4414 gcc_assert (sizeof (CUMULATIVE_ARGS) % sizeof (int) == 0);
4415
4416 if (call_expr_nargs (exp) != 0)
4417 {
4418 if (!host_integerp (CALL_EXPR_ARG (exp, 0), 0))
4419 error ("argument of %<__builtin_args_info%> must be constant");
4420 else
4421 {
4422 HOST_WIDE_INT wordnum = tree_low_cst (CALL_EXPR_ARG (exp, 0), 0);
4423
4424 if (wordnum < 0 || wordnum >= nwords)
4425 error ("argument of %<__builtin_args_info%> out of range");
4426 else
4427 return GEN_INT (word_ptr[wordnum]);
4428 }
4429 }
4430 else
4431 error ("missing argument in %<__builtin_args_info%>");
4432
4433 return const0_rtx;
4434 }
4435
4436 /* Expand a call to __builtin_next_arg. */
4437
4438 static rtx
4439 expand_builtin_next_arg (void)
4440 {
4441 /* Checking arguments is already done in fold_builtin_next_arg
4442 that must be called before this function. */
4443 return expand_binop (ptr_mode, add_optab,
4444 crtl->args.internal_arg_pointer,
4445 crtl->args.arg_offset_rtx,
4446 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4447 }
4448
/* Make it easier for the backends by protecting the valist argument
   from multiple evaluations.  LOC is the location to use for new trees;
   NEEDS_LVALUE is nonzero when the caller will write through the
   returned expression (e.g. va_start/va_copy).  */

static tree
stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
{
  tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));

  /* The current way of determining the type of valist is completely
     bogus.  We should have the information on the va builtin instead.  */
  if (!vatype)
    vatype = targetm.fn_abi_va_list (cfun->decl);

  if (TREE_CODE (vatype) == ARRAY_TYPE)
    {
      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);

      /* For this case, the backends will be expecting a pointer to
	 vatype, but it's possible we've actually been given an array
	 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
	 So fix it.  */
      if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
	{
	  tree p1 = build_pointer_type (TREE_TYPE (vatype));
	  valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
	}
    }
  else
    {
      tree pt = build_pointer_type (vatype);

      if (! needs_lvalue)
	{
	  if (! TREE_SIDE_EFFECTS (valist))
	    return valist;

	  /* Take the address so the side-effecting expression is
	     evaluated only once, then re-dereference it below.  */
	  valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
	  TREE_SIDE_EFFECTS (valist) = 1;
	}

      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);
      valist = fold_build2_loc (loc, MEM_REF,
				vatype, valist, build_int_cst (pt, 0));
    }

  return valist;
}
4498
/* The "standard" definition of va_list is void*.  Used as the default
   for TARGET_BUILD_BUILTIN_VA_LIST.  */

tree
std_build_builtin_va_list (void)
{
  return ptr_type_node;
}
4506
/* The "standard" abi va_list is va_list_type_node.  FNDECL is unused;
   the default ABI does not vary per function.  */

tree
std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
{
  return va_list_type_node;
}
4514
4515 /* The "standard" type of va_list is va_list_type_node. */
4516
4517 tree
4518 std_canonical_va_list_type (tree type)
4519 {
4520 tree wtype, htype;
4521
4522 if (INDIRECT_REF_P (type))
4523 type = TREE_TYPE (type);
4524 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE(type)))
4525 type = TREE_TYPE (type);
4526 wtype = va_list_type_node;
4527 htype = type;
4528 /* Treat structure va_list types. */
4529 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4530 htype = TREE_TYPE (htype);
4531 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4532 {
4533 /* If va_list is an array type, the argument may have decayed
4534 to a pointer type, e.g. by being passed to another function.
4535 In that case, unwrap both types so that we can compare the
4536 underlying records. */
4537 if (TREE_CODE (htype) == ARRAY_TYPE
4538 || POINTER_TYPE_P (htype))
4539 {
4540 wtype = TREE_TYPE (wtype);
4541 htype = TREE_TYPE (htype);
4542 }
4543 }
4544 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4545 return va_list_type_node;
4546
4547 return NULL_TREE;
4548 }
4549
/* The "standard" implementation of va_start: just assign `nextarg' to
   the variable.  VALIST is the va_list lvalue; NEXTARG is the RTX for
   the address of the first anonymous argument.  */

void
std_expand_builtin_va_start (tree valist, rtx nextarg)
{
  /* Expand VALIST for writing, then store NEXTARG into it.  */
  rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
  convert_move (va_r, nextarg, 0);
}
4559
4560 /* Expand EXP, a call to __builtin_va_start. */
4561
4562 static rtx
4563 expand_builtin_va_start (tree exp)
4564 {
4565 rtx nextarg;
4566 tree valist;
4567 location_t loc = EXPR_LOCATION (exp);
4568
4569 if (call_expr_nargs (exp) < 2)
4570 {
4571 error_at (loc, "too few arguments to function %<va_start%>");
4572 return const0_rtx;
4573 }
4574
4575 if (fold_builtin_next_arg (exp, true))
4576 return const0_rtx;
4577
4578 nextarg = expand_builtin_next_arg ();
4579 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4580
4581 if (targetm.expand_builtin_va_start)
4582 targetm.expand_builtin_va_start (valist, nextarg);
4583 else
4584 std_expand_builtin_va_start (valist, nextarg);
4585
4586 return const0_rtx;
4587 }
4588
/* The "standard" implementation of va_arg: read the value from the
   current (padded) address and increment by the (padded) size.  VALIST
   is the va_list expression, TYPE the requested argument type; new
   statements are appended to PRE_P (and ROUNDED_SIZE may be shared with
   POST_P).  Returns a tree for the fetched value.  */

tree
std_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
			  gimple_seq *post_p)
{
  tree addr, t, type_size, rounded_size, valist_tmp;
  unsigned HOST_WIDE_INT align, boundary;
  bool indirect;

#ifdef ARGS_GROW_DOWNWARD
  /* All of the alignment and movement below is for args-grow-up machines.
     As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
     implement their own specialized gimplify_va_arg_expr routines.  */
  gcc_unreachable ();
#endif

  /* Types passed by reference arrive as a pointer; fetch the pointer
     and dereference it at the end.  */
  indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
  if (indirect)
    type = build_pointer_type (type);

  align = PARM_BOUNDARY / BITS_PER_UNIT;
  boundary = FUNCTION_ARG_BOUNDARY (TYPE_MODE (type), type);

  /* When we align parameter on stack for caller, if the parameter
     alignment is beyond MAX_SUPPORTED_STACK_ALIGNMENT, it will be
     aligned at MAX_SUPPORTED_STACK_ALIGNMENT.  We will match callee
     here with caller.  */
  if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
    boundary = MAX_SUPPORTED_STACK_ALIGNMENT;

  boundary /= BITS_PER_UNIT;

  /* Hoist the valist value into a temporary for the moment.  */
  valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);

  /* va_list pointer is aligned to PARM_BOUNDARY.  If argument actually
     requires greater alignment, we must perform dynamic alignment.  */
  if (boundary > align
      && !integer_zerop (TYPE_SIZE (type)))
    {
      /* valist_tmp = (valist_tmp + boundary - 1) & -boundary, emitted
	 as two assignments so each is independently gimple.  */
      t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
		  fold_build2 (POINTER_PLUS_EXPR,
			       TREE_TYPE (valist),
			       valist_tmp, size_int (boundary - 1)));
      gimplify_and_add (t, pre_p);

      t = fold_convert (sizetype, valist_tmp);
      t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
		  fold_convert (TREE_TYPE (valist),
				fold_build2 (BIT_AND_EXPR, sizetype, t,
					     size_int (-boundary))));
      gimplify_and_add (t, pre_p);
    }
  else
    boundary = align;

  /* If the actual alignment is less than the alignment of the type,
     adjust the type accordingly so that we don't assume strict alignment
     when dereferencing the pointer.  */
  boundary *= BITS_PER_UNIT;
  if (boundary < TYPE_ALIGN (type))
    {
      type = build_variant_type_copy (type);
      TYPE_ALIGN (type) = boundary;
    }

  /* Compute the rounded size of the type.  */
  type_size = size_in_bytes (type);
  rounded_size = round_up (type_size, align);

  /* Reduce rounded_size so it's sharable with the postqueue.  */
  gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);

  /* Get AP.  */
  addr = valist_tmp;
  if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
    {
      /* Small args are padded downward.  */
      t = fold_build2_loc (input_location, GT_EXPR, sizetype,
			   rounded_size, size_int (align));
      t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
		       size_binop (MINUS_EXPR, rounded_size, type_size));
      addr = fold_build2 (POINTER_PLUS_EXPR,
			  TREE_TYPE (addr), addr, t);
    }

  /* Compute new value for AP.  */
  t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist), valist_tmp, rounded_size);
  t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
  gimplify_and_add (t, pre_p);

  addr = fold_convert (build_pointer_type (type), addr);

  /* For pass-by-reference, ADDR holds a pointer to the pointer to the
     actual value; add the extra dereference.  */
  if (indirect)
    addr = build_va_arg_indirect_ref (addr);

  return build_va_arg_indirect_ref (addr);
}
4689
4690 /* Build an indirect-ref expression over the given TREE, which represents a
4691 piece of a va_arg() expansion. */
4692 tree
4693 build_va_arg_indirect_ref (tree addr)
4694 {
4695 addr = build_fold_indirect_ref_loc (EXPR_LOCATION (addr), addr);
4696
4697 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
4698 mf_mark (addr);
4699
4700 return addr;
4701 }
4702
4703 /* Return a dummy expression of type TYPE in order to keep going after an
4704 error. */
4705
4706 static tree
4707 dummy_object (tree type)
4708 {
4709 tree t = build_int_cst (build_pointer_type (type), 0);
4710 return build1 (INDIRECT_REF, type, t);
4711 }
4712
/* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
   builtin function, but a very special sort of operator.

   *EXPR_P is the VA_ARG_EXPR to lower; PRE_P and POST_P collect
   statements to be emitted before and after it.  Returns GS_ERROR if
   the first argument is not a valid va_list, GS_ALL_DONE when the
   expression was replaced by a dummy (the undefined-promotion case
   below, or a target without the gimplify hook), and GS_OK after the
   target hook has rewritten the expression.  */

enum gimplify_status
gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree promoted_type, have_va_type;
  tree valist = TREE_OPERAND (*expr_p, 0);
  tree type = TREE_TYPE (*expr_p);
  tree t;
  location_t loc = EXPR_LOCATION (*expr_p);

  /* Verify that valist is of the proper type.  */
  have_va_type = TREE_TYPE (valist);
  if (have_va_type == error_mark_node)
    return GS_ERROR;
  have_va_type = targetm.canonical_va_list_type (have_va_type);

  if (have_va_type == NULL_TREE)
    {
      error_at (loc, "first argument to %<va_arg%> not of type %<va_list%>");
      return GS_ERROR;
    }

  /* Generate a diagnostic for requesting data of a type that cannot
     be passed through `...' due to type promotion at the call site.  */
  if ((promoted_type = lang_hooks.types.type_promotes_to (type))
      != type)
    {
      static bool gave_help;
      bool warned;

      /* Unfortunately, this is merely undefined, rather than a constraint
	 violation, so we cannot make this an error.  If this call is never
	 executed, the program is still strictly conforming.  */
      warned = warning_at (loc, 0,
			   "%qT is promoted to %qT when passed through %<...%>",
			   type, promoted_type);
      /* The follow-up hint is given only once per compilation.  */
      if (!gave_help && warned)
	{
	  gave_help = true;
	  inform (loc, "(so you should pass %qT not %qT to %<va_arg%>)",
		  promoted_type, type);
	}

      /* We can, however, treat "undefined" any way we please.
	 Call abort to encourage the user to fix the program.  */
      if (warned)
	inform (loc, "if this code is reached, the program will abort");
      /* Before the abort, allow the evaluation of the va_list
	 expression to exit or longjmp.  */
      gimplify_and_add (valist, pre_p);
      t = build_call_expr_loc (loc,
			       implicit_built_in_decls[BUILT_IN_TRAP], 0);
      gimplify_and_add (t, pre_p);

      /* This is dead code, but go ahead and finish so that the
	 mode of the result comes out right.  */
      *expr_p = dummy_object (type);
      return GS_ALL_DONE;
    }
  else
    {
      /* Make it easier for the backends by protecting the valist argument
	 from multiple evaluations.  */
      if (TREE_CODE (have_va_type) == ARRAY_TYPE)
	{
	  /* For this case, the backends will be expecting a pointer to
	     TREE_TYPE (abi), but it's possible we've
	     actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
	     So fix it.  */
	  if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
	    {
	      tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
	      valist = fold_convert_loc (loc, p1,
					 build_fold_addr_expr_loc (loc, valist));
	    }

	  gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
	}
      else
	gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);

      if (!targetm.gimplify_va_arg_expr)
	/* FIXME: Once most targets are converted we should merely
	   assert this is non-null.  */
	return GS_ALL_DONE;

      *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
      return GS_OK;
    }
}
4805
4806 /* Expand EXP, a call to __builtin_va_end. */
4807
4808 static rtx
4809 expand_builtin_va_end (tree exp)
4810 {
4811 tree valist = CALL_EXPR_ARG (exp, 0);
4812
4813 /* Evaluate for side effects, if needed. I hate macros that don't
4814 do that. */
4815 if (TREE_SIDE_EFFECTS (valist))
4816 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4817
4818 return const0_rtx;
4819 }
4820
4821 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4822 builtin rather than just as an assignment in stdarg.h because of the
4823 nastiness of array-type va_list types. */
4824
4825 static rtx
4826 expand_builtin_va_copy (tree exp)
4827 {
4828 tree dst, src, t;
4829 location_t loc = EXPR_LOCATION (exp);
4830
4831 dst = CALL_EXPR_ARG (exp, 0);
4832 src = CALL_EXPR_ARG (exp, 1);
4833
4834 dst = stabilize_va_list_loc (loc, dst, 1);
4835 src = stabilize_va_list_loc (loc, src, 0);
4836
4837 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4838
4839 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4840 {
4841 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4842 TREE_SIDE_EFFECTS (t) = 1;
4843 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4844 }
4845 else
4846 {
4847 rtx dstb, srcb, size;
4848
4849 /* Evaluate to pointers. */
4850 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4851 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4852 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4853 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4854
4855 dstb = convert_memory_address (Pmode, dstb);
4856 srcb = convert_memory_address (Pmode, srcb);
4857
4858 /* "Dereference" to BLKmode memories. */
4859 dstb = gen_rtx_MEM (BLKmode, dstb);
4860 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4861 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4862 srcb = gen_rtx_MEM (BLKmode, srcb);
4863 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4864 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4865
4866 /* Copy. */
4867 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4868 }
4869
4870 return const0_rtx;
4871 }
4872
4873 /* Expand a call to one of the builtin functions __builtin_frame_address or
4874 __builtin_return_address. */
4875
4876 static rtx
4877 expand_builtin_frame_address (tree fndecl, tree exp)
4878 {
4879 /* The argument must be a nonnegative integer constant.
4880 It counts the number of frames to scan up the stack.
4881 The value is the return address saved in that frame. */
4882 if (call_expr_nargs (exp) == 0)
4883 /* Warning about missing arg was already issued. */
4884 return const0_rtx;
4885 else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
4886 {
4887 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4888 error ("invalid argument to %<__builtin_frame_address%>");
4889 else
4890 error ("invalid argument to %<__builtin_return_address%>");
4891 return const0_rtx;
4892 }
4893 else
4894 {
4895 rtx tem
4896 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
4897 tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
4898
4899 /* Some ports cannot access arbitrary stack frames. */
4900 if (tem == NULL)
4901 {
4902 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4903 warning (0, "unsupported argument to %<__builtin_frame_address%>");
4904 else
4905 warning (0, "unsupported argument to %<__builtin_return_address%>");
4906 return const0_rtx;
4907 }
4908
4909 /* For __builtin_frame_address, return what we've got. */
4910 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4911 return tem;
4912
4913 if (!REG_P (tem)
4914 && ! CONSTANT_P (tem))
4915 tem = copy_to_mode_reg (Pmode, tem);
4916 return tem;
4917 }
4918 }
4919
4920 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if
4921 we failed and the caller should emit a normal call, otherwise try to get
4922 the result in TARGET, if convenient. */
4923
4924 static rtx
4925 expand_builtin_alloca (tree exp, rtx target)
4926 {
4927 rtx op0;
4928 rtx result;
4929
4930 /* Emit normal call if marked not-inlineable. */
4931 if (CALL_CANNOT_INLINE_P (exp))
4932 return NULL_RTX;
4933
4934 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4935 return NULL_RTX;
4936
4937 /* Compute the argument. */
4938 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4939
4940 /* Allocate the desired space. */
4941 result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
4942 result = convert_memory_address (ptr_mode, result);
4943
4944 return result;
4945 }
4946
4947 /* Expand a call to a bswap builtin with argument ARG0. MODE
4948 is the mode to expand with. */
4949
4950 static rtx
4951 expand_builtin_bswap (tree exp, rtx target, rtx subtarget)
4952 {
4953 enum machine_mode mode;
4954 tree arg;
4955 rtx op0;
4956
4957 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4958 return NULL_RTX;
4959
4960 arg = CALL_EXPR_ARG (exp, 0);
4961 mode = TYPE_MODE (TREE_TYPE (arg));
4962 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4963
4964 target = expand_unop (mode, bswap_optab, op0, target, 1);
4965
4966 gcc_assert (target);
4967
4968 return convert_to_mode (mode, target, 0);
4969 }
4970
4971 /* Expand a call to a unary builtin in EXP.
4972 Return NULL_RTX if a normal call should be emitted rather than expanding the
4973 function in-line. If convenient, the result should be placed in TARGET.
4974 SUBTARGET may be used as the target for computing one of EXP's operands. */
4975
4976 static rtx
4977 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
4978 rtx subtarget, optab op_optab)
4979 {
4980 rtx op0;
4981
4982 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4983 return NULL_RTX;
4984
4985 /* Compute the argument. */
4986 op0 = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
4987 VOIDmode, EXPAND_NORMAL);
4988 /* Compute op, into TARGET if possible.
4989 Set TARGET to wherever the result comes back. */
4990 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4991 op_optab, op0, target, 1);
4992 gcc_assert (target);
4993
4994 return convert_to_mode (target_mode, target, 0);
4995 }
4996
4997 /* Expand a call to __builtin_expect. We just return our argument
4998 as the builtin_expect semantic should've been already executed by
4999 tree branch prediction pass. */
5000
5001 static rtx
5002 expand_builtin_expect (tree exp, rtx target)
5003 {
5004 tree arg;
5005
5006 if (call_expr_nargs (exp) < 2)
5007 return const0_rtx;
5008 arg = CALL_EXPR_ARG (exp, 0);
5009
5010 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5011 /* When guessing was done, the hints should be already stripped away. */
5012 gcc_assert (!flag_guess_branch_prob
5013 || optimize == 0 || seen_error ());
5014 return target;
5015 }
5016
/* Emit RTL that traps unconditionally: use the target's trap insn if
   the machine description provides one, otherwise fall back to a
   libcall to abort.  Control never continues past this point, so a
   barrier is emitted afterwards.  */
void
expand_builtin_trap (void)
{
#ifdef HAVE_trap
  if (HAVE_trap)
    emit_insn (gen_trap ());
  else
#endif
    emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
  emit_barrier ();
}
5028
/* Expand a call to __builtin_unreachable.  We do nothing except emit
   a barrier saying that control flow will not pass here.

   It is the responsibility of the program being compiled to ensure
   that control flow does never reach __builtin_unreachable.  */
static void
expand_builtin_unreachable (void)
{
  /* No code is needed; the barrier tells later RTL passes that the
     preceding instruction does not fall through.  */
  emit_barrier ();
}
5039
5040 /* Expand EXP, a call to fabs, fabsf or fabsl.
5041 Return NULL_RTX if a normal call should be emitted rather than expanding
5042 the function inline. If convenient, the result should be placed
5043 in TARGET. SUBTARGET may be used as the target for computing
5044 the operand. */
5045
5046 static rtx
5047 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5048 {
5049 enum machine_mode mode;
5050 tree arg;
5051 rtx op0;
5052
5053 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5054 return NULL_RTX;
5055
5056 arg = CALL_EXPR_ARG (exp, 0);
5057 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5058 mode = TYPE_MODE (TREE_TYPE (arg));
5059 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5060 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5061 }
5062
5063 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5064 Return NULL is a normal call should be emitted rather than expanding the
5065 function inline. If convenient, the result should be placed in TARGET.
5066 SUBTARGET may be used as the target for computing the operand. */
5067
5068 static rtx
5069 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5070 {
5071 rtx op0, op1;
5072 tree arg;
5073
5074 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5075 return NULL_RTX;
5076
5077 arg = CALL_EXPR_ARG (exp, 0);
5078 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5079
5080 arg = CALL_EXPR_ARG (exp, 1);
5081 op1 = expand_normal (arg);
5082
5083 return expand_copysign (op0, op1, target);
5084 }
5085
5086 /* Create a new constant string literal and return a char* pointer to it.
5087 The STRING_CST value is the LEN characters at STR. */
5088 tree
5089 build_string_literal (int len, const char *str)
5090 {
5091 tree t, elem, index, type;
5092
5093 t = build_string (len, str);
5094 elem = build_type_variant (char_type_node, 1, 0);
5095 index = build_index_type (size_int (len - 1));
5096 type = build_array_type (elem, index);
5097 TREE_TYPE (t) = type;
5098 TREE_CONSTANT (t) = 1;
5099 TREE_READONLY (t) = 1;
5100 TREE_STATIC (t) = 1;
5101
5102 type = build_pointer_type (elem);
5103 t = build1 (ADDR_EXPR, type,
5104 build4 (ARRAY_REF, elem,
5105 t, integer_zero_node, NULL_TREE, NULL_TREE));
5106 return t;
5107 }
5108
5109 /* Expand a call to either the entry or exit function profiler. */
5110
5111 static rtx
5112 expand_builtin_profile_func (bool exitp)
5113 {
5114 rtx this_rtx, which;
5115
5116 this_rtx = DECL_RTL (current_function_decl);
5117 gcc_assert (MEM_P (this_rtx));
5118 this_rtx = XEXP (this_rtx, 0);
5119
5120 if (exitp)
5121 which = profile_function_exit_libfunc;
5122 else
5123 which = profile_function_entry_libfunc;
5124
5125 emit_library_call (which, LCT_NORMAL, VOIDmode, 2, this_rtx, Pmode,
5126 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
5127 0),
5128 Pmode);
5129
5130 return const0_rtx;
5131 }
5132
/* Expand a call to __builtin___clear_cache.  Returns NULL_RTX when
   the builtin should instead expand to the normal libgcc call, and
   const0_rtx when the call was fully expanded in-line (or is a
   no-op for this target).  */

static rtx
expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
{
#ifndef HAVE_clear_cache
#ifdef CLEAR_INSN_CACHE
  /* There is no "clear_cache" insn, and __clear_cache() in libgcc
     does something.  Just do the default expansion to a call to
     __clear_cache().  */
  return NULL_RTX;
#else
  /* There is no "clear_cache" insn, and __clear_cache() in libgcc
     does nothing.  There is no need to call it.  Do nothing.  */
  return const0_rtx;
#endif /* CLEAR_INSN_CACHE */
#else
  /* We have a "clear_cache" insn, and it will handle everything.  */
  tree begin, end;
  rtx begin_rtx, end_rtx;
  enum insn_code icode;

  /* We must not expand to a library call.  If we did, any
     fallback library function in libgcc that might contain a call to
     __builtin___clear_cache() would recurse infinitely.  */
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    {
      error ("both arguments to %<__builtin___clear_cache%> must be pointers");
      return const0_rtx;
    }

  if (HAVE_clear_cache)
    {
      icode = CODE_FOR_clear_cache;

      /* Expand both pointer arguments and force them into whatever
	 form the insn's operand predicates require.  */
      begin = CALL_EXPR_ARG (exp, 0);
      begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
      begin_rtx = convert_memory_address (Pmode, begin_rtx);
      if (!insn_data[icode].operand[0].predicate (begin_rtx, Pmode))
	begin_rtx = copy_to_mode_reg (Pmode, begin_rtx);

      end = CALL_EXPR_ARG (exp, 1);
      end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
      end_rtx = convert_memory_address (Pmode, end_rtx);
      if (!insn_data[icode].operand[1].predicate (end_rtx, Pmode))
	end_rtx = copy_to_mode_reg (Pmode, end_rtx);

      emit_insn (gen_clear_cache (begin_rtx, end_rtx));
    }
  return const0_rtx;
#endif /* HAVE_clear_cache */
}
5185
5186 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5187
5188 static rtx
5189 round_trampoline_addr (rtx tramp)
5190 {
5191 rtx temp, addend, mask;
5192
5193 /* If we don't need too much alignment, we'll have been guaranteed
5194 proper alignment by get_trampoline_type. */
5195 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5196 return tramp;
5197
5198 /* Round address up to desired boundary. */
5199 temp = gen_reg_rtx (Pmode);
5200 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5201 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5202
5203 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5204 temp, 0, OPTAB_LIB_WIDEN);
5205 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5206 temp, 0, OPTAB_LIB_WIDEN);
5207
5208 return tramp;
5209 }
5210
/* Expand a call to __builtin_init_trampoline.  EXP's three pointer
   arguments are the trampoline storage, the nested function, and the
   static chain value.  Returns const0_rtx on success, NULL_RTX when
   a normal call should be emitted instead.  */
static rtx
expand_builtin_init_trampoline (tree exp)
{
  tree t_tramp, t_func, t_chain;
  rtx m_tramp, r_tramp, r_chain, tmp;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
			 POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_tramp = CALL_EXPR_ARG (exp, 0);
  t_func = CALL_EXPR_ARG (exp, 1);
  t_chain = CALL_EXPR_ARG (exp, 2);

  r_tramp = expand_normal (t_tramp);
  m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
  MEM_NOTRAP_P (m_tramp) = 1;

  /* The TRAMP argument should be the address of a field within the
     local function's FRAME decl.  Let's see if we can fill in the
     MEM_ATTRs for this memory.  */
  if (TREE_CODE (t_tramp) == ADDR_EXPR)
    set_mem_attributes_minus_bitpos (m_tramp, TREE_OPERAND (t_tramp, 0),
				     true, 0);

  /* If rounding was needed, the original MEM's attributes no longer
     describe the rounded address; rebuild them.  */
  tmp = round_trampoline_addr (r_tramp);
  if (tmp != r_tramp)
    {
      m_tramp = change_address (m_tramp, BLKmode, tmp);
      set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
      set_mem_size (m_tramp, GEN_INT (TRAMPOLINE_SIZE));
    }

  /* The FUNC argument should be the address of the nested function.
     Extract the actual function decl to pass to the hook.  */
  gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
  t_func = TREE_OPERAND (t_func, 0);
  gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);

  r_chain = expand_normal (t_chain);

  /* Generate insns to initialize the trampoline.  */
  targetm.calls.trampoline_init (m_tramp, t_func, r_chain);

  trampolines_created = 1;
  return const0_rtx;
}
5258
5259 static rtx
5260 expand_builtin_adjust_trampoline (tree exp)
5261 {
5262 rtx tramp;
5263
5264 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5265 return NULL_RTX;
5266
5267 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5268 tramp = round_trampoline_addr (tramp);
5269 if (targetm.calls.trampoline_adjust_address)
5270 tramp = targetm.calls.trampoline_adjust_address (tramp);
5271
5272 return tramp;
5273 }
5274
/* Expand the call EXP to the built-in signbit, signbitf or signbitl
   function.  The function first checks whether the back end provides
   an insn to implement signbit for the respective mode.  If not, it
   checks whether the floating point format of the value is such that
   the sign bit can be extracted.  If that is not the case, the
   function returns NULL_RTX to indicate that a normal call should be
   emitted rather than expanding the function in-line.  EXP is the
   expression that is a call to the builtin function; if convenient,
   the result should be placed in TARGET.  */
static rtx
expand_builtin_signbit (tree exp, rtx target)
{
  const struct real_format *fmt;
  enum machine_mode fmode, imode, rmode;
  tree arg;
  int word, bitpos;
  enum insn_code icode;
  rtx temp;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* FMODE is the mode of the float argument, RMODE the mode of the
     integer result.  */
  arg = CALL_EXPR_ARG (exp, 0);
  fmode = TYPE_MODE (TREE_TYPE (arg));
  rmode = TYPE_MODE (TREE_TYPE (exp));
  fmt = REAL_MODE_FORMAT (fmode);

  arg = builtin_save_expr (arg);

  /* Expand the argument yielding a RTX expression. */
  temp = expand_normal (arg);

  /* Check if the back end provides an insn that handles signbit for the
     argument's mode. */
  icode = optab_handler (signbit_optab, fmode);
  if (icode != CODE_FOR_nothing)
    {
      rtx last = get_last_insn ();
      target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
      if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
	return target;
      /* The insn emission failed; discard anything it produced.  */
      delete_insns_since (last);
    }

  /* For floating point formats without a sign bit, implement signbit
     as "ARG < 0.0".  */
  bitpos = fmt->signbit_ro;
  if (bitpos < 0)
  {
    /* But we can't do this if the format supports signed zero.  */
    if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
      return NULL_RTX;

    arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
		       build_real (TREE_TYPE (arg), dconst0));
    return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
  }

  /* Reduce the float value to the integer word that contains the sign
     bit, adjusting BITPOS to be relative to that word.  */
  if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
    {
      imode = int_mode_for_mode (fmode);
      if (imode == BLKmode)
	return NULL_RTX;
      temp = gen_lowpart (imode, temp);
    }
  else
    {
      imode = word_mode;
      /* Handle targets with different FP word orders.  */
      if (FLOAT_WORDS_BIG_ENDIAN)
	word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
      else
	word = bitpos / BITS_PER_WORD;
      temp = operand_subword_force (temp, word, fmode);
      bitpos = bitpos % BITS_PER_WORD;
    }

  /* Force the intermediate word_mode (or narrower) result into a
     register.  This avoids attempting to create paradoxical SUBREGs
     of floating point modes below.  */
  temp = force_reg (imode, temp);

  /* If the bitpos is within the "result mode" lowpart, the operation
     can be implemented with a single bitwise AND.  Otherwise, we need
     a right shift and an AND.  */

  if (bitpos < GET_MODE_BITSIZE (rmode))
    {
      double_int mask = double_int_setbit (double_int_zero, bitpos);

      if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
	temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp,
			   immed_double_int_const (mask, rmode),
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }
  else
    {
      /* Perform a logical right shift to place the signbit in the least
	 significant bit, then truncate the result to the desired mode
	 and mask just this bit.  */
      temp = expand_shift (RSHIFT_EXPR, imode, temp,
			   build_int_cst (NULL_TREE, bitpos), NULL_RTX, 1);
      temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp, const1_rtx,
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }

  return temp;
}
5386
5387 /* Expand fork or exec calls. TARGET is the desired target of the
5388 call. EXP is the call. FN is the
5389 identificator of the actual function. IGNORE is nonzero if the
5390 value is to be ignored. */
5391
5392 static rtx
5393 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5394 {
5395 tree id, decl;
5396 tree call;
5397
5398 /* If we are not profiling, just call the function. */
5399 if (!profile_arc_flag)
5400 return NULL_RTX;
5401
5402 /* Otherwise call the wrapper. This should be equivalent for the rest of
5403 compiler, so the code does not diverge, and the wrapper may run the
5404 code necessary for keeping the profiling sane. */
5405
5406 switch (DECL_FUNCTION_CODE (fn))
5407 {
5408 case BUILT_IN_FORK:
5409 id = get_identifier ("__gcov_fork");
5410 break;
5411
5412 case BUILT_IN_EXECL:
5413 id = get_identifier ("__gcov_execl");
5414 break;
5415
5416 case BUILT_IN_EXECV:
5417 id = get_identifier ("__gcov_execv");
5418 break;
5419
5420 case BUILT_IN_EXECLP:
5421 id = get_identifier ("__gcov_execlp");
5422 break;
5423
5424 case BUILT_IN_EXECLE:
5425 id = get_identifier ("__gcov_execle");
5426 break;
5427
5428 case BUILT_IN_EXECVP:
5429 id = get_identifier ("__gcov_execvp");
5430 break;
5431
5432 case BUILT_IN_EXECVE:
5433 id = get_identifier ("__gcov_execve");
5434 break;
5435
5436 default:
5437 gcc_unreachable ();
5438 }
5439
5440 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5441 FUNCTION_DECL, id, TREE_TYPE (fn));
5442 DECL_EXTERNAL (decl) = 1;
5443 TREE_PUBLIC (decl) = 1;
5444 DECL_ARTIFICIAL (decl) = 1;
5445 TREE_NOTHROW (decl) = 1;
5446 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5447 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5448 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5449 return expand_call (call, target, ignore);
5450 }
5451
5452
5453 \f
/* Reconstitute a mode for a __sync intrinsic operation.  Since the type of
   the pointer in these functions is void*, the tree optimizers may remove
   casts.  The mode computed in expand_builtin isn't reliable either, due
   to __sync_bool_compare_and_swap.

   FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
   group of builtins.  This gives us log2 of the mode size.  */

static inline enum machine_mode
get_builtin_sync_mode (int fcode_diff)
{
  /* The size is not negotiable, so ask not to get BLKmode in return
     if the target indicates that a smaller size would be better.
     (The final 0 argument is the "limit" flag of mode_for_size.)  */
  return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
}
5469
5470 /* Expand the memory expression LOC and return the appropriate memory operand
5471 for the builtin_sync operations. */
5472
5473 static rtx
5474 get_builtin_sync_mem (tree loc, enum machine_mode mode)
5475 {
5476 rtx addr, mem;
5477
5478 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5479 addr = convert_memory_address (Pmode, addr);
5480
5481 /* Note that we explicitly do not want any alias information for this
5482 memory, so that we kill all other live memories. Otherwise we don't
5483 satisfy the full barrier semantics of the intrinsic. */
5484 mem = validize_mem (gen_rtx_MEM (mode, addr));
5485
5486 set_mem_align (mem, get_pointer_alignment (loc, BIGGEST_ALIGNMENT));
5487 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5488 MEM_VOLATILE_P (mem) = 1;
5489
5490 return mem;
5491 }
5492
/* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
   EXP is the CALL_EXPR.  CODE is the rtx code
   that corresponds to the arithmetic or logical operation from the name;
   an exception here is that NOT actually means NAND.  TARGET is an optional
   place for us to store the results; AFTER is true if this is the
   fetch_and_xxx form.  IGNORE is true if we don't actually care about
   the result of the operation at all.  */

static rtx
expand_builtin_sync_operation (enum machine_mode mode, tree exp,
			       enum rtx_code code, bool after,
			       rtx target, bool ignore)
{
  rtx val, mem;
  enum machine_mode old_mode;
  location_t loc = EXPR_LOCATION (exp);

  /* Warn (once per flavor, per compilation) that the NAND builtins
     changed meaning in GCC 4.4.  */
  if (code == NOT && warn_sync_nand)
    {
      tree fndecl = get_callee_fndecl (exp);
      enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

      static bool warned_f_a_n, warned_n_a_f;

      switch (fcode)
	{
	case BUILT_IN_FETCH_AND_NAND_1:
	case BUILT_IN_FETCH_AND_NAND_2:
	case BUILT_IN_FETCH_AND_NAND_4:
	case BUILT_IN_FETCH_AND_NAND_8:
	case BUILT_IN_FETCH_AND_NAND_16:

	  if (warned_f_a_n)
	    break;

	  fndecl = implicit_built_in_decls[BUILT_IN_FETCH_AND_NAND_N];
	  inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
	  warned_f_a_n = true;
	  break;

	case BUILT_IN_NAND_AND_FETCH_1:
	case BUILT_IN_NAND_AND_FETCH_2:
	case BUILT_IN_NAND_AND_FETCH_4:
	case BUILT_IN_NAND_AND_FETCH_8:
	case BUILT_IN_NAND_AND_FETCH_16:

	  if (warned_n_a_f)
	    break;

	  fndecl = implicit_built_in_decls[BUILT_IN_NAND_AND_FETCH_N];
	  inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
	  warned_n_a_f = true;
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
  /* If VAL is promoted to a wider mode, convert it back to MODE.  Take care
     of CONST_INTs, where we know the old_mode only from the call argument.  */
  old_mode = GET_MODE (val);
  if (old_mode == VOIDmode)
    old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
  val = convert_modes (mode, old_mode, val, 1);

  if (ignore)
    return expand_sync_operation (mem, val, code);
  else
    return expand_sync_fetch_operation (mem, val, code, after, target);
}
5568
5569 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5570 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5571 true if this is the boolean form. TARGET is a place for us to store the
5572 results; this is NOT optional if IS_BOOL is true. */
5573
5574 static rtx
5575 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5576 bool is_bool, rtx target)
5577 {
5578 rtx old_val, new_val, mem;
5579 enum machine_mode old_mode;
5580
5581 /* Expand the operands. */
5582 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5583
5584
5585 old_val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX,
5586 mode, EXPAND_NORMAL);
5587 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5588 of CONST_INTs, where we know the old_mode only from the call argument. */
5589 old_mode = GET_MODE (old_val);
5590 if (old_mode == VOIDmode)
5591 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5592 old_val = convert_modes (mode, old_mode, old_val, 1);
5593
5594 new_val = expand_expr (CALL_EXPR_ARG (exp, 2), NULL_RTX,
5595 mode, EXPAND_NORMAL);
5596 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5597 of CONST_INTs, where we know the old_mode only from the call argument. */
5598 old_mode = GET_MODE (new_val);
5599 if (old_mode == VOIDmode)
5600 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2)));
5601 new_val = convert_modes (mode, old_mode, new_val, 1);
5602
5603 if (is_bool)
5604 return expand_bool_compare_and_swap (mem, old_val, new_val, target);
5605 else
5606 return expand_val_compare_and_swap (mem, old_val, new_val, target);
5607 }
5608
5609 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5610 general form is actually an atomic exchange, and some targets only
5611 support a reduced form with the second argument being a constant 1.
5612 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5613 the results. */
5614
5615 static rtx
5616 expand_builtin_lock_test_and_set (enum machine_mode mode, tree exp,
5617 rtx target)
5618 {
5619 rtx val, mem;
5620 enum machine_mode old_mode;
5621
5622 /* Expand the operands. */
5623 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5624 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5625 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5626 of CONST_INTs, where we know the old_mode only from the call argument. */
5627 old_mode = GET_MODE (val);
5628 if (old_mode == VOIDmode)
5629 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5630 val = convert_modes (mode, old_mode, val, 1);
5631
5632 return expand_sync_lock_test_and_set (mem, val, target);
5633 }
5634
/* Expand the __sync_synchronize intrinsic.  Prefer, in order: the
   target's memory_barrier insn, the target's synchronize libfunc,
   and finally a volatile empty asm with a "memory" clobber, which
   acts as a compiler-level barrier.  */

static void
expand_builtin_synchronize (void)
{
  gimple x;
  VEC (tree, gc) *v_clobbers;

#ifdef HAVE_memory_barrier
  if (HAVE_memory_barrier)
    {
      emit_insn (gen_memory_barrier ());
      return;
    }
#endif

  if (synchronize_libfunc != NULL_RTX)
    {
      emit_library_call (synchronize_libfunc, LCT_NORMAL, VOIDmode, 0);
      return;
    }

  /* If no explicit memory barrier instruction is available, create an
     empty asm stmt with a memory clobber.  */
  v_clobbers = VEC_alloc (tree, gc, 1);
  VEC_quick_push (tree, v_clobbers,
		  tree_cons (NULL, build_string (6, "memory"), NULL));
  x = gimple_build_asm_vec ("", NULL, NULL, v_clobbers, NULL);
  gimple_asm_set_volatile (x, true);
  expand_asm_stmt (x);
}
5666
5667 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5668
5669 static void
5670 expand_builtin_lock_release (enum machine_mode mode, tree exp)
5671 {
5672 enum insn_code icode;
5673 rtx mem, insn;
5674 rtx val = const0_rtx;
5675
5676 /* Expand the operands. */
5677 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5678
5679 /* If there is an explicit operation in the md file, use it. */
5680 icode = direct_optab_handler (sync_lock_release_optab, mode);
5681 if (icode != CODE_FOR_nothing)
5682 {
5683 if (!insn_data[icode].operand[1].predicate (val, mode))
5684 val = force_reg (mode, val);
5685
5686 insn = GEN_FCN (icode) (mem, val);
5687 if (insn)
5688 {
5689 emit_insn (insn);
5690 return;
5691 }
5692 }
5693
5694 /* Otherwise we can implement this operation by emitting a barrier
5695 followed by a store of zero. */
5696 expand_builtin_synchronize ();
5697 emit_move_insn (mem, val);
5698 }
5699 \f
5700 /* Expand an expression EXP that calls a built-in function,
5701 with result going to TARGET if that's convenient
5702 (and in mode MODE if that's convenient).
5703 SUBTARGET may be used as the target for computing one of EXP's operands.
5704 IGNORE is nonzero if the value is to be ignored. */
5705
rtx
expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
		int ignore)
{
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));

  /* Machine-specific builtins are handed straight to the target hook.  */
  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return targetm.expand_builtin (exp, target, subtarget, mode, ignore);

  /* When not optimizing, generate calls to library functions for a certain
     set of builtins.  */
  if (!optimize
      && !called_as_built_in (fndecl)
      && DECL_ASSEMBLER_NAME_SET_P (fndecl)
      && fcode != BUILT_IN_ALLOCA
      && fcode != BUILT_IN_FREE)
    return expand_call (exp, target, ignore);

  /* The built-in function expanders test for target == const0_rtx
     to determine whether the function's result will be ignored.  */
  if (ignore)
    target = const0_rtx;

  /* If the result of a pure or const built-in function is ignored, and
     none of its arguments are volatile, we can avoid expanding the
     built-in call and just evaluate the arguments for side-effects.  */
  if (target == const0_rtx
      && (DECL_PURE_P (fndecl) || TREE_READONLY (fndecl)))
    {
      bool volatilep = false;
      tree arg;
      call_expr_arg_iterator iter;

      FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
	if (TREE_THIS_VOLATILE (arg))
	  {
	    volatilep = true;
	    break;
	  }

      if (! volatilep)
	{
	  FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
	    expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
	  return const0_rtx;
	}
    }

  /* Dispatch on the function code.  Each case either returns the
     expanded result or breaks out of the switch, in which case the
     builtin is expanded as an ordinary library call at the bottom.  */
  switch (fcode)
    {
    CASE_FLT_FN (BUILT_IN_FABS):
      target = expand_builtin_fabs (exp, target, subtarget);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_COPYSIGN):
      target = expand_builtin_copysign (exp, target, subtarget);
      if (target)
	return target;
      break;

      /* Just do a normal library call if we were unable to fold
	 the values.  */
    CASE_FLT_FN (BUILT_IN_CABS):
      break;

    CASE_FLT_FN (BUILT_IN_EXP):
    CASE_FLT_FN (BUILT_IN_EXP10):
    CASE_FLT_FN (BUILT_IN_POW10):
    CASE_FLT_FN (BUILT_IN_EXP2):
    CASE_FLT_FN (BUILT_IN_EXPM1):
    CASE_FLT_FN (BUILT_IN_LOGB):
    CASE_FLT_FN (BUILT_IN_LOG):
    CASE_FLT_FN (BUILT_IN_LOG10):
    CASE_FLT_FN (BUILT_IN_LOG2):
    CASE_FLT_FN (BUILT_IN_LOG1P):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ACOS):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
      /* Treat these like sqrt only if unsafe math optimizations are allowed,
	 because of possible accuracy problems.  */
      if (! flag_unsafe_math_optimizations)
	break;
      /* Fall through to the sqrt/rounding group below.  */
    CASE_FLT_FN (BUILT_IN_SQRT):
    CASE_FLT_FN (BUILT_IN_FLOOR):
    CASE_FLT_FN (BUILT_IN_CEIL):
    CASE_FLT_FN (BUILT_IN_TRUNC):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      target = expand_builtin_mathfn (exp, target, subtarget);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_ILOGB):
      if (! flag_unsafe_math_optimizations)
	break;
      /* Fall through: expanded like the FP-classification builtins.  */
    CASE_FLT_FN (BUILT_IN_ISINF):
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_ISFINITE:
    case BUILT_IN_ISNORMAL:
      target = expand_builtin_interclass_mathfn (exp, target, subtarget);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_LCEIL):
    CASE_FLT_FN (BUILT_IN_LLCEIL):
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      target = expand_builtin_int_roundingfn (exp, target);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_LLROUND):
      target = expand_builtin_int_roundingfn_2 (exp, target);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_POW):
      target = expand_builtin_pow (exp, target, subtarget);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_POWI):
      target = expand_builtin_powi (exp, target, subtarget);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_ATAN2):
    CASE_FLT_FN (BUILT_IN_LDEXP):
    CASE_FLT_FN (BUILT_IN_SCALB):
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
      if (! flag_unsafe_math_optimizations)
	break;
      /* Fall through to the two-operand math group.  */
    CASE_FLT_FN (BUILT_IN_FMOD):
    CASE_FLT_FN (BUILT_IN_REMAINDER):
    CASE_FLT_FN (BUILT_IN_DREM):
      target = expand_builtin_mathfn_2 (exp, target, subtarget);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_CEXPI):
      target = expand_builtin_cexpi (exp, target, subtarget);
      gcc_assert (target);
      return target;

    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_COS):
      if (! flag_unsafe_math_optimizations)
	break;
      target = expand_builtin_mathfn_3 (exp, target, subtarget);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_SINCOS):
      if (! flag_unsafe_math_optimizations)
	break;
      target = expand_builtin_sincos (exp);
      if (target)
	return target;
      break;

    case BUILT_IN_APPLY_ARGS:
      return expand_builtin_apply_args ();

      /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
	 FUNCTION with a copy of the parameters described by
	 ARGUMENTS, and ARGSIZE.  It returns a block of memory
	 allocated on the stack into which is stored all the registers
	 that might possibly be used for returning the result of a
	 function.  ARGUMENTS is the value returned by
	 __builtin_apply_args.  ARGSIZE is the number of bytes of
	 arguments that must be copied.  ??? How should this value be
	 computed?  We'll also need a safe worst case value for varargs
	 functions.  */
    case BUILT_IN_APPLY:
      if (!validate_arglist (exp, POINTER_TYPE,
			     POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
	  && !validate_arglist (exp, REFERENCE_TYPE,
				POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
	return const0_rtx;
      else
	{
	  rtx ops[3];

	  ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
	  ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
	  ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));

	  return expand_builtin_apply (ops[0], ops[1], ops[2]);
	}

      /* __builtin_return (RESULT) causes the function to return the
	 value described by RESULT.  RESULT is address of the block of
	 memory returned by __builtin_apply.  */
    case BUILT_IN_RETURN:
      if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
	expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
      return const0_rtx;

    case BUILT_IN_SAVEREGS:
      return expand_builtin_saveregs ();

    case BUILT_IN_ARGS_INFO:
      return expand_builtin_args_info (exp);

    case BUILT_IN_VA_ARG_PACK:
      /* All valid uses of __builtin_va_arg_pack () are removed during
	 inlining.  */
      error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
      return const0_rtx;

    case BUILT_IN_VA_ARG_PACK_LEN:
      /* All valid uses of __builtin_va_arg_pack_len () are removed during
	 inlining.  */
      error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
      return const0_rtx;

      /* Return the address of the first anonymous stack arg.  */
    case BUILT_IN_NEXT_ARG:
      if (fold_builtin_next_arg (exp, false))
	return const0_rtx;
      return expand_builtin_next_arg ();

    case BUILT_IN_CLEAR_CACHE:
      target = expand_builtin___clear_cache (exp);
      if (target)
	return target;
      break;

    case BUILT_IN_CLASSIFY_TYPE:
      return expand_builtin_classify_type (exp);

    case BUILT_IN_CONSTANT_P:
      /* Anything still unfolded at expansion time is not a constant.  */
      return const0_rtx;

    case BUILT_IN_FRAME_ADDRESS:
    case BUILT_IN_RETURN_ADDRESS:
      return expand_builtin_frame_address (fndecl, exp);

      /* Returns the address of the area where the structure is returned.
	 0 otherwise.  */
    case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      if (call_expr_nargs (exp) != 0
	  || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
	  || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
	return const0_rtx;
      else
	return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);

    case BUILT_IN_ALLOCA:
      target = expand_builtin_alloca (exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_STACK_SAVE:
      return expand_stack_save ();

    case BUILT_IN_STACK_RESTORE:
      expand_stack_restore (CALL_EXPR_ARG (exp, 0));
      return const0_rtx;

    case BUILT_IN_BSWAP32:
    case BUILT_IN_BSWAP64:
      target = expand_builtin_bswap (exp, target, subtarget);

      if (target)
	return target;
      break;

    CASE_INT_FN (BUILT_IN_FFS):
    case BUILT_IN_FFSIMAX:
      target = expand_builtin_unop (target_mode, exp, target,
				    subtarget, ffs_optab);
      if (target)
	return target;
      break;

    CASE_INT_FN (BUILT_IN_CLZ):
    case BUILT_IN_CLZIMAX:
      target = expand_builtin_unop (target_mode, exp, target,
				    subtarget, clz_optab);
      if (target)
	return target;
      break;

    CASE_INT_FN (BUILT_IN_CTZ):
    case BUILT_IN_CTZIMAX:
      target = expand_builtin_unop (target_mode, exp, target,
				    subtarget, ctz_optab);
      if (target)
	return target;
      break;

    CASE_INT_FN (BUILT_IN_POPCOUNT):
    case BUILT_IN_POPCOUNTIMAX:
      target = expand_builtin_unop (target_mode, exp, target,
				    subtarget, popcount_optab);
      if (target)
	return target;
      break;

    CASE_INT_FN (BUILT_IN_PARITY):
    case BUILT_IN_PARITYIMAX:
      target = expand_builtin_unop (target_mode, exp, target,
				    subtarget, parity_optab);
      if (target)
	return target;
      break;

    case BUILT_IN_STRLEN:
      target = expand_builtin_strlen (exp, target, target_mode);
      if (target)
	return target;
      break;

    case BUILT_IN_STRCPY:
      target = expand_builtin_strcpy (exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_STRNCPY:
      target = expand_builtin_strncpy (exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_STPCPY:
      target = expand_builtin_stpcpy (exp, target, mode);
      if (target)
	return target;
      break;

    case BUILT_IN_MEMCPY:
      target = expand_builtin_memcpy (exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_MEMPCPY:
      target = expand_builtin_mempcpy (exp, target, mode);
      if (target)
	return target;
      break;

    case BUILT_IN_MEMSET:
      target = expand_builtin_memset (exp, target, mode);
      if (target)
	return target;
      break;

    case BUILT_IN_BZERO:
      target = expand_builtin_bzero (exp);
      if (target)
	return target;
      break;

    case BUILT_IN_STRCMP:
      target = expand_builtin_strcmp (exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_STRNCMP:
      target = expand_builtin_strncmp (exp, target, mode);
      if (target)
	return target;
      break;

    case BUILT_IN_BCMP:
    case BUILT_IN_MEMCMP:
      target = expand_builtin_memcmp (exp, target, mode);
      if (target)
	return target;
      break;

    case BUILT_IN_SETJMP:
      /* This should have been lowered to the builtins below.  */
      gcc_unreachable ();

    case BUILT_IN_SETJMP_SETUP:
      /* __builtin_setjmp_setup is passed a pointer to an array of five words
	  and the receiver label.  */
      if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
	{
	  rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
				      VOIDmode, EXPAND_NORMAL);
	  tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
	  rtx label_r = label_rtx (label);

	  /* This is copied from the handling of non-local gotos.  */
	  expand_builtin_setjmp_setup (buf_addr, label_r);
	  nonlocal_goto_handler_labels
	    = gen_rtx_EXPR_LIST (VOIDmode, label_r,
				 nonlocal_goto_handler_labels);
	  /* ??? Do not let expand_label treat us as such since we would
	     not want to be both on the list of non-local labels and on
	     the list of forced labels.  */
	  FORCED_LABEL (label) = 0;
	  return const0_rtx;
	}
      break;

    case BUILT_IN_SETJMP_DISPATCHER:
       /* __builtin_setjmp_dispatcher is passed the dispatcher label.  */
      if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
	{
	  tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
	  rtx label_r = label_rtx (label);

	  /* Remove the dispatcher label from the list of non-local labels
	     since the receiver labels have been added to it above.  */
	  remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
	  return const0_rtx;
	}
      break;

    case BUILT_IN_SETJMP_RECEIVER:
       /* __builtin_setjmp_receiver is passed the receiver label.  */
      if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
	{
	  tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
	  rtx label_r = label_rtx (label);

	  expand_builtin_setjmp_receiver (label_r);
	  return const0_rtx;
	}
      break;

      /* __builtin_longjmp is passed a pointer to an array of five words.
	 It's similar to the C library longjmp function but works with
	 __builtin_setjmp above.  */
    case BUILT_IN_LONGJMP:
      if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
	{
	  rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
				      VOIDmode, EXPAND_NORMAL);
	  rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));

	  if (value != const1_rtx)
	    {
	      error ("%<__builtin_longjmp%> second argument must be 1");
	      return const0_rtx;
	    }

	  expand_builtin_longjmp (buf_addr, value);
	  return const0_rtx;
	}
      break;

    case BUILT_IN_NONLOCAL_GOTO:
      target = expand_builtin_nonlocal_goto (exp);
      if (target)
	return target;
      break;

      /* This updates the setjmp buffer that is its argument with the value
	 of the current stack pointer.  */
    case BUILT_IN_UPDATE_SETJMP_BUF:
      if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
	{
	  rtx buf_addr
	    = expand_normal (CALL_EXPR_ARG (exp, 0));

	  expand_builtin_update_setjmp_buf (buf_addr);
	  return const0_rtx;
	}
      break;

    case BUILT_IN_TRAP:
      expand_builtin_trap ();
      return const0_rtx;

    case BUILT_IN_UNREACHABLE:
      expand_builtin_unreachable ();
      return const0_rtx;

    CASE_FLT_FN (BUILT_IN_SIGNBIT):
    case BUILT_IN_SIGNBITD32:
    case BUILT_IN_SIGNBITD64:
    case BUILT_IN_SIGNBITD128:
      target = expand_builtin_signbit (exp, target);
      if (target)
	return target;
      break;

      /* Various hooks for the DWARF 2 __throw routine.  */
    case BUILT_IN_UNWIND_INIT:
      expand_builtin_unwind_init ();
      return const0_rtx;
    case BUILT_IN_DWARF_CFA:
      return virtual_cfa_rtx;
#ifdef DWARF2_UNWIND_INFO
    case BUILT_IN_DWARF_SP_COLUMN:
      return expand_builtin_dwarf_sp_column ();
    case BUILT_IN_INIT_DWARF_REG_SIZES:
      expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
      return const0_rtx;
#endif
    case BUILT_IN_FROB_RETURN_ADDR:
      return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
    case BUILT_IN_EXTRACT_RETURN_ADDR:
      return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
    case BUILT_IN_EH_RETURN:
      expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
				CALL_EXPR_ARG (exp, 1));
      return const0_rtx;
#ifdef EH_RETURN_DATA_REGNO
    case BUILT_IN_EH_RETURN_DATA_REGNO:
      return expand_builtin_eh_return_data_regno (exp);
#endif
    case BUILT_IN_EXTEND_POINTER:
      return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
    case BUILT_IN_EH_POINTER:
      return expand_builtin_eh_pointer (exp);
    case BUILT_IN_EH_FILTER:
      return expand_builtin_eh_filter (exp);
    case BUILT_IN_EH_COPY_VALUES:
      return expand_builtin_eh_copy_values (exp);

    case BUILT_IN_VA_START:
      return expand_builtin_va_start (exp);
    case BUILT_IN_VA_END:
      return expand_builtin_va_end (exp);
    case BUILT_IN_VA_COPY:
      return expand_builtin_va_copy (exp);
    case BUILT_IN_EXPECT:
      return expand_builtin_expect (exp, target);
    case BUILT_IN_PREFETCH:
      expand_builtin_prefetch (exp);
      return const0_rtx;

    case BUILT_IN_PROFILE_FUNC_ENTER:
      return expand_builtin_profile_func (false);
    case BUILT_IN_PROFILE_FUNC_EXIT:
      return expand_builtin_profile_func (true);

    case BUILT_IN_INIT_TRAMPOLINE:
      return expand_builtin_init_trampoline (exp);
    case BUILT_IN_ADJUST_TRAMPOLINE:
      return expand_builtin_adjust_trampoline (exp);

    case BUILT_IN_FORK:
    case BUILT_IN_EXECL:
    case BUILT_IN_EXECV:
    case BUILT_IN_EXECLP:
    case BUILT_IN_EXECLE:
    case BUILT_IN_EXECVP:
    case BUILT_IN_EXECVE:
      target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
      if (target)
	return target;
      break;

      /* __sync_fetch_and_OP family: the "false" argument distinguishes
	 these from the OP_and_fetch forms below, which pass "true".  */
    case BUILT_IN_FETCH_AND_ADD_1:
    case BUILT_IN_FETCH_AND_ADD_2:
    case BUILT_IN_FETCH_AND_ADD_4:
    case BUILT_IN_FETCH_AND_ADD_8:
    case BUILT_IN_FETCH_AND_ADD_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_ADD_1);
      target = expand_builtin_sync_operation (mode, exp, PLUS,
					      false, target, ignore);
      if (target)
	return target;
      break;

    case BUILT_IN_FETCH_AND_SUB_1:
    case BUILT_IN_FETCH_AND_SUB_2:
    case BUILT_IN_FETCH_AND_SUB_4:
    case BUILT_IN_FETCH_AND_SUB_8:
    case BUILT_IN_FETCH_AND_SUB_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_SUB_1);
      target = expand_builtin_sync_operation (mode, exp, MINUS,
					      false, target, ignore);
      if (target)
	return target;
      break;

    case BUILT_IN_FETCH_AND_OR_1:
    case BUILT_IN_FETCH_AND_OR_2:
    case BUILT_IN_FETCH_AND_OR_4:
    case BUILT_IN_FETCH_AND_OR_8:
    case BUILT_IN_FETCH_AND_OR_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_OR_1);
      target = expand_builtin_sync_operation (mode, exp, IOR,
					      false, target, ignore);
      if (target)
	return target;
      break;

    case BUILT_IN_FETCH_AND_AND_1:
    case BUILT_IN_FETCH_AND_AND_2:
    case BUILT_IN_FETCH_AND_AND_4:
    case BUILT_IN_FETCH_AND_AND_8:
    case BUILT_IN_FETCH_AND_AND_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_AND_1);
      target = expand_builtin_sync_operation (mode, exp, AND,
					      false, target, ignore);
      if (target)
	return target;
      break;

    case BUILT_IN_FETCH_AND_XOR_1:
    case BUILT_IN_FETCH_AND_XOR_2:
    case BUILT_IN_FETCH_AND_XOR_4:
    case BUILT_IN_FETCH_AND_XOR_8:
    case BUILT_IN_FETCH_AND_XOR_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_XOR_1);
      target = expand_builtin_sync_operation (mode, exp, XOR,
					      false, target, ignore);
      if (target)
	return target;
      break;

    case BUILT_IN_FETCH_AND_NAND_1:
    case BUILT_IN_FETCH_AND_NAND_2:
    case BUILT_IN_FETCH_AND_NAND_4:
    case BUILT_IN_FETCH_AND_NAND_8:
    case BUILT_IN_FETCH_AND_NAND_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_NAND_1);
      target = expand_builtin_sync_operation (mode, exp, NOT,
					      false, target, ignore);
      if (target)
	return target;
      break;

    case BUILT_IN_ADD_AND_FETCH_1:
    case BUILT_IN_ADD_AND_FETCH_2:
    case BUILT_IN_ADD_AND_FETCH_4:
    case BUILT_IN_ADD_AND_FETCH_8:
    case BUILT_IN_ADD_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ADD_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, PLUS,
					      true, target, ignore);
      if (target)
	return target;
      break;

    case BUILT_IN_SUB_AND_FETCH_1:
    case BUILT_IN_SUB_AND_FETCH_2:
    case BUILT_IN_SUB_AND_FETCH_4:
    case BUILT_IN_SUB_AND_FETCH_8:
    case BUILT_IN_SUB_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SUB_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, MINUS,
					      true, target, ignore);
      if (target)
	return target;
      break;

    case BUILT_IN_OR_AND_FETCH_1:
    case BUILT_IN_OR_AND_FETCH_2:
    case BUILT_IN_OR_AND_FETCH_4:
    case BUILT_IN_OR_AND_FETCH_8:
    case BUILT_IN_OR_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_OR_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, IOR,
					      true, target, ignore);
      if (target)
	return target;
      break;

    case BUILT_IN_AND_AND_FETCH_1:
    case BUILT_IN_AND_AND_FETCH_2:
    case BUILT_IN_AND_AND_FETCH_4:
    case BUILT_IN_AND_AND_FETCH_8:
    case BUILT_IN_AND_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_AND_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, AND,
					      true, target, ignore);
      if (target)
	return target;
      break;

    case BUILT_IN_XOR_AND_FETCH_1:
    case BUILT_IN_XOR_AND_FETCH_2:
    case BUILT_IN_XOR_AND_FETCH_4:
    case BUILT_IN_XOR_AND_FETCH_8:
    case BUILT_IN_XOR_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_XOR_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, XOR,
					      true, target, ignore);
      if (target)
	return target;
      break;

    case BUILT_IN_NAND_AND_FETCH_1:
    case BUILT_IN_NAND_AND_FETCH_2:
    case BUILT_IN_NAND_AND_FETCH_4:
    case BUILT_IN_NAND_AND_FETCH_8:
    case BUILT_IN_NAND_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_NAND_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, NOT,
					      true, target, ignore);
      if (target)
	return target;
      break;

    case BUILT_IN_BOOL_COMPARE_AND_SWAP_1:
    case BUILT_IN_BOOL_COMPARE_AND_SWAP_2:
    case BUILT_IN_BOOL_COMPARE_AND_SWAP_4:
    case BUILT_IN_BOOL_COMPARE_AND_SWAP_8:
    case BUILT_IN_BOOL_COMPARE_AND_SWAP_16:
      /* The boolean result needs a register of the result mode; MODE is
	 then reused below for the memory operand's mode.  */
      if (mode == VOIDmode)
	mode = TYPE_MODE (boolean_type_node);
      if (!target || !register_operand (target, mode))
	target = gen_reg_rtx (mode);

      mode = get_builtin_sync_mode (fcode - BUILT_IN_BOOL_COMPARE_AND_SWAP_1);
      target = expand_builtin_compare_and_swap (mode, exp, true, target);
      if (target)
	return target;
      break;

    case BUILT_IN_VAL_COMPARE_AND_SWAP_1:
    case BUILT_IN_VAL_COMPARE_AND_SWAP_2:
    case BUILT_IN_VAL_COMPARE_AND_SWAP_4:
    case BUILT_IN_VAL_COMPARE_AND_SWAP_8:
    case BUILT_IN_VAL_COMPARE_AND_SWAP_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_VAL_COMPARE_AND_SWAP_1);
      target = expand_builtin_compare_and_swap (mode, exp, false, target);
      if (target)
	return target;
      break;

    case BUILT_IN_LOCK_TEST_AND_SET_1:
    case BUILT_IN_LOCK_TEST_AND_SET_2:
    case BUILT_IN_LOCK_TEST_AND_SET_4:
    case BUILT_IN_LOCK_TEST_AND_SET_8:
    case BUILT_IN_LOCK_TEST_AND_SET_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_TEST_AND_SET_1);
      target = expand_builtin_lock_test_and_set (mode, exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_LOCK_RELEASE_1:
    case BUILT_IN_LOCK_RELEASE_2:
    case BUILT_IN_LOCK_RELEASE_4:
    case BUILT_IN_LOCK_RELEASE_8:
    case BUILT_IN_LOCK_RELEASE_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_RELEASE_1);
      expand_builtin_lock_release (mode, exp);
      return const0_rtx;

    case BUILT_IN_SYNCHRONIZE:
      expand_builtin_synchronize ();
      return const0_rtx;

    case BUILT_IN_OBJECT_SIZE:
      return expand_builtin_object_size (exp);

    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      target = expand_builtin_memory_chk (exp, target, mode, fcode);
      if (target)
	return target;
      break;

      /* For the remaining _chk builtins only a diagnostic is emitted
	 here; the call itself is expanded normally below.  */
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STRCAT_CHK:
    case BUILT_IN_STRNCAT_CHK:
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      maybe_emit_chk_warning (exp, fcode);
      break;

    case BUILT_IN_SPRINTF_CHK:
    case BUILT_IN_VSPRINTF_CHK:
      maybe_emit_sprintf_chk_warning (exp, fcode);
      break;

    case BUILT_IN_FREE:
      maybe_emit_free_warning (exp);
      break;

    default:	/* just do library call, if unknown builtin */
      break;
    }

  /* The switch statement above can drop through to cause the function
     to be called normally.  */
  return expand_call (exp, target, ignore);
}
6514
6515 /* Determine whether a tree node represents a call to a built-in
6516 function. If the tree T is a call to a built-in function with
6517 the right number of arguments of the appropriate types, return
6518 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6519 Otherwise the return value is END_BUILTINS. */
6520
6521 enum built_in_function
6522 builtin_mathfn_code (const_tree t)
6523 {
6524 const_tree fndecl, arg, parmlist;
6525 const_tree argtype, parmtype;
6526 const_call_expr_arg_iterator iter;
6527
6528 if (TREE_CODE (t) != CALL_EXPR
6529 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6530 return END_BUILTINS;
6531
6532 fndecl = get_callee_fndecl (t);
6533 if (fndecl == NULL_TREE
6534 || TREE_CODE (fndecl) != FUNCTION_DECL
6535 || ! DECL_BUILT_IN (fndecl)
6536 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6537 return END_BUILTINS;
6538
6539 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6540 init_const_call_expr_arg_iterator (t, &iter);
6541 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
6542 {
6543 /* If a function doesn't take a variable number of arguments,
6544 the last element in the list will have type `void'. */
6545 parmtype = TREE_VALUE (parmlist);
6546 if (VOID_TYPE_P (parmtype))
6547 {
6548 if (more_const_call_expr_args_p (&iter))
6549 return END_BUILTINS;
6550 return DECL_FUNCTION_CODE (fndecl);
6551 }
6552
6553 if (! more_const_call_expr_args_p (&iter))
6554 return END_BUILTINS;
6555
6556 arg = next_const_call_expr_arg (&iter);
6557 argtype = TREE_TYPE (arg);
6558
6559 if (SCALAR_FLOAT_TYPE_P (parmtype))
6560 {
6561 if (! SCALAR_FLOAT_TYPE_P (argtype))
6562 return END_BUILTINS;
6563 }
6564 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
6565 {
6566 if (! COMPLEX_FLOAT_TYPE_P (argtype))
6567 return END_BUILTINS;
6568 }
6569 else if (POINTER_TYPE_P (parmtype))
6570 {
6571 if (! POINTER_TYPE_P (argtype))
6572 return END_BUILTINS;
6573 }
6574 else if (INTEGRAL_TYPE_P (parmtype))
6575 {
6576 if (! INTEGRAL_TYPE_P (argtype))
6577 return END_BUILTINS;
6578 }
6579 else
6580 return END_BUILTINS;
6581 }
6582
6583 /* Variable-length argument list. */
6584 return DECL_FUNCTION_CODE (fndecl);
6585 }
6586
6587 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
6588 evaluate to a constant. */
6589
6590 static tree
6591 fold_builtin_constant_p (tree arg)
6592 {
6593 /* We return 1 for a numeric type that's known to be a constant
6594 value at compile-time or for an aggregate type that's a
6595 literal constant. */
6596 STRIP_NOPS (arg);
6597
6598 /* If we know this is a constant, emit the constant of one. */
6599 if (CONSTANT_CLASS_P (arg)
6600 || (TREE_CODE (arg) == CONSTRUCTOR
6601 && TREE_CONSTANT (arg)))
6602 return integer_one_node;
6603 if (TREE_CODE (arg) == ADDR_EXPR)
6604 {
6605 tree op = TREE_OPERAND (arg, 0);
6606 if (TREE_CODE (op) == STRING_CST
6607 || (TREE_CODE (op) == ARRAY_REF
6608 && integer_zerop (TREE_OPERAND (op, 1))
6609 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
6610 return integer_one_node;
6611 }
6612
6613 /* If this expression has side effects, show we don't know it to be a
6614 constant. Likewise if it's a pointer or aggregate type since in
6615 those case we only want literals, since those are only optimized
6616 when generating RTL, not later.
6617 And finally, if we are compiling an initializer, not code, we
6618 need to return a definite result now; there's not going to be any
6619 more optimization done. */
6620 if (TREE_SIDE_EFFECTS (arg)
6621 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
6622 || POINTER_TYPE_P (TREE_TYPE (arg))
6623 || cfun == 0
6624 || folding_initializer)
6625 return integer_zero_node;
6626
6627 return NULL_TREE;
6628 }
6629
6630 /* Create builtin_expect with PRED and EXPECTED as its arguments and
6631 return it as a truthvalue. */
6632
6633 static tree
6634 build_builtin_expect_predicate (location_t loc, tree pred, tree expected)
6635 {
6636 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
6637
6638 fn = built_in_decls[BUILT_IN_EXPECT];
6639 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
6640 ret_type = TREE_TYPE (TREE_TYPE (fn));
6641 pred_type = TREE_VALUE (arg_types);
6642 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
6643
6644 pred = fold_convert_loc (loc, pred_type, pred);
6645 expected = fold_convert_loc (loc, expected_type, expected);
6646 call_expr = build_call_expr_loc (loc, fn, 2, pred, expected);
6647
6648 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
6649 build_int_cst (ret_type, 0));
6650 }
6651
/* Fold a call to builtin_expect with arguments ARG0 and ARG1.  Return
   NULL_TREE if no simplification is possible.  */

static tree
fold_builtin_expect (location_t loc, tree arg0, tree arg1)
{
  tree inner, fndecl;
  enum tree_code code;

  /* If this is a builtin_expect within a builtin_expect keep the
     inner one.  See through a comparison against a constant.  It
     might have been added to create a truthvalue.  */
  inner = arg0;
  if (COMPARISON_CLASS_P (inner)
      && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
    inner = TREE_OPERAND (inner, 0);

  if (TREE_CODE (inner) == CALL_EXPR
      && (fndecl = get_callee_fndecl (inner))
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
    return arg0;

  /* Distribute the expected value over short-circuiting operators.
     See through the cast from truthvalue_type_node to long.  */
  inner = arg0;
  while (TREE_CODE (inner) == NOP_EXPR
	 && INTEGRAL_TYPE_P (TREE_TYPE (inner))
	 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner, 0))))
    inner = TREE_OPERAND (inner, 0);

  code = TREE_CODE (inner);
  if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
    {
      tree op0 = TREE_OPERAND (inner, 0);
      tree op1 = TREE_OPERAND (inner, 1);

      /* Turn __builtin_expect (a && b, v) into
	 __builtin_expect (a, v) && __builtin_expect (b, v)
	 (and likewise for ||).  */
      op0 = build_builtin_expect_predicate (loc, op0, arg1);
      op1 = build_builtin_expect_predicate (loc, op1, arg1);
      inner = build2 (code, TREE_TYPE (inner), op0, op1);

      return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
    }

  /* If the argument isn't invariant then there's nothing else we can do.  */
  if (!TREE_CONSTANT (arg0))
    return NULL_TREE;

  /* If we expect that a comparison against the argument will fold to
     a constant return the constant.  In practice, this means a true
     constant or the address of a non-weak symbol.  */
  inner = arg0;
  STRIP_NOPS (inner);
  if (TREE_CODE (inner) == ADDR_EXPR)
    {
      /* Strip component and array references to reach the base decl.  */
      do
	{
	  inner = TREE_OPERAND (inner, 0);
	}
      while (TREE_CODE (inner) == COMPONENT_REF
	     || TREE_CODE (inner) == ARRAY_REF);
      /* The address of a weak symbol is not a compile-time constant:
	 it may resolve to zero at link time.  */
      if ((TREE_CODE (inner) == VAR_DECL
	   || TREE_CODE (inner) == FUNCTION_DECL)
	  && DECL_WEAK (inner))
	return NULL_TREE;
    }

  /* Otherwise, ARG0 already has the proper type for the return value.  */
  return arg0;
}
6722
6723 /* Fold a call to __builtin_classify_type with argument ARG. */
6724
6725 static tree
6726 fold_builtin_classify_type (tree arg)
6727 {
6728 if (arg == 0)
6729 return build_int_cst (NULL_TREE, no_type_class);
6730
6731 return build_int_cst (NULL_TREE, type_to_class (TREE_TYPE (arg)));
6732 }
6733
6734 /* Fold a call to __builtin_strlen with argument ARG. */
6735
6736 static tree
6737 fold_builtin_strlen (location_t loc, tree type, tree arg)
6738 {
6739 if (!validate_arg (arg, POINTER_TYPE))
6740 return NULL_TREE;
6741 else
6742 {
6743 tree len = c_strlen (arg, 0);
6744
6745 if (len)
6746 return fold_convert_loc (loc, type, len);
6747
6748 return NULL_TREE;
6749 }
6750 }
6751
6752 /* Fold a call to __builtin_inf or __builtin_huge_val. */
6753
6754 static tree
6755 fold_builtin_inf (location_t loc, tree type, int warn)
6756 {
6757 REAL_VALUE_TYPE real;
6758
6759 /* __builtin_inff is intended to be usable to define INFINITY on all
6760 targets. If an infinity is not available, INFINITY expands "to a
6761 positive constant of type float that overflows at translation
6762 time", footnote "In this case, using INFINITY will violate the
6763 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
6764 Thus we pedwarn to ensure this constraint violation is
6765 diagnosed. */
6766 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
6767 pedwarn (loc, 0, "target format does not support infinity");
6768
6769 real_inf (&real);
6770 return build_real (type, real);
6771 }
6772
6773 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
6774
6775 static tree
6776 fold_builtin_nan (tree arg, tree type, int quiet)
6777 {
6778 REAL_VALUE_TYPE real;
6779 const char *str;
6780
6781 if (!validate_arg (arg, POINTER_TYPE))
6782 return NULL_TREE;
6783 str = c_getstr (arg);
6784 if (!str)
6785 return NULL_TREE;
6786
6787 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
6788 return NULL_TREE;
6789
6790 return build_real (type, real);
6791 }
6792
/* Return true if the floating point expression T has an integer value.
   We also allow +Inf, -Inf and NaN to be considered integer values.  */

static bool
integer_valued_real_p (tree t)
{
  switch (TREE_CODE (t))
    {
    case FLOAT_EXPR:
      /* A conversion from an integer type is integer valued by
	 construction.  */
      return true;

    case ABS_EXPR:
    case SAVE_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 0));

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      /* The value of these expressions is their second operand.  */
      return integer_valued_real_p (TREE_OPERAND (t, 1));

    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
      /* Integers are closed under these operations.  */
      return integer_valued_real_p (TREE_OPERAND (t, 0))
	     && integer_valued_real_p (TREE_OPERAND (t, 1));

    case COND_EXPR:
      /* Both arms must be integer valued; the condition is irrelevant.  */
      return integer_valued_real_p (TREE_OPERAND (t, 1))
	     && integer_valued_real_p (TREE_OPERAND (t, 2));

    case REAL_CST:
      return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));

    case NOP_EXPR:
      {
	tree type = TREE_TYPE (TREE_OPERAND (t, 0));
	/* A widening from an integer type yields an integer value; a
	   conversion from another real type preserves the property.  */
	if (TREE_CODE (type) == INTEGER_TYPE)
	  return true;
	if (TREE_CODE (type) == REAL_TYPE)
	  return integer_valued_real_p (TREE_OPERAND (t, 0));
	break;
      }

    case CALL_EXPR:
      switch (builtin_mathfn_code (t))
	{
	/* The integer rounding functions produce integer values by
	   definition (modulo Inf/NaN, which we allow).  */
	CASE_FLT_FN (BUILT_IN_CEIL):
	CASE_FLT_FN (BUILT_IN_FLOOR):
	CASE_FLT_FN (BUILT_IN_NEARBYINT):
	CASE_FLT_FN (BUILT_IN_RINT):
	CASE_FLT_FN (BUILT_IN_ROUND):
	CASE_FLT_FN (BUILT_IN_TRUNC):
	  return true;

	CASE_FLT_FN (BUILT_IN_FMIN):
	CASE_FLT_FN (BUILT_IN_FMAX):
	  /* fmin/fmax return one of their operands.  */
	  return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
		 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));

	default:
	  break;
	}
      break;

    default:
      break;
    }
  /* Conservatively assume not integer valued.  */
  return false;
}
6864
/* FNDECL is assumed to be a builtin where truncation can be propagated
   across (for instance floor((double)f) == (double)floorf (f).
   Do the transformation for a call with argument ARG.  Returns the
   simplified tree or NULL_TREE.  */

static tree
fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Integer rounding functions are idempotent.  */
  if (fcode == builtin_mathfn_code (arg))
    return arg;

  /* If argument is already integer valued, and we don't need to worry
     about setting errno, there's no need to perform rounding.  */
  if (! flag_errno_math && integer_valued_real_p (arg))
    return arg;

  if (optimize)
    {
      /* Narrow e.g. floor ((double) f) to (double) floorf (f) when
	 the argument is only a widening of a narrower float type and
	 a builtin for that narrower type exists.  */
      tree arg0 = strip_float_extensions (arg);
      tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
      tree newtype = TREE_TYPE (arg0);
      tree decl;

      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
	  && (decl = mathfn_built_in (newtype, fcode)))
	return fold_convert_loc (loc, ftype,
				 build_call_expr_loc (loc, decl, 1,
						      fold_convert_loc (loc,
									newtype,
									arg0)));
    }
  return NULL_TREE;
}
6903
/* FNDECL is assumed to be builtin which can narrow the FP type of
   the argument, for instance lround((double)f) -> lroundf (f).
   Do the transformation for a call with argument ARG.  Returns the
   simplified tree or NULL_TREE.  */

static tree
fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* If argument is already integer valued, and we don't need to worry
     about setting errno, there's no need to perform rounding.  */
  if (! flag_errno_math && integer_valued_real_p (arg))
    return fold_build1_loc (loc, FIX_TRUNC_EXPR,
			    TREE_TYPE (TREE_TYPE (fndecl)), arg);

  if (optimize)
    {
      /* Narrow e.g. lround ((double) f) to lroundf (f) when the
	 argument is only a widening of a narrower float type.  */
      tree ftype = TREE_TYPE (arg);
      tree arg0 = strip_float_extensions (arg);
      tree newtype = TREE_TYPE (arg0);
      tree decl;

      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
	  && (decl = mathfn_built_in (newtype, fcode)))
	return build_call_expr_loc (loc, decl, 1,
				    fold_convert_loc (loc, newtype, arg0));
    }

  /* Canonicalize llround (x) to lround (x) on LP64 targets where
     sizeof (long long) == sizeof (long).  */
  if (TYPE_PRECISION (long_long_integer_type_node)
      == TYPE_PRECISION (long_integer_type_node))
    {
      tree newfn = NULL_TREE;
      switch (fcode)
	{
	CASE_FLT_FN (BUILT_IN_LLCEIL):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
	  break;

	CASE_FLT_FN (BUILT_IN_LLFLOOR):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
	  break;

	CASE_FLT_FN (BUILT_IN_LLROUND):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
	  break;

	CASE_FLT_FN (BUILT_IN_LLRINT):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
	  break;

	default:
	  break;
	}

      if (newfn)
	{
	  /* Convert the long result back to the long long return type
	     of the original builtin.  */
	  tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
	  return fold_convert_loc (loc,
				   TREE_TYPE (TREE_TYPE (fndecl)), newcall);
	}
    }

  return NULL_TREE;
}
6973
/* Fold call to builtin cabs, cabsf or cabsl with argument ARG.  TYPE is the
   return type.  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
{
  tree res;

  if (!validate_arg (arg, COMPLEX_TYPE)
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
    return NULL_TREE;

  /* Calculate the result when the argument is a constant:
     cabs(a+bi) == hypot(a,b), evaluated with MPFR.  */
  if (TREE_CODE (arg) == COMPLEX_CST
      && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
			      type, mpfr_hypot)))
    return res;

  if (TREE_CODE (arg) == COMPLEX_EXPR)
    {
      tree real = TREE_OPERAND (arg, 0);
      tree imag = TREE_OPERAND (arg, 1);

      /* If either part is zero, cabs is fabs of the other.  */
      if (real_zerop (real))
	return fold_build1_loc (loc, ABS_EXPR, type, imag);
      if (real_zerop (imag))
	return fold_build1_loc (loc, ABS_EXPR, type, real);

      /* cabs(x+xi) -> fabs(x)*sqrt(2).  */
      if (flag_unsafe_math_optimizations
	  && operand_equal_p (real, imag, OEP_PURE_SAME))
	{
	  const REAL_VALUE_TYPE sqrt2_trunc
	    = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
	  STRIP_NOPS (real);
	  return fold_build2_loc (loc, MULT_EXPR, type,
				  fold_build1_loc (loc, ABS_EXPR, type, real),
				  build_real (type, sqrt2_trunc));
	}
    }

  /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z).  */
  if (TREE_CODE (arg) == NEGATE_EXPR
      || TREE_CODE (arg) == CONJ_EXPR)
    return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));

  /* Expand cabs(z) to sqrt(re*re + im*im) under unsafe math.
     Don't do this when optimizing for size.  */
  if (flag_unsafe_math_optimizations
      && optimize && optimize_function_for_speed_p (cfun))
    {
      tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);

      if (sqrtfn != NULL_TREE)
	{
	  tree rpart, ipart, result;

	  /* Wrap in save_exprs so the argument and its parts are
	     evaluated only once despite being used twice below.  */
	  arg = builtin_save_expr (arg);

	  rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
	  ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);

	  rpart = builtin_save_expr (rpart);
	  ipart = builtin_save_expr (ipart);

	  result = fold_build2_loc (loc, PLUS_EXPR, type,
				    fold_build2_loc (loc, MULT_EXPR, type,
						     rpart, rpart),
				    fold_build2_loc (loc, MULT_EXPR, type,
						     ipart, ipart));

	  return build_call_expr_loc (loc, sqrtfn, 1, result);
	}
    }

  return NULL_TREE;
}
7051
7052 /* Build a complex (inf +- 0i) for the result of cproj. TYPE is the
7053 complex tree type of the result. If NEG is true, the imaginary
7054 zero is negative. */
7055
7056 static tree
7057 build_complex_cproj (tree type, bool neg)
7058 {
7059 REAL_VALUE_TYPE rinf, rzero = dconst0;
7060
7061 real_inf (&rinf);
7062 rzero.sign = neg;
7063 return build_complex (type, build_real (TREE_TYPE (type), rinf),
7064 build_real (TREE_TYPE (type), rzero));
7065 }
7066
/* Fold call to builtin cproj, cprojf or cprojl with argument ARG.  TYPE is the
   return type.  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cproj (location_t loc, tree arg, tree type)
{
  if (!validate_arg (arg, COMPLEX_TYPE)
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
    return NULL_TREE;

  /* If there are no infinities, return arg.  cproj is the identity
     on all finite values.  */
  if (! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (type))))
    return non_lvalue_loc (loc, arg);

  /* Calculate the result when the argument is a constant.  */
  if (TREE_CODE (arg) == COMPLEX_CST)
    {
      const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
      const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));

      /* cproj maps any infinity to (inf + copysign(0,imag)*i);
	 finite values project to themselves.  */
      if (real_isinf (real) || real_isinf (imag))
	return build_complex_cproj (type, imag->sign);
      else
	return arg;
    }
  else if (TREE_CODE (arg) == COMPLEX_EXPR)
    {
      tree real = TREE_OPERAND (arg, 0);
      tree imag = TREE_OPERAND (arg, 1);

      STRIP_NOPS (real);
      STRIP_NOPS (imag);

      /* If the real part is inf and the imag part is known to be
	 nonnegative, return (inf + 0i).  Remember side-effects are
	 possible in the imag part.  */
      if (TREE_CODE (real) == REAL_CST
	  && real_isinf (TREE_REAL_CST_PTR (real))
	  && tree_expr_nonnegative_p (imag))
	return omit_one_operand_loc (loc, type,
				     build_complex_cproj (type, false),
				     arg);

      /* If the imag part is inf, return (inf+I*copysign(0,imag)).
	 Remember side-effects are possible in the real part.  */
      if (TREE_CODE (imag) == REAL_CST
	  && real_isinf (TREE_REAL_CST_PTR (imag)))
	return
	  omit_one_operand_loc (loc, type,
				build_complex_cproj (type, TREE_REAL_CST_PTR
						     (imag)->sign), arg);
    }

  return NULL_TREE;
}
7122
/* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_sqrt (location_t loc, tree arg, tree type)
{

  enum built_in_function fcode;
  tree res;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  The
     &dconst0 lower bound makes MPFR reject negative arguments.  */
  if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
    return res;

  /* Optimize sqrt(expN(x)) = expN(x*0.5).  */
  fcode = builtin_mathfn_code (arg);
  if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
    {
      tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
      arg = fold_build2_loc (loc, MULT_EXPR, type,
			     CALL_EXPR_ARG (arg, 0),
			     build_real (type, dconsthalf));
      return build_call_expr_loc (loc, expfn, 1, arg);
    }

  /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)).  */
  if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
    {
      tree powfn = mathfn_built_in (type, BUILT_IN_POW);

      if (powfn)
	{
	  tree arg0 = CALL_EXPR_ARG (arg, 0);
	  tree tree_root;
	  /* The inner root was either sqrt or cbrt.  */
	  /* This was a conditional expression but it triggered a bug
	     in Sun C 5.5.  */
	  REAL_VALUE_TYPE dconstroot;
	  if (BUILTIN_SQRT_P (fcode))
	    dconstroot = dconsthalf;
	  else
	    dconstroot = dconst_third ();

	  /* Adjust for the outer root: halve the exponent by
	     decrementing the binary exponent of the constant.  */
	  SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
	  dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
	  tree_root = build_real (type, dconstroot);
	  return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
	}
    }

  /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5).  */
  if (flag_unsafe_math_optimizations
      && (fcode == BUILT_IN_POW
	  || fcode == BUILT_IN_POWF
	  || fcode == BUILT_IN_POWL))
    {
      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
      tree arg0 = CALL_EXPR_ARG (arg, 0);
      tree arg1 = CALL_EXPR_ARG (arg, 1);
      tree narg1;
      /* |x| is needed because pow of a negative base with the halved
	 exponent would otherwise produce a NaN.  */
      if (!tree_expr_nonnegative_p (arg0))
	arg0 = build1 (ABS_EXPR, type, arg0);
      narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
			       build_real (type, dconsthalf));
      return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
    }

  return NULL_TREE;
}
7196
/* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cbrt (location_t loc, tree arg, tree type)
{
  const enum built_in_function fcode = builtin_mathfn_code (arg);
  tree res;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
    return res;

  if (flag_unsafe_math_optimizations)
    {
      /* Optimize cbrt(expN(x)) -> expN(x/3).  */
      if (BUILTIN_EXPONENT_P (fcode))
	{
	  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
	  const REAL_VALUE_TYPE third_trunc =
	    real_value_truncate (TYPE_MODE (type), dconst_third ());
	  arg = fold_build2_loc (loc, MULT_EXPR, type,
				 CALL_EXPR_ARG (arg, 0),
				 build_real (type, third_trunc));
	  return build_call_expr_loc (loc, expfn, 1, arg);
	}

      /* Optimize cbrt(sqrt(x)) -> pow(x,1/6).  */
      if (BUILTIN_SQRT_P (fcode))
	{
	  tree powfn = mathfn_built_in (type, BUILT_IN_POW);

	  if (powfn)
	    {
	      tree arg0 = CALL_EXPR_ARG (arg, 0);
	      tree tree_root;
	      REAL_VALUE_TYPE dconstroot = dconst_third ();

	      /* Halve 1/3 to 1/6 by decrementing the binary exponent.  */
	      SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
	      dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
	      tree_root = build_real (type, dconstroot);
	      return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
	    }
	}

      /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative.  */
      if (BUILTIN_CBRT_P (fcode))
	{
	  tree arg0 = CALL_EXPR_ARG (arg, 0);
	  /* Nonnegativity is required because pow of a negative base
	     with a fractional exponent is a NaN, while cbrt of a
	     negative value is well defined.  */
	  if (tree_expr_nonnegative_p (arg0))
	    {
	      tree powfn = mathfn_built_in (type, BUILT_IN_POW);

	      if (powfn)
		{
		  tree tree_root;
		  REAL_VALUE_TYPE dconstroot;

		  /* 1/9 == (1/3) * (1/3).  */
		  real_arithmetic (&dconstroot, MULT_EXPR,
				   dconst_third_ptr (), dconst_third_ptr ());
		  dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
		  tree_root = build_real (type, dconstroot);
		  return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
		}
	    }
	}

      /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative.  */
      if (fcode == BUILT_IN_POW
	  || fcode == BUILT_IN_POWF
	  || fcode == BUILT_IN_POWL)
	{
	  tree arg00 = CALL_EXPR_ARG (arg, 0);
	  tree arg01 = CALL_EXPR_ARG (arg, 1);
	  if (tree_expr_nonnegative_p (arg00))
	    {
	      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
	      const REAL_VALUE_TYPE dconstroot
		= real_value_truncate (TYPE_MODE (type), dconst_third ());
	      tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
					     build_real (type, dconstroot));
	      return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
	    }
	}
    }
  return NULL_TREE;
}
7287
7288 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7289 TYPE is the type of the return value. Return NULL_TREE if no
7290 simplification can be made. */
7291
7292 static tree
7293 fold_builtin_cos (location_t loc,
7294 tree arg, tree type, tree fndecl)
7295 {
7296 tree res, narg;
7297
7298 if (!validate_arg (arg, REAL_TYPE))
7299 return NULL_TREE;
7300
7301 /* Calculate the result when the argument is a constant. */
7302 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7303 return res;
7304
7305 /* Optimize cos(-x) into cos (x). */
7306 if ((narg = fold_strip_sign_ops (arg)))
7307 return build_call_expr_loc (loc, fndecl, 1, narg);
7308
7309 return NULL_TREE;
7310 }
7311
7312 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7313 Return NULL_TREE if no simplification can be made. */
7314
7315 static tree
7316 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
7317 {
7318 if (validate_arg (arg, REAL_TYPE))
7319 {
7320 tree res, narg;
7321
7322 /* Calculate the result when the argument is a constant. */
7323 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7324 return res;
7325
7326 /* Optimize cosh(-x) into cosh (x). */
7327 if ((narg = fold_strip_sign_ops (arg)))
7328 return build_call_expr_loc (loc, fndecl, 1, narg);
7329 }
7330
7331 return NULL_TREE;
7332 }
7333
7334 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7335 argument ARG. TYPE is the type of the return value. Return
7336 NULL_TREE if no simplification can be made. */
7337
7338 static tree
7339 fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
7340 bool hyper)
7341 {
7342 if (validate_arg (arg, COMPLEX_TYPE)
7343 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7344 {
7345 tree tmp;
7346
7347 /* Calculate the result when the argument is a constant. */
7348 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
7349 return tmp;
7350
7351 /* Optimize fn(-x) into fn(x). */
7352 if ((tmp = fold_strip_sign_ops (arg)))
7353 return build_call_expr_loc (loc, fndecl, 1, tmp);
7354 }
7355
7356 return NULL_TREE;
7357 }
7358
7359 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7360 Return NULL_TREE if no simplification can be made. */
7361
7362 static tree
7363 fold_builtin_tan (tree arg, tree type)
7364 {
7365 enum built_in_function fcode;
7366 tree res;
7367
7368 if (!validate_arg (arg, REAL_TYPE))
7369 return NULL_TREE;
7370
7371 /* Calculate the result when the argument is a constant. */
7372 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7373 return res;
7374
7375 /* Optimize tan(atan(x)) = x. */
7376 fcode = builtin_mathfn_code (arg);
7377 if (flag_unsafe_math_optimizations
7378 && (fcode == BUILT_IN_ATAN
7379 || fcode == BUILT_IN_ATANF
7380 || fcode == BUILT_IN_ATANL))
7381 return CALL_EXPR_ARG (arg, 0);
7382
7383 return NULL_TREE;
7384 }
7385
/* Fold function call to builtin sincos, sincosf, or sincosl.  ARG0 is
   the angle, ARG1 and ARG2 point to where the sine and cosine results
   are stored.  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_sincos (location_t loc,
		     tree arg0, tree arg1, tree arg2)
{
  tree type;
  tree res, fn, call;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE))
    return NULL_TREE;

  type = TREE_TYPE (arg0);

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
    return res;

  /* Canonicalize sincos to cexpi.  cexpi(x) == cos(x) + sin(x)*i,
     but this relies on the C99 complex functions being available.  */
  if (!TARGET_C99_FUNCTIONS)
    return NULL_TREE;
  fn = mathfn_built_in (type, BUILT_IN_CEXPI);
  if (!fn)
    return NULL_TREE;

  /* Save the call so it is evaluated once even though both parts
     are extracted from it below.  */
  call = build_call_expr_loc (loc, fn, 1, arg0);
  call = builtin_save_expr (call);

  /* Store the imaginary part (sine) through ARG1 and the real part
     (cosine) through ARG2.  */
  return build2 (COMPOUND_EXPR, void_type_node,
		 build2 (MODIFY_EXPR, void_type_node,
			 build_fold_indirect_ref_loc (loc, arg1),
			 build1 (IMAGPART_EXPR, type, call)),
		 build2 (MODIFY_EXPR, void_type_node,
			 build_fold_indirect_ref_loc (loc, arg2),
			 build1 (REALPART_EXPR, type, call)));
}
7425
/* Fold function call to builtin cexp, cexpf, or cexpl.  ARG0 is the
   complex argument, TYPE the (complex) return type.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cexp (location_t loc, tree arg0, tree type)
{
  tree rtype;
  tree realp, imagp, ifn;
  tree res;

  if (!validate_arg (arg0, COMPLEX_TYPE)
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
    return res;

  /* RTYPE is the real component type of the complex argument.  */
  rtype = TREE_TYPE (TREE_TYPE (arg0));

  /* In case we can figure out the real part of arg0 and it is constant zero
     fold to cexpi.  cexpi is only available with C99 functions.  */
  if (!TARGET_C99_FUNCTIONS)
    return NULL_TREE;
  ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
  if (!ifn)
    return NULL_TREE;

  /* cexp(0 + i*y) == cexpi(y).  */
  if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
      && real_zerop (realp))
    {
      tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
      return build_call_expr_loc (loc, ifn, 1, narg);
    }

  /* In case we can easily decompose real and imaginary parts split cexp
     to exp (r) * cexpi (i).  */
  if (flag_unsafe_math_optimizations
      && realp)
    {
      tree rfn, rcall, icall;

      rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
      if (!rfn)
	return NULL_TREE;

      imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
      if (!imagp)
	return NULL_TREE;

      /* Save both calls: each result is used twice below.  */
      icall = build_call_expr_loc (loc, ifn, 1, imagp);
      icall = builtin_save_expr (icall);
      rcall = build_call_expr_loc (loc, rfn, 1, realp);
      rcall = builtin_save_expr (rcall);
      /* Result is exp(r)*cos(i) + exp(r)*sin(i)*I.  */
      return fold_build2_loc (loc, COMPLEX_EXPR, type,
			      fold_build2_loc (loc, MULT_EXPR, rtype,
					       rcall,
					       fold_build1_loc (loc, REALPART_EXPR,
								rtype, icall)),
			      fold_build2_loc (loc, MULT_EXPR, rtype,
					       rcall,
					       fold_build1_loc (loc, IMAGPART_EXPR,
								rtype, icall)));
    }

  return NULL_TREE;
}
7493
7494 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7495 Return NULL_TREE if no simplification can be made. */
7496
7497 static tree
7498 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
7499 {
7500 if (!validate_arg (arg, REAL_TYPE))
7501 return NULL_TREE;
7502
7503 /* Optimize trunc of constant value. */
7504 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7505 {
7506 REAL_VALUE_TYPE r, x;
7507 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7508
7509 x = TREE_REAL_CST (arg);
7510 real_trunc (&r, TYPE_MODE (type), &x);
7511 return build_real (type, r);
7512 }
7513
7514 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7515 }
7516
7517 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7518 Return NULL_TREE if no simplification can be made. */
7519
7520 static tree
7521 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
7522 {
7523 if (!validate_arg (arg, REAL_TYPE))
7524 return NULL_TREE;
7525
7526 /* Optimize floor of constant value. */
7527 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7528 {
7529 REAL_VALUE_TYPE x;
7530
7531 x = TREE_REAL_CST (arg);
7532 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7533 {
7534 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7535 REAL_VALUE_TYPE r;
7536
7537 real_floor (&r, TYPE_MODE (type), &x);
7538 return build_real (type, r);
7539 }
7540 }
7541
7542 /* Fold floor (x) where x is nonnegative to trunc (x). */
7543 if (tree_expr_nonnegative_p (arg))
7544 {
7545 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7546 if (truncfn)
7547 return build_call_expr_loc (loc, truncfn, 1, arg);
7548 }
7549
7550 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7551 }
7552
7553 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7554 Return NULL_TREE if no simplification can be made. */
7555
7556 static tree
7557 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
7558 {
7559 if (!validate_arg (arg, REAL_TYPE))
7560 return NULL_TREE;
7561
7562 /* Optimize ceil of constant value. */
7563 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7564 {
7565 REAL_VALUE_TYPE x;
7566
7567 x = TREE_REAL_CST (arg);
7568 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7569 {
7570 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7571 REAL_VALUE_TYPE r;
7572
7573 real_ceil (&r, TYPE_MODE (type), &x);
7574 return build_real (type, r);
7575 }
7576 }
7577
7578 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7579 }
7580
7581 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7582 Return NULL_TREE if no simplification can be made. */
7583
7584 static tree
7585 fold_builtin_round (location_t loc, tree fndecl, tree arg)
7586 {
7587 if (!validate_arg (arg, REAL_TYPE))
7588 return NULL_TREE;
7589
7590 /* Optimize round of constant value. */
7591 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7592 {
7593 REAL_VALUE_TYPE x;
7594
7595 x = TREE_REAL_CST (arg);
7596 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7597 {
7598 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7599 REAL_VALUE_TYPE r;
7600
7601 real_round (&r, TYPE_MODE (type), &x);
7602 return build_real (type, r);
7603 }
7604 }
7605
7606 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7607 }
7608
/* Fold function call to builtin lround, lroundf or lroundl (or the
   corresponding long long versions) and other rounding functions.  ARG
   is the argument to the call.  Return NULL_TREE if no simplification
   can be made.  */

static tree
fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Optimize lround of constant value.  */
  if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);

      /* Inf and NaN have no integer equivalent; leave them for the
	 runtime library.  */
      if (real_isfinite (&x))
	{
	  tree itype = TREE_TYPE (TREE_TYPE (fndecl));
	  tree ftype = TREE_TYPE (arg);
	  double_int val;
	  REAL_VALUE_TYPE r;

	  /* Round in the direction the builtin specifies.  */
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    CASE_FLT_FN (BUILT_IN_LFLOOR):
	    CASE_FLT_FN (BUILT_IN_LLFLOOR):
	      real_floor (&r, TYPE_MODE (ftype), &x);
	      break;

	    CASE_FLT_FN (BUILT_IN_LCEIL):
	    CASE_FLT_FN (BUILT_IN_LLCEIL):
	      real_ceil (&r, TYPE_MODE (ftype), &x);
	      break;

	    CASE_FLT_FN (BUILT_IN_LROUND):
	    CASE_FLT_FN (BUILT_IN_LLROUND):
	      real_round (&r, TYPE_MODE (ftype), &x);
	      break;

	    default:
	      gcc_unreachable ();
	    }

	  /* Only fold when the rounded value fits in the integer
	     return type; otherwise behavior is target-defined.  */
	  real_to_integer2 ((HOST_WIDE_INT *)&val.low, &val.high, &r);
	  if (double_int_fits_to_tree_p (itype, val))
	    return double_int_to_tree (itype, val);
	}
    }

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x).  */
      if (tree_expr_nonnegative_p (arg))
	return fold_build1_loc (loc, FIX_TRUNC_EXPR,
				TREE_TYPE (TREE_TYPE (fndecl)), arg);
      break;
    default:;
    }

  /* Otherwise try narrowing, e.g. lround((double)f) -> lroundf(f).  */
  return fold_fixed_mathfn (loc, fndecl, arg);
}
7673
/* Fold function call to builtin ffs, clz, ctz, popcount and parity
   and their long and long long variants (i.e. ffsl and ffsll).  ARG is
   the argument to the call.  Return NULL_TREE if no simplification can
   be made.  */

static tree
fold_builtin_bitop (tree fndecl, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Optimize for constant argument.  */
  if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
    {
      /* The constant is held as two host words: LO holds the low
	 HOST_BITS_PER_WIDE_INT bits, HI the rest.  */
      HOST_WIDE_INT hi, width, result;
      unsigned HOST_WIDE_INT lo;
      tree type;

      type = TREE_TYPE (arg);
      width = TYPE_PRECISION (type);
      lo = TREE_INT_CST_LOW (arg);

      /* Clear all the bits that are beyond the type's precision.  */
      if (width > HOST_BITS_PER_WIDE_INT)
	{
	  hi = TREE_INT_CST_HIGH (arg);
	  if (width < 2 * HOST_BITS_PER_WIDE_INT)
	    hi &= ~((HOST_WIDE_INT) (-1) >> (width - HOST_BITS_PER_WIDE_INT));
	}
      else
	{
	  hi = 0;
	  if (width < HOST_BITS_PER_WIDE_INT)
	    lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
	}

      switch (DECL_FUNCTION_CODE (fndecl))
	{
	CASE_INT_FN (BUILT_IN_FFS):
	  /* ffs: 1 + index of least significant set bit, 0 if none.
	     (x & -x) isolates the lowest set bit.  */
	  if (lo != 0)
	    result = exact_log2 (lo & -lo) + 1;
	  else if (hi != 0)
	    result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi) + 1;
	  else
	    result = 0;
	  break;

	CASE_INT_FN (BUILT_IN_CLZ):
	  /* clz: leading zero count; value at zero is target-defined.  */
	  if (hi != 0)
	    result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
	  else if (lo != 0)
	    result = width - floor_log2 (lo) - 1;
	  else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
	    result = width;
	  break;

	CASE_INT_FN (BUILT_IN_CTZ):
	  /* ctz: trailing zero count; value at zero is target-defined.  */
	  if (lo != 0)
	    result = exact_log2 (lo & -lo);
	  else if (hi != 0)
	    result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi);
	  else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
	    result = width;
	  break;

	CASE_INT_FN (BUILT_IN_POPCOUNT):
	  /* popcount: number of set bits; x &= x-1 clears the lowest
	     set bit per iteration.  */
	  result = 0;
	  while (lo)
	    result++, lo &= lo - 1;
	  while (hi)
	    result++, hi &= hi - 1;
	  break;

	CASE_INT_FN (BUILT_IN_PARITY):
	  /* parity: popcount modulo 2.  */
	  result = 0;
	  while (lo)
	    result++, lo &= lo - 1;
	  while (hi)
	    result++, hi &= hi - 1;
	  result &= 1;
	  break;

	default:
	  gcc_unreachable ();
	}

      return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
    }

  return NULL_TREE;
}
7765
/* Fold function call to builtin_bswap and the long and long long
   variants.  Return NULL_TREE if no simplification can be made.  */
static tree
fold_builtin_bswap (tree fndecl, tree arg)
{
  if (! validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Optimize constant value.  */
  if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
    {
      /* The constant is split across two host words (LO/HI); the
	 byte-swapped result is accumulated in R_LO/R_HI.  */
      HOST_WIDE_INT hi, width, r_hi = 0;
      unsigned HOST_WIDE_INT lo, r_lo = 0;
      tree type;

      type = TREE_TYPE (arg);
      width = TYPE_PRECISION (type);
      lo = TREE_INT_CST_LOW (arg);
      hi = TREE_INT_CST_HIGH (arg);

      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_BSWAP32:
	case BUILT_IN_BSWAP64:
	  {
	    int s;

	    /* Move each byte from bit offset S to the mirrored offset
	       D, picking it out of LO or HI as appropriate.  */
	    for (s = 0; s < width; s += 8)
	      {
		int d = width - s - 8;
		unsigned HOST_WIDE_INT byte;

		if (s < HOST_BITS_PER_WIDE_INT)
		  byte = (lo >> s) & 0xff;
		else
		  byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;

		if (d < HOST_BITS_PER_WIDE_INT)
		  r_lo |= byte << d;
		else
		  r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
	      }
	  }

	  break;

	default:
	  gcc_unreachable ();
	}

      /* A result narrower than a host word needs only the low part.  */
      if (width < HOST_BITS_PER_WIDE_INT)
	return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), r_lo);
      else
	return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl)), r_lo, r_hi);
    }

  return NULL_TREE;
}
7824
/* A subroutine of fold_builtin to fold the various logarithmic
   functions.  Return NULL_TREE if no simplification can be made.
   FUNC is the corresponding MPFR logarithm function, and also selects
   which logN we are folding (log, log2 or log10).  */

static tree
fold_builtin_logarithm (location_t loc, tree fndecl, tree arg,
			int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
{
  if (validate_arg (arg, REAL_TYPE))
    {
      tree type = TREE_TYPE (TREE_TYPE (fndecl));
      tree res;
      const enum built_in_function fcode = builtin_mathfn_code (arg);

      /* Calculate the result when the argument is a constant.
	 NOTE(review): &dconst0 presumably bounds the valid domain
	 (log of a non-positive value) — confirm against do_mpfr_arg1.  */
      if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
	return res;

      /* Special case, optimize logN(expN(x)) = x.  The base of the
	 logarithm must match the base of the inner exponential, which
	 is identified through FUNC.  */
      if (flag_unsafe_math_optimizations
	  && ((func == mpfr_log
	       && (fcode == BUILT_IN_EXP
		   || fcode == BUILT_IN_EXPF
		   || fcode == BUILT_IN_EXPL))
	      || (func == mpfr_log2
		  && (fcode == BUILT_IN_EXP2
		      || fcode == BUILT_IN_EXP2F
		      || fcode == BUILT_IN_EXP2L))
	      || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
	return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));

      /* Optimize logN(func()) for various exponential functions.  We
	 want to determine the value "x" and the power "exponent" in
	 order to transform logN(x**exponent) into exponent*logN(x).  */
      if (flag_unsafe_math_optimizations)
	{
	  tree exponent = 0, x = 0;

	  switch (fcode)
	    {
	    CASE_FLT_FN (BUILT_IN_EXP):
	      /* Prepare to do logN(exp(exponent) -> exponent*logN(e).  */
	      x = build_real (type, real_value_truncate (TYPE_MODE (type),
							 dconst_e ()));
	      exponent = CALL_EXPR_ARG (arg, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_EXP2):
	      /* Prepare to do logN(exp2(exponent) -> exponent*logN(2).  */
	      x = build_real (type, dconst2);
	      exponent = CALL_EXPR_ARG (arg, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_EXP10):
	    CASE_FLT_FN (BUILT_IN_POW10):
	      /* Prepare to do logN(exp10(exponent) -> exponent*logN(10).  */
	      {
		REAL_VALUE_TYPE dconst10;
		real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
		x = build_real (type, dconst10);
	      }
	      exponent = CALL_EXPR_ARG (arg, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_SQRT):
	      /* Prepare to do logN(sqrt(x) -> 0.5*logN(x).  */
	      x = CALL_EXPR_ARG (arg, 0);
	      exponent = build_real (type, dconsthalf);
	      break;
	    CASE_FLT_FN (BUILT_IN_CBRT):
	      /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x).  */
	      x = CALL_EXPR_ARG (arg, 0);
	      exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
								dconst_third ()));
	      break;
	    CASE_FLT_FN (BUILT_IN_POW):
	      /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x).  */
	      x = CALL_EXPR_ARG (arg, 0);
	      exponent = CALL_EXPR_ARG (arg, 1);
	      break;
	    default:
	      break;
	    }

	  /* Now perform the optimization.  */
	  if (x && exponent)
	    {
	      tree logfn = build_call_expr_loc (loc, fndecl, 1, x);
	      return fold_build2_loc (loc, MULT_EXPR, type, exponent, logfn);
	    }
	}
    }

  return NULL_TREE;
}
7917
7918 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
7919 NULL_TREE if no simplification can be made. */
7920
7921 static tree
7922 fold_builtin_hypot (location_t loc, tree fndecl,
7923 tree arg0, tree arg1, tree type)
7924 {
7925 tree res, narg0, narg1;
7926
7927 if (!validate_arg (arg0, REAL_TYPE)
7928 || !validate_arg (arg1, REAL_TYPE))
7929 return NULL_TREE;
7930
7931 /* Calculate the result when the argument is a constant. */
7932 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
7933 return res;
7934
7935 /* If either argument to hypot has a negate or abs, strip that off.
7936 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
7937 narg0 = fold_strip_sign_ops (arg0);
7938 narg1 = fold_strip_sign_ops (arg1);
7939 if (narg0 || narg1)
7940 {
7941 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
7942 narg1 ? narg1 : arg1);
7943 }
7944
7945 /* If either argument is zero, hypot is fabs of the other. */
7946 if (real_zerop (arg0))
7947 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
7948 else if (real_zerop (arg1))
7949 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
7950
7951 /* hypot(x,x) -> fabs(x)*sqrt(2). */
7952 if (flag_unsafe_math_optimizations
7953 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
7954 {
7955 const REAL_VALUE_TYPE sqrt2_trunc
7956 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7957 return fold_build2_loc (loc, MULT_EXPR, type,
7958 fold_build1_loc (loc, ABS_EXPR, type, arg0),
7959 build_real (type, sqrt2_trunc));
7960 }
7961
7962 return NULL_TREE;
7963 }
7964
7965
/* Fold a builtin function call to pow, powf, or powl.  Return
   NULL_TREE if no simplification can be made.  ARG0 is the base,
   ARG1 the exponent, and TYPE the result type of the call.  */
static tree
fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
{
  tree res;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
    return res;

  /* Optimize pow(1.0,y) = 1.0.  */
  if (real_onep (arg0))
    return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);

  if (TREE_CODE (arg1) == REAL_CST
      && !TREE_OVERFLOW (arg1))
    {
      REAL_VALUE_TYPE cint;
      REAL_VALUE_TYPE c;
      HOST_WIDE_INT n;

      c = TREE_REAL_CST (arg1);

      /* Optimize pow(x,0.0) = 1.0.  */
      if (REAL_VALUES_EQUAL (c, dconst0))
	return omit_one_operand_loc (loc, type, build_real (type, dconst1),
				     arg0);

      /* Optimize pow(x,1.0) = x.  */
      if (REAL_VALUES_EQUAL (c, dconst1))
	return arg0;

      /* Optimize pow(x,-1.0) = 1.0/x.  */
      if (REAL_VALUES_EQUAL (c, dconstm1))
	return fold_build2_loc (loc, RDIV_EXPR, type,
				build_real (type, dconst1), arg0);

      /* Optimize pow(x,0.5) = sqrt(x).  Only valid under unsafe math:
	 sqrt and pow differ for negative zero and NaN inputs.  */
      if (flag_unsafe_math_optimizations
	  && REAL_VALUES_EQUAL (c, dconsthalf))
	{
	  tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);

	  if (sqrtfn != NULL_TREE)
	    return build_call_expr_loc (loc, sqrtfn, 1, arg0);
	}

      /* Optimize pow(x,1.0/3.0) = cbrt(x).  1/3 is compared after
	 truncation to TYPE's precision.  */
      if (flag_unsafe_math_optimizations)
	{
	  const REAL_VALUE_TYPE dconstroot
	    = real_value_truncate (TYPE_MODE (type), dconst_third ());

	  if (REAL_VALUES_EQUAL (c, dconstroot))
	    {
	      tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
	      if (cbrtfn != NULL_TREE)
		return build_call_expr_loc (loc, cbrtfn, 1, arg0);
	    }
	}

      /* Check for an integer exponent: C is integral iff it round-trips
	 through real_to_integer/real_from_integer unchanged.  */
      n = real_to_integer (&c);
      real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
      if (real_identical (&c, &cint))
	{
	  /* Attempt to evaluate pow at compile-time, unless this should
	     raise an exception.  pow(0, n<0) may trap or set errno, so
	     leave it to the runtime unless those effects are disabled.  */
	  if (TREE_CODE (arg0) == REAL_CST
	      && !TREE_OVERFLOW (arg0)
	      && (n > 0
		  || (!flag_trapping_math && !flag_errno_math)
		  || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
	    {
	      REAL_VALUE_TYPE x;
	      bool inexact;

	      x = TREE_REAL_CST (arg0);
	      inexact = real_powi (&x, TYPE_MODE (type), &x, n);
	      /* Only use an inexact compile-time result under unsafe
		 math, since rounding differs from the runtime pow.  */
	      if (flag_unsafe_math_optimizations || !inexact)
		return build_real (type, x);
	    }

	  /* Strip sign ops from even integer powers.  */
	  if ((n & 1) == 0 && flag_unsafe_math_optimizations)
	    {
	      tree narg0 = fold_strip_sign_ops (arg0);
	      if (narg0)
		return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
	    }
	}
    }

  if (flag_unsafe_math_optimizations)
    {
      const enum built_in_function fcode = builtin_mathfn_code (arg0);

      /* Optimize pow(expN(x),y) = expN(x*y).  */
      if (BUILTIN_EXPONENT_P (fcode))
	{
	  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
	  tree arg = CALL_EXPR_ARG (arg0, 0);
	  arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
	  return build_call_expr_loc (loc, expfn, 1, arg);
	}

      /* Optimize pow(sqrt(x),y) = pow(x,y*0.5).  */
      if (BUILTIN_SQRT_P (fcode))
	{
	  tree narg0 = CALL_EXPR_ARG (arg0, 0);
	  tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
					build_real (type, dconsthalf));
	  return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
	}

      /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative.  */
      if (BUILTIN_CBRT_P (fcode))
	{
	  tree arg = CALL_EXPR_ARG (arg0, 0);
	  if (tree_expr_nonnegative_p (arg))
	    {
	      const REAL_VALUE_TYPE dconstroot
		= real_value_truncate (TYPE_MODE (type), dconst_third ());
	      tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
					    build_real (type, dconstroot));
	      return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
	    }
	}

      /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative.  */
      if (fcode == BUILT_IN_POW
	  || fcode == BUILT_IN_POWF
	  || fcode == BUILT_IN_POWL)
	{
	  tree arg00 = CALL_EXPR_ARG (arg0, 0);
	  if (tree_expr_nonnegative_p (arg00))
	    {
	      tree arg01 = CALL_EXPR_ARG (arg0, 1);
	      tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
	      return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
	    }
	}
    }

  return NULL_TREE;
}
8117
8118 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8119 Return NULL_TREE if no simplification can be made. */
8120 static tree
8121 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
8122 tree arg0, tree arg1, tree type)
8123 {
8124 if (!validate_arg (arg0, REAL_TYPE)
8125 || !validate_arg (arg1, INTEGER_TYPE))
8126 return NULL_TREE;
8127
8128 /* Optimize pow(1.0,y) = 1.0. */
8129 if (real_onep (arg0))
8130 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8131
8132 if (host_integerp (arg1, 0))
8133 {
8134 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
8135
8136 /* Evaluate powi at compile-time. */
8137 if (TREE_CODE (arg0) == REAL_CST
8138 && !TREE_OVERFLOW (arg0))
8139 {
8140 REAL_VALUE_TYPE x;
8141 x = TREE_REAL_CST (arg0);
8142 real_powi (&x, TYPE_MODE (type), &x, c);
8143 return build_real (type, x);
8144 }
8145
8146 /* Optimize pow(x,0) = 1.0. */
8147 if (c == 0)
8148 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8149 arg0);
8150
8151 /* Optimize pow(x,1) = x. */
8152 if (c == 1)
8153 return arg0;
8154
8155 /* Optimize pow(x,-1) = 1.0/x. */
8156 if (c == -1)
8157 return fold_build2_loc (loc, RDIV_EXPR, type,
8158 build_real (type, dconst1), arg0);
8159 }
8160
8161 return NULL_TREE;
8162 }
8163
8164 /* A subroutine of fold_builtin to fold the various exponent
8165 functions. Return NULL_TREE if no simplification can be made.
8166 FUNC is the corresponding MPFR exponent function. */
8167
8168 static tree
8169 fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
8170 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8171 {
8172 if (validate_arg (arg, REAL_TYPE))
8173 {
8174 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8175 tree res;
8176
8177 /* Calculate the result when the argument is a constant. */
8178 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8179 return res;
8180
8181 /* Optimize expN(logN(x)) = x. */
8182 if (flag_unsafe_math_optimizations)
8183 {
8184 const enum built_in_function fcode = builtin_mathfn_code (arg);
8185
8186 if ((func == mpfr_exp
8187 && (fcode == BUILT_IN_LOG
8188 || fcode == BUILT_IN_LOGF
8189 || fcode == BUILT_IN_LOGL))
8190 || (func == mpfr_exp2
8191 && (fcode == BUILT_IN_LOG2
8192 || fcode == BUILT_IN_LOG2F
8193 || fcode == BUILT_IN_LOG2L))
8194 || (func == mpfr_exp10
8195 && (fcode == BUILT_IN_LOG10
8196 || fcode == BUILT_IN_LOG10F
8197 || fcode == BUILT_IN_LOG10L)))
8198 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8199 }
8200 }
8201
8202 return NULL_TREE;
8203 }
8204
8205 /* Return true if VAR is a VAR_DECL or a component thereof. */
8206
8207 static bool
8208 var_decl_component_p (tree var)
8209 {
8210 tree inner = var;
8211 while (handled_component_p (inner))
8212 inner = TREE_OPERAND (inner, 0);
8213 return SSA_VAR_P (inner);
8214 }
8215
/* Fold function call to builtin memset.  Return
   NULL_TREE if no simplification can be made.  DEST, C and LEN are
   the call arguments, TYPE is the call's result type and IGNORE is
   true when the return value is unused.  The simplification performed
   here turns a type-sized memset of a known object into one store.  */

static tree
fold_builtin_memset (location_t loc, tree dest, tree c, tree len,
		     tree type, bool ignore)
{
  tree var, ret, etype;
  unsigned HOST_WIDE_INT length, cval;

  if (! validate_arg (dest, POINTER_TYPE)
      || ! validate_arg (c, INTEGER_TYPE)
      || ! validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* The length must be a known non-negative constant.  */
  if (! host_integerp (len, 1))
    return NULL_TREE;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    return omit_one_operand_loc (loc, type, dest, c);

  /* The fill value must be constant, and DEST is used twice below so
     it must be free of side effects.  */
  if (! host_integerp (c, 1) || TREE_SIDE_EFFECTS (dest))
    return NULL_TREE;

  /* DEST must be the address of a non-volatile object.  */
  var = dest;
  STRIP_NOPS (var);
  if (TREE_CODE (var) != ADDR_EXPR)
    return NULL_TREE;

  var = TREE_OPERAND (var, 0);
  if (TREE_THIS_VOLATILE (var))
    return NULL_TREE;

  /* For an array destination, store through the element type.  */
  etype = TREE_TYPE (var);
  if (TREE_CODE (etype) == ARRAY_TYPE)
    etype = TREE_TYPE (etype);

  /* Only integral and pointer stores are synthesized here.  */
  if (!INTEGRAL_TYPE_P (etype)
      && !POINTER_TYPE_P (etype))
    return NULL_TREE;

  if (! var_decl_component_p (var))
    return NULL_TREE;

  /* The memset must cover exactly one object of type ETYPE, and DEST
     must be sufficiently aligned for a store of that size.  */
  length = tree_low_cst (len, 1);
  if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
      || get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT
	 < (int) length)
    return NULL_TREE;

  /* The replicated fill pattern must fit in a host wide int.  */
  if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
    return NULL_TREE;

  if (integer_zerop (c))
    cval = 0;
  else
    {
      /* The byte replication below assumes 8-bit bytes and a host
	 wide int of at most 64 bits.  */
      if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
	return NULL_TREE;

      /* Replicate the low byte of C across the whole word.  The final
	 double shift (31 then 1) fills the upper 32 bits without ever
	 shifting by 32, which would be undefined when HOST_WIDE_INT
	 is only 32 bits wide (there it simply yields zero).  */
      cval = tree_low_cst (c, 1);
      cval &= 0xff;
      cval |= cval << 8;
      cval |= cval << 16;
      cval |= (cval << 31) << 1;
    }

  /* Emit *(etype *) DEST = CVAL.  */
  ret = build_int_cst_type (etype, cval);
  var = build_fold_indirect_ref_loc (loc,
				     fold_convert_loc (loc,
						       build_pointer_type (etype),
						       dest));
  ret = build2 (MODIFY_EXPR, etype, var, ret);
  if (ignore)
    return ret;

  /* The call's value is DEST; sequence the store before it.  */
  return omit_one_operand_loc (loc, type, dest, ret);
}
8295
8296 /* Fold function call to builtin memset. Return
8297 NULL_TREE if no simplification can be made. */
8298
8299 static tree
8300 fold_builtin_bzero (location_t loc, tree dest, tree size, bool ignore)
8301 {
8302 if (! validate_arg (dest, POINTER_TYPE)
8303 || ! validate_arg (size, INTEGER_TYPE))
8304 return NULL_TREE;
8305
8306 if (!ignore)
8307 return NULL_TREE;
8308
8309 /* New argument list transforming bzero(ptr x, int y) to
8310 memset(ptr x, int 0, size_t y). This is done this way
8311 so that if it isn't expanded inline, we fallback to
8312 calling bzero instead of memset. */
8313
8314 return fold_builtin_memset (loc, dest, integer_zero_node,
8315 fold_convert_loc (loc, sizetype, size),
8316 void_type_node, ignore);
8317 }
8318
/* Fold function call to builtin mem{{,p}cpy,move}.  Return
   NULL_TREE if no simplification can be made.
   If ENDP is 0, return DEST (like memcpy).
   If ENDP is 1, return DEST+LEN (like mempcpy).
   If ENDP is 2, return DEST+LEN-1 (like stpcpy).
   If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
   (memmove).  */

static tree
fold_builtin_memory_op (location_t loc, tree dest, tree src,
			tree len, tree type, bool ignore, int endp)
{
  tree destvar, srcvar, expr;

  if (! validate_arg (dest, POINTER_TYPE)
      || ! validate_arg (src, POINTER_TYPE)
      || ! validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    return omit_one_operand_loc (loc, type, dest, src);

  /* If SRC and DEST are the same (and not volatile), return
     DEST{,+LEN,+LEN-1}.  EXPR is the side effect the caller must keep;
     setting it to LEN here marks "no copy needed" for the tail code.  */
  if (operand_equal_p (src, dest, 0))
    expr = len;
  else
    {
      tree srctype, desttype;
      int src_align, dest_align;
      tree off0;

      /* memmove: the only folding done here is turning it into memcpy
	 when the regions provably cannot overlap.  */
      if (endp == 3)
	{
	  src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
	  dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);

	  /* Both DEST and SRC must be pointer types.
	     ??? This is what old code did.  Is the testing for pointer types
	     really mandatory?

	     If either SRC is readonly or length is 1, we can use memcpy.  */
	  if (!dest_align || !src_align)
	    return NULL_TREE;
	  if (readonly_data_expr (src)
	      || (host_integerp (len, 1)
		  && (MIN (src_align, dest_align) / BITS_PER_UNIT
		      >= tree_low_cst (len, 1))))
	    {
	      tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
	      if (!fn)
		return NULL_TREE;
	      return build_call_expr_loc (loc, fn, 3, dest, src, len);
	    }

	  /* If *src and *dest can't overlap, optimize into memcpy as well.  */
	  if (TREE_CODE (src) == ADDR_EXPR
	      && TREE_CODE (dest) == ADDR_EXPR)
	    {
	      tree src_base, dest_base, fn;
	      HOST_WIDE_INT src_offset = 0, dest_offset = 0;
	      HOST_WIDE_INT size = -1;
	      HOST_WIDE_INT maxsize = -1;

	      /* Resolve both addresses to base object + bit offset.  */
	      srcvar = TREE_OPERAND (src, 0);
	      src_base = get_ref_base_and_extent (srcvar, &src_offset,
						  &size, &maxsize);
	      destvar = TREE_OPERAND (dest, 0);
	      dest_base = get_ref_base_and_extent (destvar, &dest_offset,
						   &size, &maxsize);
	      /* Use the copy length, when constant, as the extent of
		 both accesses; -1 means "unknown".  */
	      if (host_integerp (len, 1))
		maxsize = tree_low_cst (len, 1);
	      else
		maxsize = -1;
	      src_offset /= BITS_PER_UNIT;
	      dest_offset /= BITS_PER_UNIT;
	      if (SSA_VAR_P (src_base)
		  && SSA_VAR_P (dest_base))
		{
		  /* Distinct decls can't overlap; the same decl only
		     overlaps when the offset ranges intersect.  */
		  if (operand_equal_p (src_base, dest_base, 0)
		      && ranges_overlap_p (src_offset, maxsize,
					   dest_offset, maxsize))
		    return NULL_TREE;
		}
	      else if (TREE_CODE (src_base) == MEM_REF
		       && TREE_CODE (dest_base) == MEM_REF)
		{
		  /* Two MEM_REFs: comparable only when based on the
		     same pointer; fold the MEM_REF offsets into the
		     byte offsets, guarding against overflow.  */
		  double_int off;
		  if (! operand_equal_p (TREE_OPERAND (src_base, 0),
					 TREE_OPERAND (dest_base, 0), 0))
		    return NULL_TREE;
		  off = double_int_add (mem_ref_offset (src_base),
					shwi_to_double_int (src_offset));
		  if (!double_int_fits_in_shwi_p (off))
		    return NULL_TREE;
		  src_offset = off.low;
		  off = double_int_add (mem_ref_offset (dest_base),
					shwi_to_double_int (dest_offset));
		  if (!double_int_fits_in_shwi_p (off))
		    return NULL_TREE;
		  dest_offset = off.low;
		  if (ranges_overlap_p (src_offset, maxsize,
					dest_offset, maxsize))
		    return NULL_TREE;
		}
	      else
		return NULL_TREE;

	      fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
	      if (!fn)
		return NULL_TREE;
	      return build_call_expr_loc (loc, fn, 3, dest, src, len);
	    }
	  return NULL_TREE;
	}

      /* memcpy/mempcpy/stpcpy: try to turn the copy into a single
	 LHS = RHS assignment of a LEN-sized type.  */
      if (!host_integerp (len, 0))
	return NULL_TREE;
      /* FIXME:
	 This logic lose for arguments like (type *)malloc (sizeof (type)),
	 since we strip the casts of up to VOID return value from malloc.
	 Perhaps we ought to inherit type from non-VOID argument here?  */
      STRIP_NOPS (src);
      STRIP_NOPS (dest);
      /* As we fold (void *)(p + CST) to (void *)p + CST undo this here.  */
      if (TREE_CODE (src) == POINTER_PLUS_EXPR)
	{
	  tree tem = TREE_OPERAND (src, 0);
	  STRIP_NOPS (tem);
	  if (tem != TREE_OPERAND (src, 0))
	    src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
	}
      if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
	{
	  tree tem = TREE_OPERAND (dest, 0);
	  STRIP_NOPS (tem);
	  if (tem != TREE_OPERAND (dest, 0))
	    dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
	}
      /* When the pointee is an array that isn't exactly LEN bytes,
	 retry with its element type.  */
      srctype = TREE_TYPE (TREE_TYPE (src));
      if (srctype
	  && TREE_CODE (srctype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
	{
	  srctype = TREE_TYPE (srctype);
	  STRIP_NOPS (src);
	  src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
	}
      desttype = TREE_TYPE (TREE_TYPE (dest));
      if (desttype
	  && TREE_CODE (desttype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
	{
	  desttype = TREE_TYPE (desttype);
	  STRIP_NOPS (dest);
	  dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
	}
      /* Both types must exist, be non-addressable and have a constant
	 byte size for the single-assignment rewrite to be valid.  */
      if (!srctype || !desttype
	  || TREE_ADDRESSABLE (srctype)
	  || TREE_ADDRESSABLE (desttype)
	  || !TYPE_SIZE_UNIT (srctype)
	  || !TYPE_SIZE_UNIT (desttype)
	  || TREE_CODE (TYPE_SIZE_UNIT (srctype)) != INTEGER_CST
	  || TREE_CODE (TYPE_SIZE_UNIT (desttype)) != INTEGER_CST)
	return NULL_TREE;

      src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
      dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
      if (dest_align < (int) TYPE_ALIGN (desttype)
	  || src_align < (int) TYPE_ALIGN (srctype))
	return NULL_TREE;

      /* DEST is also used to build the return value below.  */
      if (!ignore)
	dest = builtin_save_expr (dest);

      /* Build accesses at offset zero with a ref-all character type.  */
      off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
							 ptr_mode, true), 0);

      /* Accept a side as a direct variable access only when it is the
	 address of a decl component of exactly LEN bytes.  */
      destvar = dest;
      STRIP_NOPS (destvar);
      if (TREE_CODE (destvar) == ADDR_EXPR
	  && var_decl_component_p (TREE_OPERAND (destvar, 0))
	  && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
	destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
      else
	destvar = NULL_TREE;

      srcvar = src;
      STRIP_NOPS (srcvar);
      if (TREE_CODE (srcvar) == ADDR_EXPR
	  && var_decl_component_p (TREE_OPERAND (srcvar, 0))
	  && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
	srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
			      srcvar, off0);
      else
	srcvar = NULL_TREE;

      if (srcvar == NULL_TREE && destvar == NULL_TREE)
	return NULL_TREE;

      /* If only one side qualified, access the other through a MEM_REF
	 of the qualifying side's type.  */
      if (srcvar == NULL_TREE)
	{
	  STRIP_NOPS (src);
	  srcvar = fold_build2 (MEM_REF, desttype, src, off0);
	}
      else if (destvar == NULL_TREE)
	{
	  STRIP_NOPS (dest);
	  destvar = fold_build2 (MEM_REF, srctype, dest, off0);
	}

      expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, srcvar);
    }

  if (ignore)
    return expr;

  /* memcpy and memmove return DEST itself.  */
  if (endp == 0 || endp == 3)
    return omit_one_operand_loc (loc, type, dest, expr);

  /* EXPR == LEN means SRC and DEST were identical: no store to keep.  */
  if (expr == len)
    expr = NULL_TREE;

  /* stpcpy points at the terminating NUL, i.e. DEST+LEN-1.  */
  if (endp == 2)
    len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
			   ssize_int (1));

  len = fold_convert_loc (loc, sizetype, len);
  dest = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
  dest = fold_convert_loc (loc, type, dest);
  if (expr)
    dest = omit_one_operand_loc (loc, type, dest, expr);
  return dest;
}
8555
8556 /* Fold function call to builtin strcpy with arguments DEST and SRC.
8557 If LEN is not NULL, it represents the length of the string to be
8558 copied. Return NULL_TREE if no simplification can be made. */
8559
8560 tree
8561 fold_builtin_strcpy (location_t loc, tree fndecl, tree dest, tree src, tree len)
8562 {
8563 tree fn;
8564
8565 if (!validate_arg (dest, POINTER_TYPE)
8566 || !validate_arg (src, POINTER_TYPE))
8567 return NULL_TREE;
8568
8569 /* If SRC and DEST are the same (and not volatile), return DEST. */
8570 if (operand_equal_p (src, dest, 0))
8571 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
8572
8573 if (optimize_function_for_size_p (cfun))
8574 return NULL_TREE;
8575
8576 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8577 if (!fn)
8578 return NULL_TREE;
8579
8580 if (!len)
8581 {
8582 len = c_strlen (src, 1);
8583 if (! len || TREE_SIDE_EFFECTS (len))
8584 return NULL_TREE;
8585 }
8586
8587 len = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
8588 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
8589 build_call_expr_loc (loc, fn, 3, dest, src, len));
8590 }
8591
8592 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
8593 Return NULL_TREE if no simplification can be made. */
8594
8595 static tree
8596 fold_builtin_stpcpy (location_t loc, tree fndecl, tree dest, tree src)
8597 {
8598 tree fn, len, lenp1, call, type;
8599
8600 if (!validate_arg (dest, POINTER_TYPE)
8601 || !validate_arg (src, POINTER_TYPE))
8602 return NULL_TREE;
8603
8604 len = c_strlen (src, 1);
8605 if (!len
8606 || TREE_CODE (len) != INTEGER_CST)
8607 return NULL_TREE;
8608
8609 if (optimize_function_for_size_p (cfun)
8610 /* If length is zero it's small enough. */
8611 && !integer_zerop (len))
8612 return NULL_TREE;
8613
8614 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8615 if (!fn)
8616 return NULL_TREE;
8617
8618 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
8619 /* We use dest twice in building our expression. Save it from
8620 multiple expansions. */
8621 dest = builtin_save_expr (dest);
8622 call = build_call_expr_loc (loc, fn, 3, dest, src, lenp1);
8623
8624 type = TREE_TYPE (TREE_TYPE (fndecl));
8625 len = fold_convert_loc (loc, sizetype, len);
8626 dest = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
8627 dest = fold_convert_loc (loc, type, dest);
8628 dest = omit_one_operand_loc (loc, type, dest, call);
8629 return dest;
8630 }
8631
8632 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
8633 If SLEN is not NULL, it represents the length of the source string.
8634 Return NULL_TREE if no simplification can be made. */
8635
8636 tree
8637 fold_builtin_strncpy (location_t loc, tree fndecl, tree dest,
8638 tree src, tree len, tree slen)
8639 {
8640 tree fn;
8641
8642 if (!validate_arg (dest, POINTER_TYPE)
8643 || !validate_arg (src, POINTER_TYPE)
8644 || !validate_arg (len, INTEGER_TYPE))
8645 return NULL_TREE;
8646
8647 /* If the LEN parameter is zero, return DEST. */
8648 if (integer_zerop (len))
8649 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
8650
8651 /* We can't compare slen with len as constants below if len is not a
8652 constant. */
8653 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
8654 return NULL_TREE;
8655
8656 if (!slen)
8657 slen = c_strlen (src, 1);
8658
8659 /* Now, we must be passed a constant src ptr parameter. */
8660 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
8661 return NULL_TREE;
8662
8663 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
8664
8665 /* We do not support simplification of this case, though we do
8666 support it when expanding trees into RTL. */
8667 /* FIXME: generate a call to __builtin_memset. */
8668 if (tree_int_cst_lt (slen, len))
8669 return NULL_TREE;
8670
8671 /* OK transform into builtin memcpy. */
8672 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8673 if (!fn)
8674 return NULL_TREE;
8675 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
8676 build_call_expr_loc (loc, fn, 3, dest, src, len));
8677 }
8678
8679 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8680 arguments to the call, and TYPE is its return type.
8681 Return NULL_TREE if no simplification can be made. */
8682
8683 static tree
8684 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
8685 {
8686 if (!validate_arg (arg1, POINTER_TYPE)
8687 || !validate_arg (arg2, INTEGER_TYPE)
8688 || !validate_arg (len, INTEGER_TYPE))
8689 return NULL_TREE;
8690 else
8691 {
8692 const char *p1;
8693
8694 if (TREE_CODE (arg2) != INTEGER_CST
8695 || !host_integerp (len, 1))
8696 return NULL_TREE;
8697
8698 p1 = c_getstr (arg1);
8699 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
8700 {
8701 char c;
8702 const char *r;
8703 tree tem;
8704
8705 if (target_char_cast (arg2, &c))
8706 return NULL_TREE;
8707
8708 r = (char *) memchr (p1, c, tree_low_cst (len, 1));
8709
8710 if (r == NULL)
8711 return build_int_cst (TREE_TYPE (arg1), 0);
8712
8713 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (arg1), arg1,
8714 size_int (r - p1));
8715 return fold_convert_loc (loc, type, tem);
8716 }
8717 return NULL_TREE;
8718 }
8719 }
8720
8721 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8722 Return NULL_TREE if no simplification can be made. */
8723
8724 static tree
8725 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8726 {
8727 const char *p1, *p2;
8728
8729 if (!validate_arg (arg1, POINTER_TYPE)
8730 || !validate_arg (arg2, POINTER_TYPE)
8731 || !validate_arg (len, INTEGER_TYPE))
8732 return NULL_TREE;
8733
8734 /* If the LEN parameter is zero, return zero. */
8735 if (integer_zerop (len))
8736 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8737 arg1, arg2);
8738
8739 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8740 if (operand_equal_p (arg1, arg2, 0))
8741 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8742
8743 p1 = c_getstr (arg1);
8744 p2 = c_getstr (arg2);
8745
8746 /* If all arguments are constant, and the value of len is not greater
8747 than the lengths of arg1 and arg2, evaluate at compile-time. */
8748 if (host_integerp (len, 1) && p1 && p2
8749 && compare_tree_int (len, strlen (p1) + 1) <= 0
8750 && compare_tree_int (len, strlen (p2) + 1) <= 0)
8751 {
8752 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
8753
8754 if (r > 0)
8755 return integer_one_node;
8756 else if (r < 0)
8757 return integer_minus_one_node;
8758 else
8759 return integer_zero_node;
8760 }
8761
8762 /* If len parameter is one, return an expression corresponding to
8763 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
8764 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
8765 {
8766 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8767 tree cst_uchar_ptr_node
8768 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8769
8770 tree ind1
8771 = fold_convert_loc (loc, integer_type_node,
8772 build1 (INDIRECT_REF, cst_uchar_node,
8773 fold_convert_loc (loc,
8774 cst_uchar_ptr_node,
8775 arg1)));
8776 tree ind2
8777 = fold_convert_loc (loc, integer_type_node,
8778 build1 (INDIRECT_REF, cst_uchar_node,
8779 fold_convert_loc (loc,
8780 cst_uchar_ptr_node,
8781 arg2)));
8782 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8783 }
8784
8785 return NULL_TREE;
8786 }
8787
8788 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
8789 Return NULL_TREE if no simplification can be made. */
8790
8791 static tree
8792 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
8793 {
8794 const char *p1, *p2;
8795
8796 if (!validate_arg (arg1, POINTER_TYPE)
8797 || !validate_arg (arg2, POINTER_TYPE))
8798 return NULL_TREE;
8799
8800 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8801 if (operand_equal_p (arg1, arg2, 0))
8802 return integer_zero_node;
8803
8804 p1 = c_getstr (arg1);
8805 p2 = c_getstr (arg2);
8806
8807 if (p1 && p2)
8808 {
8809 const int i = strcmp (p1, p2);
8810 if (i < 0)
8811 return integer_minus_one_node;
8812 else if (i > 0)
8813 return integer_one_node;
8814 else
8815 return integer_zero_node;
8816 }
8817
8818 /* If the second arg is "", return *(const unsigned char*)arg1. */
8819 if (p2 && *p2 == '\0')
8820 {
8821 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8822 tree cst_uchar_ptr_node
8823 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8824
8825 return fold_convert_loc (loc, integer_type_node,
8826 build1 (INDIRECT_REF, cst_uchar_node,
8827 fold_convert_loc (loc,
8828 cst_uchar_ptr_node,
8829 arg1)));
8830 }
8831
8832 /* If the first arg is "", return -*(const unsigned char*)arg2. */
8833 if (p1 && *p1 == '\0')
8834 {
8835 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8836 tree cst_uchar_ptr_node
8837 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8838
8839 tree temp
8840 = fold_convert_loc (loc, integer_type_node,
8841 build1 (INDIRECT_REF, cst_uchar_node,
8842 fold_convert_loc (loc,
8843 cst_uchar_ptr_node,
8844 arg2)));
8845 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8846 }
8847
8848 return NULL_TREE;
8849 }
8850
8851 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
8852 Return NULL_TREE if no simplification can be made. */
8853
8854 static tree
8855 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
8856 {
8857 const char *p1, *p2;
8858
8859 if (!validate_arg (arg1, POINTER_TYPE)
8860 || !validate_arg (arg2, POINTER_TYPE)
8861 || !validate_arg (len, INTEGER_TYPE))
8862 return NULL_TREE;
8863
8864 /* If the LEN parameter is zero, return zero. */
8865 if (integer_zerop (len))
8866 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8867 arg1, arg2);
8868
8869 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8870 if (operand_equal_p (arg1, arg2, 0))
8871 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8872
8873 p1 = c_getstr (arg1);
8874 p2 = c_getstr (arg2);
8875
8876 if (host_integerp (len, 1) && p1 && p2)
8877 {
8878 const int i = strncmp (p1, p2, tree_low_cst (len, 1));
8879 if (i > 0)
8880 return integer_one_node;
8881 else if (i < 0)
8882 return integer_minus_one_node;
8883 else
8884 return integer_zero_node;
8885 }
8886
8887 /* If the second arg is "", and the length is greater than zero,
8888 return *(const unsigned char*)arg1. */
8889 if (p2 && *p2 == '\0'
8890 && TREE_CODE (len) == INTEGER_CST
8891 && tree_int_cst_sgn (len) == 1)
8892 {
8893 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8894 tree cst_uchar_ptr_node
8895 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8896
8897 return fold_convert_loc (loc, integer_type_node,
8898 build1 (INDIRECT_REF, cst_uchar_node,
8899 fold_convert_loc (loc,
8900 cst_uchar_ptr_node,
8901 arg1)));
8902 }
8903
8904 /* If the first arg is "", and the length is greater than zero,
8905 return -*(const unsigned char*)arg2. */
8906 if (p1 && *p1 == '\0'
8907 && TREE_CODE (len) == INTEGER_CST
8908 && tree_int_cst_sgn (len) == 1)
8909 {
8910 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8911 tree cst_uchar_ptr_node
8912 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8913
8914 tree temp = fold_convert_loc (loc, integer_type_node,
8915 build1 (INDIRECT_REF, cst_uchar_node,
8916 fold_convert_loc (loc,
8917 cst_uchar_ptr_node,
8918 arg2)));
8919 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8920 }
8921
8922 /* If len parameter is one, return an expression corresponding to
8923 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
8924 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
8925 {
8926 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8927 tree cst_uchar_ptr_node
8928 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8929
8930 tree ind1 = fold_convert_loc (loc, integer_type_node,
8931 build1 (INDIRECT_REF, cst_uchar_node,
8932 fold_convert_loc (loc,
8933 cst_uchar_ptr_node,
8934 arg1)));
8935 tree ind2 = fold_convert_loc (loc, integer_type_node,
8936 build1 (INDIRECT_REF, cst_uchar_node,
8937 fold_convert_loc (loc,
8938 cst_uchar_ptr_node,
8939 arg2)));
8940 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8941 }
8942
8943 return NULL_TREE;
8944 }
8945
8946 /* Fold function call to builtin signbit, signbitf or signbitl with argument
8947 ARG. Return NULL_TREE if no simplification can be made. */
8948
8949 static tree
8950 fold_builtin_signbit (location_t loc, tree arg, tree type)
8951 {
8952 tree temp;
8953
8954 if (!validate_arg (arg, REAL_TYPE))
8955 return NULL_TREE;
8956
8957 /* If ARG is a compile-time constant, determine the result. */
8958 if (TREE_CODE (arg) == REAL_CST
8959 && !TREE_OVERFLOW (arg))
8960 {
8961 REAL_VALUE_TYPE c;
8962
8963 c = TREE_REAL_CST (arg);
8964 temp = REAL_VALUE_NEGATIVE (c) ? integer_one_node : integer_zero_node;
8965 return fold_convert_loc (loc, type, temp);
8966 }
8967
8968 /* If ARG is non-negative, the result is always zero. */
8969 if (tree_expr_nonnegative_p (arg))
8970 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8971
8972 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
8973 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
8974 return fold_build2_loc (loc, LT_EXPR, type, arg,
8975 build_real (TREE_TYPE (arg), dconst0));
8976
8977 return NULL_TREE;
8978 }
8979
8980 /* Fold function call to builtin copysign, copysignf or copysignl with
8981 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
8982 be made. */
8983
8984 static tree
8985 fold_builtin_copysign (location_t loc, tree fndecl,
8986 tree arg1, tree arg2, tree type)
8987 {
8988 tree tem;
8989
8990 if (!validate_arg (arg1, REAL_TYPE)
8991 || !validate_arg (arg2, REAL_TYPE))
8992 return NULL_TREE;
8993
8994 /* copysign(X,X) is X. */
8995 if (operand_equal_p (arg1, arg2, 0))
8996 return fold_convert_loc (loc, type, arg1);
8997
8998 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
8999 if (TREE_CODE (arg1) == REAL_CST
9000 && TREE_CODE (arg2) == REAL_CST
9001 && !TREE_OVERFLOW (arg1)
9002 && !TREE_OVERFLOW (arg2))
9003 {
9004 REAL_VALUE_TYPE c1, c2;
9005
9006 c1 = TREE_REAL_CST (arg1);
9007 c2 = TREE_REAL_CST (arg2);
9008 /* c1.sign := c2.sign. */
9009 real_copysign (&c1, &c2);
9010 return build_real (type, c1);
9011 }
9012
9013 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9014 Remember to evaluate Y for side-effects. */
9015 if (tree_expr_nonnegative_p (arg2))
9016 return omit_one_operand_loc (loc, type,
9017 fold_build1_loc (loc, ABS_EXPR, type, arg1),
9018 arg2);
9019
9020 /* Strip sign changing operations for the first argument. */
9021 tem = fold_strip_sign_ops (arg1);
9022 if (tem)
9023 return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
9024
9025 return NULL_TREE;
9026 }
9027
9028 /* Fold a call to builtin isascii with argument ARG. */
9029
9030 static tree
9031 fold_builtin_isascii (location_t loc, tree arg)
9032 {
9033 if (!validate_arg (arg, INTEGER_TYPE))
9034 return NULL_TREE;
9035 else
9036 {
9037 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9038 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9039 build_int_cst (NULL_TREE,
9040 ~ (unsigned HOST_WIDE_INT) 0x7f));
9041 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
9042 arg, integer_zero_node);
9043 }
9044 }
9045
/* Fold a call to builtin toascii with argument ARG.  Returns the folded
   expression, or NULL_TREE if ARG is not of integer type.  */

static tree
fold_builtin_toascii (location_t loc, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Transform toascii(c) -> (c & 0x7f), i.e. keep the low 7 bits.  */
  return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
			  build_int_cst (NULL_TREE, 0x7f));
}
9058
9059 /* Fold a call to builtin isdigit with argument ARG. */
9060
9061 static tree
9062 fold_builtin_isdigit (location_t loc, tree arg)
9063 {
9064 if (!validate_arg (arg, INTEGER_TYPE))
9065 return NULL_TREE;
9066 else
9067 {
9068 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9069 /* According to the C standard, isdigit is unaffected by locale.
9070 However, it definitely is affected by the target character set. */
9071 unsigned HOST_WIDE_INT target_digit0
9072 = lang_hooks.to_target_charset ('0');
9073
9074 if (target_digit0 == 0)
9075 return NULL_TREE;
9076
9077 arg = fold_convert_loc (loc, unsigned_type_node, arg);
9078 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
9079 build_int_cst (unsigned_type_node, target_digit0));
9080 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
9081 build_int_cst (unsigned_type_node, 9));
9082 }
9083 }
9084
9085 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9086
9087 static tree
9088 fold_builtin_fabs (location_t loc, tree arg, tree type)
9089 {
9090 if (!validate_arg (arg, REAL_TYPE))
9091 return NULL_TREE;
9092
9093 arg = fold_convert_loc (loc, type, arg);
9094 if (TREE_CODE (arg) == REAL_CST)
9095 return fold_abs_const (arg, type);
9096 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9097 }
9098
9099 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9100
9101 static tree
9102 fold_builtin_abs (location_t loc, tree arg, tree type)
9103 {
9104 if (!validate_arg (arg, INTEGER_TYPE))
9105 return NULL_TREE;
9106
9107 arg = fold_convert_loc (loc, type, arg);
9108 if (TREE_CODE (arg) == INTEGER_CST)
9109 return fold_abs_const (arg, type);
9110 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9111 }
9112
/* Fold a call to builtin fmin or fmax with arguments ARG0 and ARG1.
   TYPE is the result type; MAX selects fmax, otherwise fmin.  Return
   the folded expression, or NULL_TREE if no simplification can be
   made.  */

static tree
fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
			tree type, bool max)
{
  if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
    {
      /* Calculate the result when the argument is a constant.  */
      tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));

      if (res)
	return res;

      /* If either argument is NaN, return the other one.  Avoid the
	 transformation if we get (and honor) a signalling NaN.  Using
	 omit_one_operand() ensures we create a non-lvalue.  */
      if (TREE_CODE (arg0) == REAL_CST
	  && real_isnan (&TREE_REAL_CST (arg0))
	  && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	      || ! TREE_REAL_CST (arg0).signalling))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      if (TREE_CODE (arg1) == REAL_CST
	  && real_isnan (&TREE_REAL_CST (arg1))
	  && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
	      || ! TREE_REAL_CST (arg1).signalling))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* Transform fmin/fmax(x,x) -> x.  OEP_PURE_SAME also accepts
	 equal calls to pure functions.  */
      if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR.  C99 requires these
	 functions to return the numeric arg if the other one is NaN.
	 These tree codes don't honor that, so only transform if
	 -ffinite-math-only is set.  C99 doesn't require -0.0 to be
	 handled, so we don't have to worry about it either.  */
      if (flag_finite_math_only)
	return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
				fold_convert_loc (loc, type, arg0),
				fold_convert_loc (loc, type, arg1));
    }
  return NULL_TREE;
}
9157
9158 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9159
9160 static tree
9161 fold_builtin_carg (location_t loc, tree arg, tree type)
9162 {
9163 if (validate_arg (arg, COMPLEX_TYPE)
9164 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9165 {
9166 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9167
9168 if (atan2_fn)
9169 {
9170 tree new_arg = builtin_save_expr (arg);
9171 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9172 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9173 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9174 }
9175 }
9176
9177 return NULL_TREE;
9178 }
9179
/* Fold a call to builtin logb/ilogb with argument ARG.  RETTYPE is
   the type the call returns: a real type for logb, an integer type
   for ilogb.  Only constant arguments are folded; return NULL_TREE
   otherwise.  */

static tree
fold_builtin_logb (location_t loc, tree arg, tree rettype)
{
  if (! validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);

      switch (value->cl)
	{
	case rvc_nan:
	case rvc_inf:
	  /* If arg is Inf or NaN and we're logb, return it.  */
	  if (TREE_CODE (rettype) == REAL_TYPE)
	    return fold_convert_loc (loc, rettype, arg);
	  /* Fall through: for ilogb, Inf/NaN get the same treatment
	     as zero below.  */
	case rvc_zero:
	  /* Zero may set errno and/or raise an exception for logb, also
	     for ilogb we don't know FP_ILOGB0.  */
	  return NULL_TREE;
	case rvc_normal:
	  /* For normal numbers, proceed iff radix == 2.  In GCC,
	     normalized significands are in the range [0.5, 1.0).  We
	     want the exponent as if they were [1.0, 2.0) so get the
	     exponent and subtract 1.  */
	  if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
	    return fold_convert_loc (loc, rettype,
				     build_int_cst (NULL_TREE,
						    REAL_EXP (value)-1));
	  break;
	}
    }

  return NULL_TREE;
}
9221
/* Fold a call to builtin significand with argument ARG, if radix == 2.
   RETTYPE is the real type of the result.  Only a constant ARG is
   folded; return NULL_TREE otherwise.  */

static tree
fold_builtin_significand (location_t loc, tree arg, tree rettype)
{
  if (! validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);

      switch (value->cl)
	{
	case rvc_zero:
	case rvc_nan:
	case rvc_inf:
	  /* If arg is +-0, +-Inf or +-NaN, then return it.  */
	  return fold_convert_loc (loc, rettype, arg);
	case rvc_normal:
	  /* For normal numbers, proceed iff radix == 2.  */
	  if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
	    {
	      REAL_VALUE_TYPE result = *value;
	      /* In GCC, normalized significands are in the range [0.5,
		 1.0).  We want them to be [1.0, 2.0) so set the
		 exponent to 1.  */
	      SET_REAL_EXP (&result, 1);
	      return build_real (rettype, result);
	    }
	  break;
	}
    }

  return NULL_TREE;
}
9260
/* Fold a call to builtin frexp with arguments ARG0 (the value) and
   ARG1 (a pointer to int that receives the exponent); we can assume
   the base is 2.  RETTYPE is the real type of the result.  Only a
   constant ARG0 is folded; return NULL_TREE otherwise.  */

static tree
fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
{
  if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg0);

  if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
    return NULL_TREE;

  arg1 = build_fold_indirect_ref_loc (loc, arg1);

  /* Proceed if a valid pointer type was passed in.  */
  if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
      tree frac, exp;

      switch (value->cl)
	{
	case rvc_zero:
	  /* For +-0, return (*exp = 0, +-0).  */
	  exp = integer_zero_node;
	  frac = arg0;
	  break;
	case rvc_nan:
	case rvc_inf:
	  /* For +-NaN or +-Inf, *exp is unspecified, return arg0.  */
	  return omit_one_operand_loc (loc, rettype, arg0, arg1);
	case rvc_normal:
	  {
	    /* Since the frexp function always expects base 2, and in
	       GCC normalized significands are already in the range
	       [0.5, 1.0), we have exactly what frexp wants.  */
	    REAL_VALUE_TYPE frac_rvt = *value;
	    SET_REAL_EXP (&frac_rvt, 0);
	    frac = build_real (rettype, frac_rvt);
	    exp = build_int_cst (NULL_TREE, REAL_EXP (value));
	  }
	  break;
	default:
	  gcc_unreachable ();
	}

      /* Create the COMPOUND_EXPR (*arg1 = exp, frac).  */
      arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
      TREE_SIDE_EFFECTS (arg1) = 1;
      return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
    }

  return NULL_TREE;
}
9316
/* Fold a call to builtin ldexp or scalbn/scalbln.  ARG0 is the
   significand, ARG1 the integer exponent adjustment, and TYPE the
   result type.  If LDEXP is true then we can assume the base is two.
   If it's false, then we have to check the mode of the TYPE parameter
   in certain cases.  Return the folded tree, or NULL_TREE if no
   simplification can be made.  */

static tree
fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
			    tree type, bool ldexp)
{
  if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
    {
      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0.  */
      if (real_zerop (arg0) || integer_zerop (arg1)
	  || (TREE_CODE (arg0) == REAL_CST
	      && !real_isfinite (&TREE_REAL_CST (arg0))))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* If both arguments are constant, then try to evaluate it.
	 For non-ldexp builtins this is only valid when TYPE's
	 radix is 2.  */
      if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
	  && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
	  && host_integerp (arg1, 0))
	{
	  /* Bound the maximum adjustment to twice the range of the
	     mode's valid exponents.  Use abs to ensure the range is
	     positive as a sanity check.  */
	  const long max_exp_adj = 2 *
	    labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
		  - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);

	  /* Get the user-requested adjustment.  */
	  const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);

	  /* The requested adjustment must be inside this range.  This
	     is a preliminary cap to avoid things like overflow, we
	     may still fail to compute the result for other reasons.  */
	  if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
	    {
	      REAL_VALUE_TYPE initial_result;

	      real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);

	      /* Ensure we didn't overflow.  */
	      if (! real_isinf (&initial_result))
		{
		  const REAL_VALUE_TYPE trunc_result
		    = real_value_truncate (TYPE_MODE (type), initial_result);

		  /* Only proceed if the target mode can hold the
		     resulting value.  */
		  if (REAL_VALUES_EQUAL (initial_result, trunc_result))
		    return build_real (type, trunc_result);
		}
	    }
	}
    }

  return NULL_TREE;
}
9377
/* Fold a call to builtin modf with arguments ARG0 (the value) and
   ARG1 (a pointer that receives the integral part).  RETTYPE is the
   real type of the result.  Only a constant ARG0 is folded; return
   NULL_TREE otherwise.  */

static tree
fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
{
  if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg0);

  if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
    return NULL_TREE;

  arg1 = build_fold_indirect_ref_loc (loc, arg1);

  /* Proceed if a valid pointer type was passed in.  */
  if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
      REAL_VALUE_TYPE trunc, frac;

      /* All four real value classes are covered; no default needed.  */
      switch (value->cl)
	{
	case rvc_nan:
	case rvc_zero:
	  /* For +-NaN or +-0, return (*arg1 = arg0, arg0).  */
	  trunc = frac = *value;
	  break;
	case rvc_inf:
	  /* For +-Inf, return (*arg1 = arg0, +-0).  */
	  frac = dconst0;
	  frac.sign = value->sign;
	  trunc = *value;
	  break;
	case rvc_normal:
	  /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)).  */
	  real_trunc (&trunc, VOIDmode, value);
	  real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
	  /* If the original number was negative and already
	     integral, then the fractional part is -0.0.  */
	  if (value->sign && frac.cl == rvc_zero)
	    frac.sign = value->sign;
	  break;
	}

      /* Create the COMPOUND_EXPR (*arg1 = trunc, frac).  */
      arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
			      build_real (rettype, trunc));
      TREE_SIDE_EFFECTS (arg1) = 1;
      return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
			      build_real (rettype, frac));
    }

  return NULL_TREE;
}
9433
/* Given a location LOC, an interclass builtin function decl FNDECL
   and its single argument ARG, return an folded expression computing
   the same, or NULL_TREE if we either couldn't or didn't want to fold
   (the latter happen if there's an RTL instruction available).  */

static tree
fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
{
  enum machine_mode mode;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Prefer the target's direct instruction when one exists.  */
  if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
    return NULL_TREE;

  mode = TYPE_MODE (TREE_TYPE (arg));

  /* If there is no optab, try generic code.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    tree result;

    CASE_FLT_FN (BUILT_IN_ISINF):
      {
	/* isinf(x) -> isgreater(fabs(x),DBL_MAX).  */
	tree const isgr_fn = built_in_decls[BUILT_IN_ISGREATER];
	tree const type = TREE_TYPE (arg);
	REAL_VALUE_TYPE r;
	char buf[128];

	/* Largest finite value of MODE, as a string.  */
	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
	real_from_string (&r, buf);
	result = build_call_expr (isgr_fn, 2,
				  fold_build1_loc (loc, ABS_EXPR, type, arg),
				  build_real (type, r));
	return result;
      }
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_ISFINITE:
      {
	/* isfinite(x) -> islessequal(fabs(x),DBL_MAX).  */
	tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
	tree const type = TREE_TYPE (arg);
	REAL_VALUE_TYPE r;
	char buf[128];

	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
	real_from_string (&r, buf);
	result = build_call_expr (isle_fn, 2,
				  fold_build1_loc (loc, ABS_EXPR, type, arg),
				  build_real (type, r));
	return result;
      }
    case BUILT_IN_ISNORMAL:
      {
	/* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
	   islessequal(fabs(x),DBL_MAX).  */
	tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
	tree const isge_fn = built_in_decls[BUILT_IN_ISGREATEREQUAL];
	tree const type = TREE_TYPE (arg);
	REAL_VALUE_TYPE rmax, rmin;
	char buf[128];

	/* Largest finite value of MODE...  */
	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
	real_from_string (&rmax, buf);
	/* ...and its smallest normal value, 2**(emin-1).  */
	sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
	real_from_string (&rmin, buf);
	/* Save fabs(x) so that it is evaluated only once.  */
	arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
	result = build_call_expr (isle_fn, 2, arg,
				  build_real (type, rmax));
	result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
			      build_call_expr (isge_fn, 2, arg,
					       build_real (type, rmin)));
	return result;
      }
    default:
      break;
    }

  return NULL_TREE;
}
9523
/* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
   ARG is the argument for the call; BUILTIN_INDEX identifies which of
   the classification builtins is being folded.  Return NULL_TREE if
   no simplification can be made.  */

static tree
fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  REAL_VALUE_TYPE r;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  switch (builtin_index)
    {
    case BUILT_IN_ISINF:
      /* Modes with no infinities make isinf trivially false; keep ARG
	 for its side effects.  */
      if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg);

      if (TREE_CODE (arg) == REAL_CST)
	{
	  r = TREE_REAL_CST (arg);
	  if (real_isinf (&r))
	    return real_compare (GT_EXPR, &r, &dconst0)
		   ? integer_one_node : integer_minus_one_node;
	  else
	    return integer_zero_node;
	}

      /* Non-constant isinf is not folded here.  */
      return NULL_TREE;

    case BUILT_IN_ISINF_SIGN:
      {
	/* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
	/* In a boolean context, GCC will fold the inner COND_EXPR to
	   1.  So e.g. "if (isinf_sign(x))" would be folded to just
	   "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
	tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
	tree isinf_fn = built_in_decls[BUILT_IN_ISINF];
	tree tmp = NULL_TREE;

	/* ARG is used twice below; save it so side effects happen
	   only once.  */
	arg = builtin_save_expr (arg);

	if (signbit_fn && isinf_fn)
	  {
	    tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
	    tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);

	    /* Normalize both calls to 0/1 truth values.  */
	    signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
					    signbit_call, integer_zero_node);
	    isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
					  isinf_call, integer_zero_node);

	    tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
				   integer_minus_one_node, integer_one_node);
	    tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
				   isinf_call, tmp,
				   integer_zero_node);
	  }

	return tmp;
      }

    case BUILT_IN_ISFINITE:
      /* Without NaNs and infinities everything is finite; keep ARG
	 for its side effects.  */
      if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
	  && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
	return omit_one_operand_loc (loc, type, integer_one_node, arg);

      if (TREE_CODE (arg) == REAL_CST)
	{
	  r = TREE_REAL_CST (arg);
	  return real_isfinite (&r) ? integer_one_node : integer_zero_node;
	}

      return NULL_TREE;

    case BUILT_IN_ISNAN:
      /* Without NaNs, isnan is trivially false; keep ARG for its side
	 effects.  */
      if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg);

      if (TREE_CODE (arg) == REAL_CST)
	{
	  r = TREE_REAL_CST (arg);
	  return real_isnan (&r) ? integer_one_node : integer_zero_node;
	}

      /* isnan(x) -> x unordered with itself; save ARG since it is
	 used twice.  */
      arg = builtin_save_expr (arg);
      return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);

    default:
      gcc_unreachable ();
    }
}
9616
/* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
   This builtin will generate code to return the appropriate floating
   point classification depending on the value of the floating point
   number passed in.  The possible return values must be supplied as
   int arguments to the call in the following order: FP_NAN, FP_INFINITE,
   FP_NORMAL, FP_SUBNORMAL and FP_ZERO.  The ellipses is for exactly
   one floating point argument which is "type generic".  */

static tree
fold_builtin_fpclassify (location_t loc, tree exp)
{
  tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
    arg, type, res, tmp;
  enum machine_mode mode;
  REAL_VALUE_TYPE r;
  char buf[128];

  /* Verify the required arguments in the original call.  */
  if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
			 INTEGER_TYPE, INTEGER_TYPE,
			 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_TREE;

  fp_nan = CALL_EXPR_ARG (exp, 0);
  fp_infinite = CALL_EXPR_ARG (exp, 1);
  fp_normal = CALL_EXPR_ARG (exp, 2);
  fp_subnormal = CALL_EXPR_ARG (exp, 3);
  fp_zero = CALL_EXPR_ARG (exp, 4);
  arg = CALL_EXPR_ARG (exp, 5);
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (type);
  /* Work on fabs(arg); saved so it is evaluated only once.  */
  arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));

  /* fpclassify(x) ->
       isnan(x) ? FP_NAN :
	 (fabs(x) == Inf ? FP_INFINITE :
	   (fabs(x) >= DBL_MIN ? FP_NORMAL :
	     (x == 0 ? FP_ZERO : FP_SUBNORMAL))).
     The COND_EXPR chain below is built innermost-first, starting from
     the zero/subnormal test and wrapping each outer test around it.  */

  tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
			 build_real (type, dconst0));
  res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
			 tmp, fp_zero, fp_subnormal);

  /* Smallest normal value of MODE is 2**(emin-1).  */
  sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
  real_from_string (&r, buf);
  tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
			 arg, build_real (type, r));
  res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);

  if (HONOR_INFINITIES (mode))
    {
      real_inf (&r);
      tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
			     build_real (type, r));
      res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
			     fp_infinite, res);
    }

  if (HONOR_NANS (mode))
    {
      /* ORDERED_EXPR selects RES when ARG is not a NaN.  */
      tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
      res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
    }

  return res;
}
9684
/* Fold a call to an unordered comparison function such as
   __builtin_isgreater().  FNDECL is the FUNCTION_DECL for the function
   being called and ARG0 and ARG1 are the arguments for the call.
   UNORDERED_CODE and ORDERED_CODE are comparison codes that give
   the opposite of the desired result.  UNORDERED_CODE is used
   for modes that can hold NaNs and ORDERED_CODE is used for
   the rest.  */

static tree
fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
			    enum tree_code unordered_code,
			    enum tree_code ordered_code)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum tree_code code;
  tree type0, type1;
  enum tree_code code0, code1;
  tree cmp_type = NULL_TREE;

  type0 = TREE_TYPE (arg0);
  type1 = TREE_TYPE (arg1);

  code0 = TREE_CODE (type0);
  code1 = TREE_CODE (type1);

  if (code0 == REAL_TYPE && code1 == REAL_TYPE)
    /* Choose the wider of two real types.  */
    cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
      ? type0 : type1;
  else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
    cmp_type = type0;
  else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
    cmp_type = type1;

  /* NOTE(review): if neither operand is of real or integer type,
     CMP_TYPE stays NULL_TREE when passed to fold_convert_loc below;
     presumably callers only reach here with arithmetic operands --
     confirm against the call sites.  */
  arg0 = fold_convert_loc (loc, cmp_type, arg0);
  arg1 = fold_convert_loc (loc, cmp_type, arg1);

  if (unordered_code == UNORDERED_EXPR)
    {
      /* isunordered(): false when the mode has no NaNs; keep the
	 operands for their side effects.  */
      if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
	return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
      return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
    }

  /* The codes express the opposite of the desired result, so negate
     the chosen comparison.  */
  code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
						   : ordered_code;
  return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
			  fold_build2_loc (loc, code, type, arg0, arg1));
}
9734
/* Fold a call to built-in function FNDECL with 0 arguments.
   IGNORE is true if the result of the function call is ignored.  This
   function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_0 (location_t loc, tree fndecl, bool ignore ATTRIBUTE_UNUSED)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  switch (fcode)
    {
    CASE_FLT_FN (BUILT_IN_INF):
    case BUILT_IN_INFD32:
    case BUILT_IN_INFD64:
    case BUILT_IN_INFD128:
      /* The second argument distinguishes the __builtin_inf family
	 (true) from HUGE_VAL (false); see fold_builtin_inf.  */
      return fold_builtin_inf (loc, type, true);

    CASE_FLT_FN (BUILT_IN_HUGE_VAL):
      return fold_builtin_inf (loc, type, false);

    case BUILT_IN_CLASSIFY_TYPE:
      /* No argument: classify the "no type" case.  */
      return fold_builtin_classify_type (NULL_TREE);

    default:
      break;
    }
  return NULL_TREE;
}
9763
/* Fold a call to built-in function FNDECL with 1 argument, ARG0.
   IGNORE is true if the result of the function call is ignored.  This
   function returns NULL_TREE if no simplification was possible.

   Most cases dispatch to a dedicated fold_builtin_* helper; the
   do_mpfr_arg1/do_mpc_arg1 cases fold calls with constant arguments by
   evaluating the corresponding MPFR/MPC function.  */

static tree
fold_builtin_1 (location_t loc, tree fndecl, tree arg0, bool ignore)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  switch (fcode)
    {
    case BUILT_IN_CONSTANT_P:
      {
	tree val = fold_builtin_constant_p (arg0);

	/* Gimplification will pull the CALL_EXPR for the builtin out of
	   an if condition.  When not optimizing, we'll not CSE it back.
	   To avoid link error types of regressions, return false now.  */
	if (!val && !optimize)
	  val = integer_zero_node;

	return val;
      }

    case BUILT_IN_CLASSIFY_TYPE:
      return fold_builtin_classify_type (arg0);

    case BUILT_IN_STRLEN:
      return fold_builtin_strlen (loc, type, arg0);

    CASE_FLT_FN (BUILT_IN_FABS):
      return fold_builtin_fabs (loc, arg0, type);

    case BUILT_IN_ABS:
    case BUILT_IN_LABS:
    case BUILT_IN_LLABS:
    case BUILT_IN_IMAXABS:
      return fold_builtin_abs (loc, arg0, type);

    /* Complex-argument builtins: each case first checks that the
       argument really is a complex type with a real component.  */
    CASE_FLT_FN (BUILT_IN_CONJ):
      if (validate_arg (arg0, COMPLEX_TYPE)
	&& TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
    break;

    CASE_FLT_FN (BUILT_IN_CREAL):
      if (validate_arg (arg0, COMPLEX_TYPE)
	&& TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));;
    break;

    CASE_FLT_FN (BUILT_IN_CIMAG):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
    break;

    CASE_FLT_FN (BUILT_IN_CCOS):
      return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ false);

    CASE_FLT_FN (BUILT_IN_CCOSH):
      return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ true);

    CASE_FLT_FN (BUILT_IN_CPROJ):
      return fold_builtin_cproj(loc, arg0, type);

    /* Complex transcendental functions folded via MPC for constant
       arguments.  */
    CASE_FLT_FN (BUILT_IN_CSIN):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_sin);
    break;

    CASE_FLT_FN (BUILT_IN_CSINH):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_sinh);
    break;

    CASE_FLT_FN (BUILT_IN_CTAN):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_tan);
    break;

    CASE_FLT_FN (BUILT_IN_CTANH):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_tanh);
    break;

    CASE_FLT_FN (BUILT_IN_CLOG):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_log);
    break;

    CASE_FLT_FN (BUILT_IN_CSQRT):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_sqrt);
    break;

    CASE_FLT_FN (BUILT_IN_CASIN):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_asin);
    break;

    CASE_FLT_FN (BUILT_IN_CACOS):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_acos);
    break;

    CASE_FLT_FN (BUILT_IN_CATAN):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_atan);
    break;

    CASE_FLT_FN (BUILT_IN_CASINH):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_asinh);
    break;

    CASE_FLT_FN (BUILT_IN_CACOSH):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_acosh);
    break;

    CASE_FLT_FN (BUILT_IN_CATANH):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_atanh);
    break;

    CASE_FLT_FN (BUILT_IN_CABS):
      return fold_builtin_cabs (loc, arg0, type, fndecl);

    CASE_FLT_FN (BUILT_IN_CARG):
      return fold_builtin_carg (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_SQRT):
      return fold_builtin_sqrt (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_CBRT):
      return fold_builtin_cbrt (loc, arg0, type);

    /* Real transcendental functions folded via MPFR for constant
       arguments; the trailing arguments describe the valid input
       interval (see do_mpfr_arg1).  */
    CASE_FLT_FN (BUILT_IN_ASIN):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_asin,
			     &dconstm1, &dconst1, true);
    break;

    CASE_FLT_FN (BUILT_IN_ACOS):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_acos,
			     &dconstm1, &dconst1, true);
    break;

    CASE_FLT_FN (BUILT_IN_ATAN):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
    break;

    CASE_FLT_FN (BUILT_IN_ASINH):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
    break;

    CASE_FLT_FN (BUILT_IN_ACOSH):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_acosh,
			     &dconst1, NULL, true);
    break;

    CASE_FLT_FN (BUILT_IN_ATANH):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_atanh,
			     &dconstm1, &dconst1, false);
    break;

    CASE_FLT_FN (BUILT_IN_SIN):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
    break;

    CASE_FLT_FN (BUILT_IN_COS):
      return fold_builtin_cos (loc, arg0, type, fndecl);

    CASE_FLT_FN (BUILT_IN_TAN):
      return fold_builtin_tan (arg0, type);

    CASE_FLT_FN (BUILT_IN_CEXP):
      return fold_builtin_cexp (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_CEXPI):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
    break;

    CASE_FLT_FN (BUILT_IN_SINH):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
    break;

    CASE_FLT_FN (BUILT_IN_COSH):
      return fold_builtin_cosh (loc, arg0, type, fndecl);

    CASE_FLT_FN (BUILT_IN_TANH):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
    break;

    CASE_FLT_FN (BUILT_IN_ERF):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
    break;

    CASE_FLT_FN (BUILT_IN_ERFC):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
    break;

    CASE_FLT_FN (BUILT_IN_TGAMMA):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
    break;

    CASE_FLT_FN (BUILT_IN_EXP):
      return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);

    CASE_FLT_FN (BUILT_IN_EXP2):
      return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);

    CASE_FLT_FN (BUILT_IN_EXP10):
    CASE_FLT_FN (BUILT_IN_POW10):
      return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);

    CASE_FLT_FN (BUILT_IN_EXPM1):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
    break;

    CASE_FLT_FN (BUILT_IN_LOG):
      return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log);

    CASE_FLT_FN (BUILT_IN_LOG2):
      return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log2);

    CASE_FLT_FN (BUILT_IN_LOG10):
      return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log10);

    CASE_FLT_FN (BUILT_IN_LOG1P):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_log1p,
			     &dconstm1, NULL, false);
    break;

    CASE_FLT_FN (BUILT_IN_J0):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_j0,
			     NULL, NULL, 0);
    break;

    CASE_FLT_FN (BUILT_IN_J1):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_j1,
			     NULL, NULL, 0);
    break;

    CASE_FLT_FN (BUILT_IN_Y0):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_y0,
			     &dconst0, NULL, false);
    break;

    CASE_FLT_FN (BUILT_IN_Y1):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_y1,
			     &dconst0, NULL, false);
    break;

    CASE_FLT_FN (BUILT_IN_NAN):
    case BUILT_IN_NAND32:
    case BUILT_IN_NAND64:
    case BUILT_IN_NAND128:
      return fold_builtin_nan (arg0, type, true);

    CASE_FLT_FN (BUILT_IN_NANS):
      return fold_builtin_nan (arg0, type, false);

    CASE_FLT_FN (BUILT_IN_FLOOR):
      return fold_builtin_floor (loc, fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_CEIL):
      return fold_builtin_ceil (loc, fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_TRUNC):
      return fold_builtin_trunc (loc, fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_ROUND):
      return fold_builtin_round (loc, fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return fold_trunc_transparent_mathfn (loc, fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_LCEIL):
    CASE_FLT_FN (BUILT_IN_LLCEIL):
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_LLROUND):
      return fold_builtin_int_roundingfn (loc, fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_LLRINT):
      return fold_fixed_mathfn (loc, fndecl, arg0);

    case BUILT_IN_BSWAP32:
    case BUILT_IN_BSWAP64:
      return fold_builtin_bswap (fndecl, arg0);

    CASE_INT_FN (BUILT_IN_FFS):
    CASE_INT_FN (BUILT_IN_CLZ):
    CASE_INT_FN (BUILT_IN_CTZ):
    CASE_INT_FN (BUILT_IN_POPCOUNT):
    CASE_INT_FN (BUILT_IN_PARITY):
      return fold_builtin_bitop (fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_SIGNBIT):
      return fold_builtin_signbit (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
      return fold_builtin_significand (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_ILOGB):
    CASE_FLT_FN (BUILT_IN_LOGB):
      return fold_builtin_logb (loc, arg0, type);

    case BUILT_IN_ISASCII:
      return fold_builtin_isascii (loc, arg0);

    case BUILT_IN_TOASCII:
      return fold_builtin_toascii (loc, arg0);

    case BUILT_IN_ISDIGIT:
      return fold_builtin_isdigit (loc, arg0);

    /* Classification builtins: try the generic classification fold
       first, then fall back to the interclass math folder.  */
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_FINITED32:
    case BUILT_IN_FINITED64:
    case BUILT_IN_FINITED128:
    case BUILT_IN_ISFINITE:
      {
	tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
	if (ret)
	  return ret;
	return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
      }

    CASE_FLT_FN (BUILT_IN_ISINF):
    case BUILT_IN_ISINFD32:
    case BUILT_IN_ISINFD64:
    case BUILT_IN_ISINFD128:
      {
	tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
	if (ret)
	  return ret;
	return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
      }

    case BUILT_IN_ISNORMAL:
      return fold_builtin_interclass_mathfn (loc, fndecl, arg0);

    case BUILT_IN_ISINF_SIGN:
      return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);

    CASE_FLT_FN (BUILT_IN_ISNAN):
    case BUILT_IN_ISNAND32:
    case BUILT_IN_ISNAND64:
    case BUILT_IN_ISNAND128:
      return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);

    case BUILT_IN_PRINTF:
    case BUILT_IN_PRINTF_UNLOCKED:
    case BUILT_IN_VPRINTF:
      return fold_builtin_printf (loc, fndecl, arg0, NULL_TREE, ignore, fcode);

    case BUILT_IN_FREE:
      /* free (NULL) is a no-op; fold it away entirely.  */
      if (integer_zerop (arg0))
	return build_empty_stmt (loc);
      break;

    default:
      break;
    }

  return NULL_TREE;

}
10168
/* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
   IGNORE is true if the result of the function call is ignored.  This
   function returns NULL_TREE if no simplification was possible.

   As with fold_builtin_1, do_mpfr_*/do_mpc_* cases fold calls with
   constant arguments by evaluating them in extended precision.  */

static tree
fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1, bool ignore)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  switch (fcode)
    {
    CASE_FLT_FN (BUILT_IN_JN):
      if (validate_arg (arg0, INTEGER_TYPE)
	  && validate_arg (arg1, REAL_TYPE))
	return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
    break;

    CASE_FLT_FN (BUILT_IN_YN):
      if (validate_arg (arg0, INTEGER_TYPE)
	  && validate_arg (arg1, REAL_TYPE))
	return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
				 &dconst0, false);
    break;

    CASE_FLT_FN (BUILT_IN_DREM):
    CASE_FLT_FN (BUILT_IN_REMAINDER):
      if (validate_arg (arg0, REAL_TYPE)
          && validate_arg(arg1, REAL_TYPE))
        return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
    break;

    CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
    CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg(arg1, POINTER_TYPE))
	return do_mpfr_lgamma_r (arg0, arg1, type);
    break;

    CASE_FLT_FN (BUILT_IN_ATAN2):
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg(arg1, REAL_TYPE))
	return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
    break;

    CASE_FLT_FN (BUILT_IN_FDIM):
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg(arg1, REAL_TYPE))
	return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
    break;

    CASE_FLT_FN (BUILT_IN_HYPOT):
      return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_CPOW):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
	  && validate_arg (arg1, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
	return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
    break;

    CASE_FLT_FN (BUILT_IN_LDEXP):
      return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
      return fold_builtin_load_exponent (loc, arg0, arg1,
					 type, /*ldexp=*/false);

    CASE_FLT_FN (BUILT_IN_FREXP):
      return fold_builtin_frexp (loc, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_MODF):
      return fold_builtin_modf (loc, arg0, arg1, type);

    case BUILT_IN_BZERO:
      return fold_builtin_bzero (loc, arg0, arg1, ignore);

    case BUILT_IN_FPUTS:
      return fold_builtin_fputs (loc, arg0, arg1, ignore, false, NULL_TREE);

    case BUILT_IN_FPUTS_UNLOCKED:
      return fold_builtin_fputs (loc, arg0, arg1, ignore, true, NULL_TREE);

    case BUILT_IN_STRSTR:
      return fold_builtin_strstr (loc, arg0, arg1, type);

    case BUILT_IN_STRCAT:
      return fold_builtin_strcat (loc, arg0, arg1);

    case BUILT_IN_STRSPN:
      return fold_builtin_strspn (loc, arg0, arg1);

    case BUILT_IN_STRCSPN:
      return fold_builtin_strcspn (loc, arg0, arg1);

    case BUILT_IN_STRCHR:
    case BUILT_IN_INDEX:
      return fold_builtin_strchr (loc, arg0, arg1, type);

    case BUILT_IN_STRRCHR:
    case BUILT_IN_RINDEX:
      return fold_builtin_strrchr (loc, arg0, arg1, type);

    case BUILT_IN_STRCPY:
      return fold_builtin_strcpy (loc, fndecl, arg0, arg1, NULL_TREE);

    case BUILT_IN_STPCPY:
      /* When the result is unused, stpcpy can be lowered to the
	 cheaper strcpy; otherwise use the dedicated folder.  */
      if (ignore)
	{
	  tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
	  if (!fn)
	    break;

	  return build_call_expr_loc (loc, fn, 2, arg0, arg1);
	}
      else
	return fold_builtin_stpcpy (loc, fndecl, arg0, arg1);
      break;

    case BUILT_IN_STRCMP:
      return fold_builtin_strcmp (loc, arg0, arg1);

    case BUILT_IN_STRPBRK:
      return fold_builtin_strpbrk (loc, arg0, arg1, type);

    case BUILT_IN_EXPECT:
      return fold_builtin_expect (loc, arg0, arg1);

    CASE_FLT_FN (BUILT_IN_POW):
      return fold_builtin_pow (loc, fndecl, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_POWI):
      return fold_builtin_powi (loc, fndecl, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_COPYSIGN):
      return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_FMIN):
      return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);

    CASE_FLT_FN (BUILT_IN_FMAX):
      return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);

    /* The unordered comparison builtins are expressed through their
       inverse tree comparison codes; see fold_builtin_unordered_cmp.  */
    case BUILT_IN_ISGREATER:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNLE_EXPR, LE_EXPR);
    case BUILT_IN_ISGREATEREQUAL:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNLT_EXPR, LT_EXPR);
    case BUILT_IN_ISLESS:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNGE_EXPR, GE_EXPR);
    case BUILT_IN_ISLESSEQUAL:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNGT_EXPR, GT_EXPR);
    case BUILT_IN_ISLESSGREATER:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
    case BUILT_IN_ISUNORDERED:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNORDERED_EXPR,
					 NOP_EXPR);

      /* We do the folding for va_start in the expander.  */
    case BUILT_IN_VA_START:
      break;

    case BUILT_IN_SPRINTF:
      return fold_builtin_sprintf (loc, arg0, arg1, NULL_TREE, ignore);

    case BUILT_IN_OBJECT_SIZE:
      return fold_builtin_object_size (arg0, arg1);

    case BUILT_IN_PRINTF:
    case BUILT_IN_PRINTF_UNLOCKED:
    case BUILT_IN_VPRINTF:
      return fold_builtin_printf (loc, fndecl, arg0, arg1, ignore, fcode);

    case BUILT_IN_PRINTF_CHK:
    case BUILT_IN_VPRINTF_CHK:
      /* The first argument is the checking flag; only fold when it is a
	 side-effect-free integer.  */
      if (!validate_arg (arg0, INTEGER_TYPE)
	  || TREE_SIDE_EFFECTS (arg0))
	return NULL_TREE;
      else
	return fold_builtin_printf (loc, fndecl,
				    arg1, NULL_TREE, ignore, fcode);
    break;

    case BUILT_IN_FPRINTF:
    case BUILT_IN_FPRINTF_UNLOCKED:
    case BUILT_IN_VFPRINTF:
      return fold_builtin_fprintf (loc, fndecl, arg0, arg1, NULL_TREE,
				   ignore, fcode);

    default:
      break;
    }
  return NULL_TREE;
}
10369
/* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
   and ARG2.  IGNORE is true if the result of the function call is ignored.
   This function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_3 (location_t loc, tree fndecl,
		tree arg0, tree arg1, tree arg2, bool ignore)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  switch (fcode)
    {

    CASE_FLT_FN (BUILT_IN_SINCOS):
      return fold_builtin_sincos (loc, arg0, arg1, arg2);

    CASE_FLT_FN (BUILT_IN_FMA):
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg(arg1, REAL_TYPE)
	  && validate_arg(arg2, REAL_TYPE))
	return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
    break;

    CASE_FLT_FN (BUILT_IN_REMQUO):
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg(arg1, REAL_TYPE)
	  && validate_arg(arg2, POINTER_TYPE))
	return do_mpfr_remquo (arg0, arg1, arg2);
    break;

    case BUILT_IN_MEMSET:
      return fold_builtin_memset (loc, arg0, arg1, arg2, type, ignore);

    case BUILT_IN_BCOPY:
      /* bcopy has its source/destination arguments swapped relative to
	 memmove, hence arg1/arg0 order here.  */
      return fold_builtin_memory_op (loc, arg1, arg0, arg2,
				     void_type_node, true, /*endp=*/3);

    case BUILT_IN_MEMCPY:
      return fold_builtin_memory_op (loc, arg0, arg1, arg2,
				     type, ignore, /*endp=*/0);

    case BUILT_IN_MEMPCPY:
      return fold_builtin_memory_op (loc, arg0, arg1, arg2,
				     type, ignore, /*endp=*/1);

    case BUILT_IN_MEMMOVE:
      return fold_builtin_memory_op (loc, arg0, arg1, arg2,
				     type, ignore, /*endp=*/3);

    case BUILT_IN_STRNCAT:
      return fold_builtin_strncat (loc, arg0, arg1, arg2);

    case BUILT_IN_STRNCPY:
      return fold_builtin_strncpy (loc, fndecl, arg0, arg1, arg2, NULL_TREE);

    case BUILT_IN_STRNCMP:
      return fold_builtin_strncmp (loc, arg0, arg1, arg2);

    case BUILT_IN_MEMCHR:
      return fold_builtin_memchr (loc, arg0, arg1, arg2, type);

    case BUILT_IN_BCMP:
    case BUILT_IN_MEMCMP:
      return fold_builtin_memcmp (loc, arg0, arg1, arg2);;

    case BUILT_IN_SPRINTF:
      return fold_builtin_sprintf (loc, arg0, arg1, arg2, ignore);

    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      return fold_builtin_stxcpy_chk (loc, fndecl, arg0, arg1, arg2, NULL_TREE,
				      ignore, fcode);

    case BUILT_IN_STRCAT_CHK:
      return fold_builtin_strcat_chk (loc, fndecl, arg0, arg1, arg2);

    case BUILT_IN_PRINTF_CHK:
    case BUILT_IN_VPRINTF_CHK:
      /* Only fold when the checking-flag argument is a side-effect-free
	 integer.  */
      if (!validate_arg (arg0, INTEGER_TYPE)
	  || TREE_SIDE_EFFECTS (arg0))
	return NULL_TREE;
      else
	return fold_builtin_printf (loc, fndecl, arg1, arg2, ignore, fcode);
    break;

    case BUILT_IN_FPRINTF:
    case BUILT_IN_FPRINTF_UNLOCKED:
    case BUILT_IN_VFPRINTF:
      return fold_builtin_fprintf (loc, fndecl, arg0, arg1, arg2,
				   ignore, fcode);

    case BUILT_IN_FPRINTF_CHK:
    case BUILT_IN_VFPRINTF_CHK:
      if (!validate_arg (arg1, INTEGER_TYPE)
	  || TREE_SIDE_EFFECTS (arg1))
	return NULL_TREE;
      else
	return fold_builtin_fprintf (loc, fndecl, arg0, arg2, NULL_TREE,
				     ignore, fcode);

    default:
      break;
    }
  return NULL_TREE;
}
10475
/* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
   ARG2, and ARG3.  IGNORE is true if the result of the function call is
   ignored.  This function returns NULL_TREE if no simplification was
   possible.  Only the _chk variants take four arguments.  */

static tree
fold_builtin_4 (location_t loc, tree fndecl,
		tree arg0, tree arg1, tree arg2, tree arg3, bool ignore)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      return fold_builtin_memory_chk (loc, fndecl, arg0, arg1, arg2, arg3,
				      NULL_TREE, ignore,
				      DECL_FUNCTION_CODE (fndecl));

    case BUILT_IN_STRNCPY_CHK:
      return fold_builtin_strncpy_chk (loc, arg0, arg1, arg2, arg3, NULL_TREE);

    case BUILT_IN_STRNCAT_CHK:
      return fold_builtin_strncat_chk (loc, fndecl, arg0, arg1, arg2, arg3);

    case BUILT_IN_FPRINTF_CHK:
    case BUILT_IN_VFPRINTF_CHK:
      /* Only fold when the checking-flag argument (arg1) is a
	 side-effect-free integer.  */
      if (!validate_arg (arg1, INTEGER_TYPE)
	  || TREE_SIDE_EFFECTS (arg1))
	return NULL_TREE;
      else
	return fold_builtin_fprintf (loc, fndecl, arg0, arg2, arg3,
				     ignore, fcode);
      break;

    default:
      break;
    }
  return NULL_TREE;
}
10518
10519 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10520 arguments, where NARGS <= 4. IGNORE is true if the result of the
10521 function call is ignored. This function returns NULL_TREE if no
10522 simplification was possible. Note that this only folds builtins with
10523 fixed argument patterns. Foldings that do varargs-to-varargs
10524 transformations, or that match calls with more than 4 arguments,
10525 need to be handled with fold_builtin_varargs instead. */
10526
10527 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10528
10529 static tree
10530 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool ignore)
10531 {
10532 tree ret = NULL_TREE;
10533
10534 switch (nargs)
10535 {
10536 case 0:
10537 ret = fold_builtin_0 (loc, fndecl, ignore);
10538 break;
10539 case 1:
10540 ret = fold_builtin_1 (loc, fndecl, args[0], ignore);
10541 break;
10542 case 2:
10543 ret = fold_builtin_2 (loc, fndecl, args[0], args[1], ignore);
10544 break;
10545 case 3:
10546 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2], ignore);
10547 break;
10548 case 4:
10549 ret = fold_builtin_4 (loc, fndecl, args[0], args[1], args[2], args[3],
10550 ignore);
10551 break;
10552 default:
10553 break;
10554 }
10555 if (ret)
10556 {
10557 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10558 SET_EXPR_LOCATION (ret, loc);
10559 TREE_NO_WARNING (ret) = 1;
10560 return ret;
10561 }
10562 return NULL_TREE;
10563 }
10564
10565 /* Builtins with folding operations that operate on "..." arguments
10566 need special handling; we need to store the arguments in a convenient
10567 data structure before attempting any folding. Fortunately there are
10568 only a few builtins that fall into this category. FNDECL is the
10569 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
10570 result of the function call is ignored. */
10571
10572 static tree
10573 fold_builtin_varargs (location_t loc, tree fndecl, tree exp,
10574 bool ignore ATTRIBUTE_UNUSED)
10575 {
10576 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10577 tree ret = NULL_TREE;
10578
10579 switch (fcode)
10580 {
10581 case BUILT_IN_SPRINTF_CHK:
10582 case BUILT_IN_VSPRINTF_CHK:
10583 ret = fold_builtin_sprintf_chk (loc, exp, fcode);
10584 break;
10585
10586 case BUILT_IN_SNPRINTF_CHK:
10587 case BUILT_IN_VSNPRINTF_CHK:
10588 ret = fold_builtin_snprintf_chk (loc, exp, NULL_TREE, fcode);
10589 break;
10590
10591 case BUILT_IN_FPCLASSIFY:
10592 ret = fold_builtin_fpclassify (loc, exp);
10593 break;
10594
10595 default:
10596 break;
10597 }
10598 if (ret)
10599 {
10600 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10601 SET_EXPR_LOCATION (ret, loc);
10602 TREE_NO_WARNING (ret) = 1;
10603 return ret;
10604 }
10605 return NULL_TREE;
10606 }
10607
10608 /* Return true if FNDECL shouldn't be folded right now.
10609 If a built-in function has an inline attribute always_inline
10610 wrapper, defer folding it after always_inline functions have
10611 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
10612 might not be performed. */
10613
10614 static bool
10615 avoid_folding_inline_builtin (tree fndecl)
10616 {
10617 return (DECL_DECLARED_INLINE_P (fndecl)
10618 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10619 && cfun
10620 && !cfun->always_inline_functions_inlined
10621 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
10622 }
10623
/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.

   EXP is the CALL_EXPR; IGNORE is true if its result is unused.
   Returns the folded replacement, or NULL_TREE to keep the call.  */

tree
fold_call_expr (location_t loc, tree exp, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = get_callee_fndecl (exp);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
	 yet.  Defer folding until we see all the arguments
	 (after inlining).  */
      && !CALL_EXPR_VA_ARG_PACK (exp))
    {
      int nargs = call_expr_nargs (exp);

      /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
	 instead last argument is __builtin_va_arg_pack ().  Defer folding
	 even in that case, until arguments are finalized.  */
      if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
	{
	  tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
	  if (fndecl2
	      && TREE_CODE (fndecl2) == FUNCTION_DECL
	      && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
	    return NULL_TREE;
	}

      /* Calls behind always_inline wrappers must survive until after
	 inlining (see avoid_folding_inline_builtin).  */
      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;

      /* Machine-specific builtins are folded by the target hook.  */
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
				     CALL_EXPR_ARGP (exp), ignore);
      else
	{
	  /* Try the fixed-arity folders first, then the varargs ones.  */
	  if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
	    {
	      tree *args = CALL_EXPR_ARGP (exp);
	      ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
	    }
	  if (!ret)
	    ret = fold_builtin_varargs (loc, fndecl, exp, ignore);
	  if (ret)
	    return ret;
	}
    }
  return NULL_TREE;
}
10677
10678 /* Conveniently construct a function call expression. FNDECL names the
10679 function to be called and ARGLIST is a TREE_LIST of arguments. */
10680
10681 tree
10682 build_function_call_expr (location_t loc, tree fndecl, tree arglist)
10683 {
10684 tree fntype = TREE_TYPE (fndecl);
10685 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10686 int n = list_length (arglist);
10687 tree *argarray = (tree *) alloca (n * sizeof (tree));
10688 int i;
10689
10690 for (i = 0; i < n; i++, arglist = TREE_CHAIN (arglist))
10691 argarray[i] = TREE_VALUE (arglist);
10692 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
10693 }
10694
10695 /* Conveniently construct a function call expression. FNDECL names the
10696 function to be called, N is the number of arguments, and the "..."
10697 parameters are the argument expressions. */
10698
10699 tree
10700 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
10701 {
10702 va_list ap;
10703 tree fntype = TREE_TYPE (fndecl);
10704 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10705 tree *argarray = (tree *) alloca (n * sizeof (tree));
10706 int i;
10707
10708 va_start (ap, n);
10709 for (i = 0; i < n; i++)
10710 argarray[i] = va_arg (ap, tree);
10711 va_end (ap);
10712 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
10713 }
10714
10715 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
10716 varargs macros aren't supported by all bootstrap compilers. */
10717
10718 tree
10719 build_call_expr (tree fndecl, int n, ...)
10720 {
10721 va_list ap;
10722 tree fntype = TREE_TYPE (fndecl);
10723 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10724 tree *argarray = (tree *) alloca (n * sizeof (tree));
10725 int i;
10726
10727 va_start (ap, n);
10728 for (i = 0; i < n; i++)
10729 argarray[i] = va_arg (ap, tree);
10730 va_end (ap);
10731 return fold_builtin_call_array (UNKNOWN_LOCATION, TREE_TYPE (fntype),
10732 fn, n, argarray);
10733 }
10734
/* Construct a CALL_EXPR with type TYPE with FN as the function expression.
   N arguments are passed in the array ARGARRAY.  If the callee is a
   builtin, try to fold the call first; otherwise (or when folding
   fails) return a plain CALL_EXPR.  */

tree
fold_builtin_call_array (location_t loc, tree type,
			 tree fn,
			 int n,
			 tree *argarray)
{
  tree ret = NULL_TREE;
  tree exp;

  if (TREE_CODE (fn) == ADDR_EXPR)
    {
      tree fndecl = TREE_OPERAND (fn, 0);
      if (TREE_CODE (fndecl) == FUNCTION_DECL
	  && DECL_BUILT_IN (fndecl))
	{
	  /* If last argument is __builtin_va_arg_pack (), arguments to this
	     function are not finalized yet.  Defer folding until they are.  */
	  if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
	    {
	      tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
	      if (fndecl2
		  && TREE_CODE (fndecl2) == FUNCTION_DECL
		  && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
		  && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
		return build_call_array_loc (loc, type, fn, n, argarray);
	    }
	  /* Calls behind always_inline wrappers must survive until after
	     inlining.  */
	  if (avoid_folding_inline_builtin (fndecl))
	    return build_call_array_loc (loc, type, fn, n, argarray);
	  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	    {
	      /* Machine-specific builtins are folded by the target hook.  */
	      ret = targetm.fold_builtin (fndecl, n, argarray, false);
	      if (ret)
		return ret;

	      return build_call_array_loc (loc, type, fn, n, argarray);
	    }
	  else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
	    {
	      /* First try the transformations that don't require consing up
		 an exp.  */
	      ret = fold_builtin_n (loc, fndecl, argarray, n, false);
	      if (ret)
		return ret;
	    }

	  /* If we got this far, we need to build an exp.  */
	  exp = build_call_array_loc (loc, type, fn, n, argarray);
	  ret = fold_builtin_varargs (loc, fndecl, exp, false);
	  return ret ? ret : exp;
	}
    }

  return build_call_array_loc (loc, type, fn, n, argarray);
}
10792
10793 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10794 along with N new arguments specified as the "..." parameters. SKIP
10795 is the number of arguments in EXP to be omitted. This function is used
10796 to do varargs-to-varargs transformations. */
10797
static tree
rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
{
  int oldnargs = call_expr_nargs (exp);
  /* Size of the rewritten argument list: EXP's args minus the SKIP
     dropped ones, plus the N new ones.  */
  int nargs = oldnargs - skip + n;
  tree fntype = TREE_TYPE (fndecl);
  tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
  tree *buffer;

  if (n > 0)
    {
      int i, j;
      va_list ap;

      /* New "..." arguments come first, followed by the tail of EXP's
	 original arguments.  */
      buffer = XALLOCAVEC (tree, nargs);
      va_start (ap, n);
      for (i = 0; i < n; i++)
	buffer[i] = va_arg (ap, tree);
      va_end (ap);
      for (j = skip; j < oldnargs; j++, i++)
	buffer[i] = CALL_EXPR_ARG (exp, j);
    }
  else
    /* No new arguments: alias EXP's own argument vector (offset past
       the skipped entries) instead of copying it.  */
    buffer = CALL_EXPR_ARGP (exp) + skip;

  return fold (build_call_array_loc (loc, TREE_TYPE (exp), fn, nargs, buffer));
}
10825
10826 /* Validate a single argument ARG against a tree code CODE representing
10827 a type. */
10828
10829 static bool
10830 validate_arg (const_tree arg, enum tree_code code)
10831 {
10832 if (!arg)
10833 return false;
10834 else if (code == POINTER_TYPE)
10835 return POINTER_TYPE_P (TREE_TYPE (arg));
10836 else if (code == INTEGER_TYPE)
10837 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10838 return code == TREE_CODE (TREE_TYPE (arg));
10839 }
10840
10841 /* This function validates the types of a function call argument list
10842 against a specified list of tree_codes. If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
10844 VOID_TYPE.
10845
10846 This is the GIMPLE version of validate_arglist. Eventually we want to
10847 completely convert builtins.c to work from GIMPLEs and the tree based
10848 validate_arglist will then be removed. */
10849
10850 bool
10851 validate_gimple_arglist (const_gimple call, ...)
10852 {
10853 enum tree_code code;
10854 bool res = 0;
10855 va_list ap;
10856 const_tree arg;
10857 size_t i;
10858
10859 va_start (ap, call);
10860 i = 0;
10861
10862 do
10863 {
10864 code = (enum tree_code) va_arg (ap, int);
10865 switch (code)
10866 {
10867 case 0:
10868 /* This signifies an ellipses, any further arguments are all ok. */
10869 res = true;
10870 goto end;
10871 case VOID_TYPE:
10872 /* This signifies an endlink, if no arguments remain, return
10873 true, otherwise return false. */
10874 res = (i == gimple_call_num_args (call));
10875 goto end;
10876 default:
10877 /* If no parameters remain or the parameter's code does not
10878 match the specified code, return false. Otherwise continue
10879 checking any remaining arguments. */
10880 arg = gimple_call_arg (call, i++);
10881 if (!validate_arg (arg, code))
10882 goto end;
10883 break;
10884 }
10885 }
10886 while (1);
10887
10888 /* We need gotos here since we can only have one VA_CLOSE in a
10889 function. */
10890 end: ;
10891 va_end (ap);
10892
10893 return res;
10894 }
10895
10896 /* This function validates the types of a function call argument list
10897 against a specified list of tree_codes. If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
10899 VOID_TYPE. */
10900
10901 bool
10902 validate_arglist (const_tree callexpr, ...)
10903 {
10904 enum tree_code code;
10905 bool res = 0;
10906 va_list ap;
10907 const_call_expr_arg_iterator iter;
10908 const_tree arg;
10909
10910 va_start (ap, callexpr);
10911 init_const_call_expr_arg_iterator (callexpr, &iter);
10912
10913 do
10914 {
10915 code = (enum tree_code) va_arg (ap, int);
10916 switch (code)
10917 {
10918 case 0:
10919 /* This signifies an ellipses, any further arguments are all ok. */
10920 res = true;
10921 goto end;
10922 case VOID_TYPE:
10923 /* This signifies an endlink, if no arguments remain, return
10924 true, otherwise return false. */
10925 res = !more_const_call_expr_args_p (&iter);
10926 goto end;
10927 default:
10928 /* If no parameters remain or the parameter's code does not
10929 match the specified code, return false. Otherwise continue
10930 checking any remaining arguments. */
10931 arg = next_const_call_expr_arg (&iter);
10932 if (!validate_arg (arg, code))
10933 goto end;
10934 break;
10935 }
10936 }
10937 while (1);
10938
10939 /* We need gotos here since we can only have one VA_CLOSE in a
10940 function. */
10941 end: ;
10942 va_end (ap);
10943
10944 return res;
10945 }
10946
10947 /* Default target-specific builtin expander that does nothing. */
10948
rtx
default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
			rtx target ATTRIBUTE_UNUSED,
			rtx subtarget ATTRIBUTE_UNUSED,
			enum machine_mode mode ATTRIBUTE_UNUSED,
			int ignore ATTRIBUTE_UNUSED)
{
  /* Returning NULL_RTX signals the caller that this target expands no
     builtins itself, so generic expansion / a library call is used.  */
  return NULL_RTX;
}
10958
/* Returns true if EXP represents data that would potentially reside
   in a readonly section.  */
10961
10962 static bool
10963 readonly_data_expr (tree exp)
10964 {
10965 STRIP_NOPS (exp);
10966
10967 if (TREE_CODE (exp) != ADDR_EXPR)
10968 return false;
10969
10970 exp = get_base_address (TREE_OPERAND (exp, 0));
10971 if (!exp)
10972 return false;
10973
10974 /* Make sure we call decl_readonly_section only for trees it
10975 can handle (since it returns true for everything it doesn't
10976 understand). */
10977 if (TREE_CODE (exp) == STRING_CST
10978 || TREE_CODE (exp) == CONSTRUCTOR
10979 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
10980 return decl_readonly_section (exp, 0);
10981 else
10982 return false;
10983 }
10984
10985 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
10986 to the call, and TYPE is its return type.
10987
10988 Return NULL_TREE if no simplification was possible, otherwise return the
10989 simplified form of the call as a tree.
10990
10991 The simplified form may be a constant or other expression which
10992 computes the same value, but in a more efficient manner (including
10993 calls to other builtin functions).
10994
10995 The call may contain arguments which need to be evaluated, but
10996 which are not useful to determine the result of the call. In
10997 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10998 COMPOUND_EXPR will be an argument which must be evaluated.
10999 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11000 COMPOUND_EXPR in the chain will contain the tree for the simplified
11001 form of the builtin function call. */
11002
11003 static tree
11004 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
11005 {
11006 if (!validate_arg (s1, POINTER_TYPE)
11007 || !validate_arg (s2, POINTER_TYPE))
11008 return NULL_TREE;
11009 else
11010 {
11011 tree fn;
11012 const char *p1, *p2;
11013
11014 p2 = c_getstr (s2);
11015 if (p2 == NULL)
11016 return NULL_TREE;
11017
11018 p1 = c_getstr (s1);
11019 if (p1 != NULL)
11020 {
11021 const char *r = strstr (p1, p2);
11022 tree tem;
11023
11024 if (r == NULL)
11025 return build_int_cst (TREE_TYPE (s1), 0);
11026
11027 /* Return an offset into the constant string argument. */
11028 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11029 s1, size_int (r - p1));
11030 return fold_convert_loc (loc, type, tem);
11031 }
11032
11033 /* The argument is const char *, and the result is char *, so we need
11034 a type conversion here to avoid a warning. */
11035 if (p2[0] == '\0')
11036 return fold_convert_loc (loc, type, s1);
11037
11038 if (p2[1] != '\0')
11039 return NULL_TREE;
11040
11041 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11042 if (!fn)
11043 return NULL_TREE;
11044
11045 /* New argument list transforming strstr(s1, s2) to
11046 strchr(s1, s2[0]). */
11047 return build_call_expr_loc (loc, fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
11048 }
11049 }
11050
11051 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
11052 the call, and TYPE is its return type.
11053
11054 Return NULL_TREE if no simplification was possible, otherwise return the
11055 simplified form of the call as a tree.
11056
11057 The simplified form may be a constant or other expression which
11058 computes the same value, but in a more efficient manner (including
11059 calls to other builtin functions).
11060
11061 The call may contain arguments which need to be evaluated, but
11062 which are not useful to determine the result of the call. In
11063 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11064 COMPOUND_EXPR will be an argument which must be evaluated.
11065 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11066 COMPOUND_EXPR in the chain will contain the tree for the simplified
11067 form of the builtin function call. */
11068
11069 static tree
11070 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
11071 {
11072 if (!validate_arg (s1, POINTER_TYPE)
11073 || !validate_arg (s2, INTEGER_TYPE))
11074 return NULL_TREE;
11075 else
11076 {
11077 const char *p1;
11078
11079 if (TREE_CODE (s2) != INTEGER_CST)
11080 return NULL_TREE;
11081
11082 p1 = c_getstr (s1);
11083 if (p1 != NULL)
11084 {
11085 char c;
11086 const char *r;
11087 tree tem;
11088
11089 if (target_char_cast (s2, &c))
11090 return NULL_TREE;
11091
11092 r = strchr (p1, c);
11093
11094 if (r == NULL)
11095 return build_int_cst (TREE_TYPE (s1), 0);
11096
11097 /* Return an offset into the constant string argument. */
11098 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11099 s1, size_int (r - p1));
11100 return fold_convert_loc (loc, type, tem);
11101 }
11102 return NULL_TREE;
11103 }
11104 }
11105
11106 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
11107 the call, and TYPE is its return type.
11108
11109 Return NULL_TREE if no simplification was possible, otherwise return the
11110 simplified form of the call as a tree.
11111
11112 The simplified form may be a constant or other expression which
11113 computes the same value, but in a more efficient manner (including
11114 calls to other builtin functions).
11115
11116 The call may contain arguments which need to be evaluated, but
11117 which are not useful to determine the result of the call. In
11118 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11119 COMPOUND_EXPR will be an argument which must be evaluated.
11120 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11121 COMPOUND_EXPR in the chain will contain the tree for the simplified
11122 form of the builtin function call. */
11123
11124 static tree
11125 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
11126 {
11127 if (!validate_arg (s1, POINTER_TYPE)
11128 || !validate_arg (s2, INTEGER_TYPE))
11129 return NULL_TREE;
11130 else
11131 {
11132 tree fn;
11133 const char *p1;
11134
11135 if (TREE_CODE (s2) != INTEGER_CST)
11136 return NULL_TREE;
11137
11138 p1 = c_getstr (s1);
11139 if (p1 != NULL)
11140 {
11141 char c;
11142 const char *r;
11143 tree tem;
11144
11145 if (target_char_cast (s2, &c))
11146 return NULL_TREE;
11147
11148 r = strrchr (p1, c);
11149
11150 if (r == NULL)
11151 return build_int_cst (TREE_TYPE (s1), 0);
11152
11153 /* Return an offset into the constant string argument. */
11154 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11155 s1, size_int (r - p1));
11156 return fold_convert_loc (loc, type, tem);
11157 }
11158
11159 if (! integer_zerop (s2))
11160 return NULL_TREE;
11161
11162 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11163 if (!fn)
11164 return NULL_TREE;
11165
11166 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11167 return build_call_expr_loc (loc, fn, 2, s1, s2);
11168 }
11169 }
11170
11171 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11172 to the call, and TYPE is its return type.
11173
11174 Return NULL_TREE if no simplification was possible, otherwise return the
11175 simplified form of the call as a tree.
11176
11177 The simplified form may be a constant or other expression which
11178 computes the same value, but in a more efficient manner (including
11179 calls to other builtin functions).
11180
11181 The call may contain arguments which need to be evaluated, but
11182 which are not useful to determine the result of the call. In
11183 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11184 COMPOUND_EXPR will be an argument which must be evaluated.
11185 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11186 COMPOUND_EXPR in the chain will contain the tree for the simplified
11187 form of the builtin function call. */
11188
static tree
fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      tree fn;
      const char *p1, *p2;

      /* The accept-set must be a constant string or nothing is known.  */
      p2 = c_getstr (s2);
      if (p2 == NULL)
	return NULL_TREE;

      p1 = c_getstr (s1);
      if (p1 != NULL)
	{
	  /* Both strings constant: evaluate strpbrk at compile time.  */
	  const char *r = strpbrk (p1, p2);
	  tree tem;

	  if (r == NULL)
	    return build_int_cst (TREE_TYPE (s1), 0);

	  /* Return an offset into the constant string argument.  */
	  tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
				 s1, size_int (r - p1));
	  return fold_convert_loc (loc, type, tem);
	}

      if (p2[0] == '\0')
	/* strpbrk(x, "") == NULL.
	   Evaluate and ignore s1 in case it had side-effects.  */
	return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);

      if (p2[1] != '\0')
	return NULL_TREE;  /* Really call strpbrk.  */

      fn = implicit_built_in_decls[BUILT_IN_STRCHR];
      if (!fn)
	return NULL_TREE;

      /* New argument list transforming strpbrk(s1, s2) to
	 strchr(s1, s2[0]).  */
      return build_call_expr_loc (loc, fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
    }
}
11236
11237 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11238 to the call.
11239
11240 Return NULL_TREE if no simplification was possible, otherwise return the
11241 simplified form of the call as a tree.
11242
11243 The simplified form may be a constant or other expression which
11244 computes the same value, but in a more efficient manner (including
11245 calls to other builtin functions).
11246
11247 The call may contain arguments which need to be evaluated, but
11248 which are not useful to determine the result of the call. In
11249 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11250 COMPOUND_EXPR will be an argument which must be evaluated.
11251 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11252 COMPOUND_EXPR in the chain will contain the tree for the simplified
11253 form of the builtin function call. */
11254
static tree
fold_builtin_strcat (location_t loc ATTRIBUTE_UNUSED, tree dst, tree src)
{
  if (!validate_arg (dst, POINTER_TYPE)
      || !validate_arg (src, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      const char *p = c_getstr (src);

      /* If the string length is zero, return the dst parameter.  */
      if (p && *p == '\0')
	return dst;

      /* The expansion below trades a libcall for more inline code, so
	 only do it when optimizing for speed.  */
      if (optimize_insn_for_speed_p ())
	{
	  /* See if we can store by pieces into (dst + strlen(dst)).  */
	  tree newdst, call;
	  tree strlen_fn = implicit_built_in_decls[BUILT_IN_STRLEN];
	  tree strcpy_fn = implicit_built_in_decls[BUILT_IN_STRCPY];

	  if (!strlen_fn || !strcpy_fn)
	    return NULL_TREE;

	  /* If we don't have a movstr we don't want to emit an strcpy
	     call.  We have to do that if the length of the source string
	     isn't computable (in that case we can use memcpy probably
	     later expanding to a sequence of mov instructions).  If we
	     have movstr instructions we can emit strcpy calls.  */
	  if (!HAVE_movstr)
	    {
	      tree len = c_strlen (src, 1);
	      if (! len || TREE_SIDE_EFFECTS (len))
		return NULL_TREE;
	    }

	  /* Stabilize the argument list.  DST is evaluated twice below
	     (in strlen and as the final value), so it must be saved.  */
	  dst = builtin_save_expr (dst);

	  /* Create strlen (dst).  */
	  newdst = build_call_expr_loc (loc, strlen_fn, 1, dst);
	  /* Create (dst p+ strlen (dst)).  */

	  newdst = fold_build2_loc (loc, POINTER_PLUS_EXPR,
				    TREE_TYPE (dst), dst, newdst);
	  newdst = builtin_save_expr (newdst);

	  /* Copy SRC to the end of DST, then yield DST as strcat's
	     return value.  */
	  call = build_call_expr_loc (loc, strcpy_fn, 2, newdst, src);
	  return build2 (COMPOUND_EXPR, TREE_TYPE (dst), call, dst);
	}
      return NULL_TREE;
    }
}
11308
11309 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11310 arguments to the call.
11311
11312 Return NULL_TREE if no simplification was possible, otherwise return the
11313 simplified form of the call as a tree.
11314
11315 The simplified form may be a constant or other expression which
11316 computes the same value, but in a more efficient manner (including
11317 calls to other builtin functions).
11318
11319 The call may contain arguments which need to be evaluated, but
11320 which are not useful to determine the result of the call. In
11321 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11322 COMPOUND_EXPR will be an argument which must be evaluated.
11323 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11324 COMPOUND_EXPR in the chain will contain the tree for the simplified
11325 form of the builtin function call. */
11326
11327 static tree
11328 fold_builtin_strncat (location_t loc, tree dst, tree src, tree len)
11329 {
11330 if (!validate_arg (dst, POINTER_TYPE)
11331 || !validate_arg (src, POINTER_TYPE)
11332 || !validate_arg (len, INTEGER_TYPE))
11333 return NULL_TREE;
11334 else
11335 {
11336 const char *p = c_getstr (src);
11337
11338 /* If the requested length is zero, or the src parameter string
11339 length is zero, return the dst parameter. */
11340 if (integer_zerop (len) || (p && *p == '\0'))
11341 return omit_two_operands_loc (loc, TREE_TYPE (dst), dst, src, len);
11342
11343 /* If the requested len is greater than or equal to the string
11344 length, call strcat. */
11345 if (TREE_CODE (len) == INTEGER_CST && p
11346 && compare_tree_int (len, strlen (p)) >= 0)
11347 {
11348 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
11349
11350 /* If the replacement _DECL isn't initialized, don't do the
11351 transformation. */
11352 if (!fn)
11353 return NULL_TREE;
11354
11355 return build_call_expr_loc (loc, fn, 2, dst, src);
11356 }
11357 return NULL_TREE;
11358 }
11359 }
11360
11361 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11362 to the call.
11363
11364 Return NULL_TREE if no simplification was possible, otherwise return the
11365 simplified form of the call as a tree.
11366
11367 The simplified form may be a constant or other expression which
11368 computes the same value, but in a more efficient manner (including
11369 calls to other builtin functions).
11370
11371 The call may contain arguments which need to be evaluated, but
11372 which are not useful to determine the result of the call. In
11373 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11374 COMPOUND_EXPR will be an argument which must be evaluated.
11375 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11376 COMPOUND_EXPR in the chain will contain the tree for the simplified
11377 form of the builtin function call. */
11378
11379 static tree
11380 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11381 {
11382 if (!validate_arg (s1, POINTER_TYPE)
11383 || !validate_arg (s2, POINTER_TYPE))
11384 return NULL_TREE;
11385 else
11386 {
11387 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11388
11389 /* If both arguments are constants, evaluate at compile-time. */
11390 if (p1 && p2)
11391 {
11392 const size_t r = strspn (p1, p2);
11393 return size_int (r);
11394 }
11395
11396 /* If either argument is "", return NULL_TREE. */
11397 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11398 /* Evaluate and ignore both arguments in case either one has
11399 side-effects. */
11400 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11401 s1, s2);
11402 return NULL_TREE;
11403 }
11404 }
11405
11406 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11407 to the call.
11408
11409 Return NULL_TREE if no simplification was possible, otherwise return the
11410 simplified form of the call as a tree.
11411
11412 The simplified form may be a constant or other expression which
11413 computes the same value, but in a more efficient manner (including
11414 calls to other builtin functions).
11415
11416 The call may contain arguments which need to be evaluated, but
11417 which are not useful to determine the result of the call. In
11418 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11419 COMPOUND_EXPR will be an argument which must be evaluated.
11420 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11421 COMPOUND_EXPR in the chain will contain the tree for the simplified
11422 form of the builtin function call. */
11423
11424 static tree
11425 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
11426 {
11427 if (!validate_arg (s1, POINTER_TYPE)
11428 || !validate_arg (s2, POINTER_TYPE))
11429 return NULL_TREE;
11430 else
11431 {
11432 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11433
11434 /* If both arguments are constants, evaluate at compile-time. */
11435 if (p1 && p2)
11436 {
11437 const size_t r = strcspn (p1, p2);
11438 return size_int (r);
11439 }
11440
11441 /* If the first argument is "", return NULL_TREE. */
11442 if (p1 && *p1 == '\0')
11443 {
11444 /* Evaluate and ignore argument s2 in case it has
11445 side-effects. */
11446 return omit_one_operand_loc (loc, size_type_node,
11447 size_zero_node, s2);
11448 }
11449
11450 /* If the second argument is "", return __builtin_strlen(s1). */
11451 if (p2 && *p2 == '\0')
11452 {
11453 tree fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11454
11455 /* If the replacement _DECL isn't initialized, don't do the
11456 transformation. */
11457 if (!fn)
11458 return NULL_TREE;
11459
11460 return build_call_expr_loc (loc, fn, 1, s1);
11461 }
11462 return NULL_TREE;
11463 }
11464 }
11465
11466 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11467 to the call. IGNORE is true if the value returned
   by the builtin will be ignored.  UNLOCKED is true if this is
   actually a call to fputs_unlocked.  If LEN is non-NULL, it represents
11470 the known length of the string. Return NULL_TREE if no simplification
11471 was possible. */
11472
11473 tree
11474 fold_builtin_fputs (location_t loc, tree arg0, tree arg1,
11475 bool ignore, bool unlocked, tree len)
11476 {
11477 /* If we're using an unlocked function, assume the other unlocked
11478 functions exist explicitly. */
11479 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
11480 : implicit_built_in_decls[BUILT_IN_FPUTC];
11481 tree const fn_fwrite = unlocked ? built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
11482 : implicit_built_in_decls[BUILT_IN_FWRITE];
11483
11484 /* If the return value is used, don't do the transformation. */
11485 if (!ignore)
11486 return NULL_TREE;
11487
11488 /* Verify the arguments in the original call. */
11489 if (!validate_arg (arg0, POINTER_TYPE)
11490 || !validate_arg (arg1, POINTER_TYPE))
11491 return NULL_TREE;
11492
11493 if (! len)
11494 len = c_strlen (arg0, 0);
11495
11496 /* Get the length of the string passed to fputs. If the length
11497 can't be determined, punt. */
11498 if (!len
11499 || TREE_CODE (len) != INTEGER_CST)
11500 return NULL_TREE;
11501
11502 switch (compare_tree_int (len, 1))
11503 {
11504 case -1: /* length is 0, delete the call entirely . */
11505 return omit_one_operand_loc (loc, integer_type_node,
11506 integer_zero_node, arg1);;
11507
11508 case 0: /* length is 1, call fputc. */
11509 {
11510 const char *p = c_getstr (arg0);
11511
11512 if (p != NULL)
11513 {
11514 if (fn_fputc)
11515 return build_call_expr_loc (loc, fn_fputc, 2,
11516 build_int_cst (NULL_TREE, p[0]), arg1);
11517 else
11518 return NULL_TREE;
11519 }
11520 }
11521 /* FALLTHROUGH */
11522 case 1: /* length is greater than 1, call fwrite. */
11523 {
11524 /* If optimizing for size keep fputs. */
11525 if (optimize_function_for_size_p (cfun))
11526 return NULL_TREE;
11527 /* New argument list transforming fputs(string, stream) to
11528 fwrite(string, 1, len, stream). */
11529 if (fn_fwrite)
11530 return build_call_expr_loc (loc, fn_fwrite, 4, arg0,
11531 size_one_node, len, arg1);
11532 else
11533 return NULL_TREE;
11534 }
11535 default:
11536 gcc_unreachable ();
11537 }
11538 return NULL_TREE;
11539 }
11540
11541 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
11542 produced. False otherwise. This is done so that we don't output the error
11543 or warning twice or three times. */
11544
11545 bool
11546 fold_builtin_next_arg (tree exp, bool va_start_p)
11547 {
11548 tree fntype = TREE_TYPE (current_function_decl);
11549 int nargs = call_expr_nargs (exp);
11550 tree arg;
11551
11552 if (TYPE_ARG_TYPES (fntype) == 0
11553 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
11554 == void_type_node))
11555 {
11556 error ("%<va_start%> used in function with fixed args");
11557 return true;
11558 }
11559
11560 if (va_start_p)
11561 {
11562 if (va_start_p && (nargs != 2))
11563 {
11564 error ("wrong number of arguments to function %<va_start%>");
11565 return true;
11566 }
11567 arg = CALL_EXPR_ARG (exp, 1);
11568 }
11569 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11570 when we checked the arguments and if needed issued a warning. */
11571 else
11572 {
11573 if (nargs == 0)
11574 {
11575 /* Evidently an out of date version of <stdarg.h>; can't validate
11576 va_start's second argument, but can still work as intended. */
11577 warning (0, "%<__builtin_next_arg%> called without an argument");
11578 return true;
11579 }
11580 else if (nargs > 1)
11581 {
11582 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11583 return true;
11584 }
11585 arg = CALL_EXPR_ARG (exp, 0);
11586 }
11587
11588 if (TREE_CODE (arg) == SSA_NAME)
11589 arg = SSA_NAME_VAR (arg);
11590
11591 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11592 or __builtin_next_arg (0) the first time we see it, after checking
11593 the arguments and if needed issuing a warning. */
11594 if (!integer_zerop (arg))
11595 {
11596 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11597
11598 /* Strip off all nops for the sake of the comparison. This
11599 is not quite the same as STRIP_NOPS. It does more.
11600 We must also strip off INDIRECT_EXPR for C++ reference
11601 parameters. */
11602 while (CONVERT_EXPR_P (arg)
11603 || TREE_CODE (arg) == INDIRECT_REF)
11604 arg = TREE_OPERAND (arg, 0);
11605 if (arg != last_parm)
11606 {
11607 /* FIXME: Sometimes with the tree optimizers we can get the
11608 not the last argument even though the user used the last
11609 argument. We just warn and set the arg to be the last
11610 argument so that we will get wrong-code because of
11611 it. */
11612 warning (0, "second parameter of %<va_start%> not last named argument");
11613 }
11614
11615 /* Undefined by C99 7.15.1.4p4 (va_start):
11616 "If the parameter parmN is declared with the register storage
11617 class, with a function or array type, or with a type that is
11618 not compatible with the type that results after application of
11619 the default argument promotions, the behavior is undefined."
11620 */
11621 else if (DECL_REGISTER (arg))
11622 warning (0, "undefined behaviour when second parameter of "
11623 "%<va_start%> is declared with %<register%> storage");
11624
11625 /* We want to verify the second parameter just once before the tree
11626 optimizers are run and then avoid keeping it in the tree,
11627 as otherwise we could warn even for correct code like:
11628 void foo (int i, ...)
11629 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11630 if (va_start_p)
11631 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11632 else
11633 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11634 }
11635 return false;
11636 }
11637
11638
11639 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
11640 ORIG may be null if this is a 2-argument call. We don't attempt to
11641 simplify calls with more than 3 arguments.
11642
11643 Return NULL_TREE if no simplification was possible, otherwise return the
11644 simplified form of the call as a tree. If IGNORED is true, it means that
11645 the caller does not use the returned value of the function. */
11646
static tree
fold_builtin_sprintf (location_t loc, tree dest, tree fmt,
		      tree orig, int ignored)
{
  tree call, retval;
  const char *fmt_str = NULL;

  /* Verify the required arguments in the original call.  We deal with two
     types of sprintf() calls: 'sprintf (str, fmt)' and
     'sprintf (dest, "%s", orig)'.  */
  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;
  if (orig && !validate_arg (orig, POINTER_TYPE))
    return NULL_TREE;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return NULL_TREE;

  call = NULL_TREE;
  retval = NULL_TREE;

  /* Make sure we know the target's '%' and 's' characters before
     inspecting the format string.  */
  if (!init_target_chars ())
    return NULL_TREE;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];

      if (!fn)
	return NULL_TREE;

      /* Don't optimize sprintf (buf, "abc", ptr++).  */
      if (orig)
	return NULL_TREE;

      /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
	 'format' is known to contain no % formats.  */
      call = build_call_expr_loc (loc, fn, 2, dest, fmt);
      /* sprintf returns the number of characters written, which here
	 is just the format string's length.  */
      if (!ignored)
	retval = build_int_cst (NULL_TREE, strlen (fmt_str));
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn;
      fn = implicit_built_in_decls[BUILT_IN_STRCPY];

      if (!fn)
	return NULL_TREE;

      /* Don't crash on sprintf (str1, "%s").  */
      if (!orig)
	return NULL_TREE;

      /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2).  */
      if (!ignored)
	{
	  /* The result is used, so it must be a compile-time constant
	     length or we cannot supply a return value.  */
	  retval = c_strlen (orig, 1);
	  if (!retval || TREE_CODE (retval) != INTEGER_CST)
	    return NULL_TREE;
	}
      call = build_call_expr_loc (loc, fn, 2, dest, orig);
    }

  if (call && retval)
    {
      /* Chain the strcpy call and the (converted) return value so the
	 replacement still yields sprintf's result type.  */
      retval = fold_convert_loc
	(loc, TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
	 retval);
      return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
    }
  else
    return call;
}
11726
11727 /* Expand a call EXP to __builtin_object_size. */
11728
11729 rtx
11730 expand_builtin_object_size (tree exp)
11731 {
11732 tree ost;
11733 int object_size_type;
11734 tree fndecl = get_callee_fndecl (exp);
11735
11736 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11737 {
11738 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11739 exp, fndecl);
11740 expand_builtin_trap ();
11741 return const0_rtx;
11742 }
11743
11744 ost = CALL_EXPR_ARG (exp, 1);
11745 STRIP_NOPS (ost);
11746
11747 if (TREE_CODE (ost) != INTEGER_CST
11748 || tree_int_cst_sgn (ost) < 0
11749 || compare_tree_int (ost, 3) > 0)
11750 {
11751 error ("%Klast argument of %D is not integer constant between 0 and 3",
11752 exp, fndecl);
11753 expand_builtin_trap ();
11754 return const0_rtx;
11755 }
11756
11757 object_size_type = tree_low_cst (ost, 0);
11758
11759 return object_size_type < 2 ? constm1_rtx : const0_rtx;
11760 }
11761
11762 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11763 FCODE is the BUILT_IN_* to use.
11764 Return NULL_RTX if we failed; the caller should emit a normal call,
11765 otherwise try to get the result in TARGET, if convenient (and in
11766 mode MODE if that's convenient). */
11767
static rtx
expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
			   enum built_in_function fcode)
{
  tree dest, src, len, size;

  /* The second argument is the fill byte (an int) for __memset_chk and
     a pointer for the copy/move variants.  */
  if (!validate_arglist (exp,
			 POINTER_TYPE,
			 fcode == BUILT_IN_MEMSET_CHK
			 ? INTEGER_TYPE : POINTER_TYPE,
			 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dest = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);
  len = CALL_EXPR_ARG (exp, 2);
  /* SIZE is the destination object size as computed for
     __builtin_object_size; (size_t) -1 means unknown.  */
  size = CALL_EXPR_ARG (exp, 3);

  /* Without a constant object size nothing can be decided here.  */
  if (! host_integerp (size, 1))
    return NULL_RTX;

  if (host_integerp (len, 1) || integer_all_onesp (size))
    {
      tree fn;

      if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
	{
	  /* Certain overflow: warn, and emit the normal library call so
	     the runtime check still fires.  */
	  warning_at (tree_nonartificial_location (exp),
		      0, "%Kcall to %D will always overflow destination buffer",
		      exp, get_callee_fndecl (exp));
	  return NULL_RTX;
	}

      fn = NULL_TREE;
      /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
	 mem{cpy,pcpy,move,set} is available.  */
      switch (fcode)
	{
	case BUILT_IN_MEMCPY_CHK:
	  fn = built_in_decls[BUILT_IN_MEMCPY];
	  break;
	case BUILT_IN_MEMPCPY_CHK:
	  fn = built_in_decls[BUILT_IN_MEMPCPY];
	  break;
	case BUILT_IN_MEMMOVE_CHK:
	  fn = built_in_decls[BUILT_IN_MEMMOVE];
	  break;
	case BUILT_IN_MEMSET_CHK:
	  fn = built_in_decls[BUILT_IN_MEMSET];
	  break;
	default:
	  break;
	}

      if (! fn)
	return NULL_RTX;

      /* Expand the unchecked call, preserving the tail-call flag.  */
      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_expr (fn, target, mode, EXPAND_NORMAL);
    }
  else if (fcode == BUILT_IN_MEMSET_CHK)
    return NULL_RTX;
  else
    {
      unsigned int dest_align
	= get_pointer_alignment (dest, BIGGEST_ALIGNMENT);

      /* If DEST is not a pointer type, call the normal function.  */
      if (dest_align == 0)
	return NULL_RTX;

      /* If SRC and DEST are the same (and not volatile), do nothing.  */
      if (operand_equal_p (src, dest, 0))
	{
	  tree expr;

	  if (fcode != BUILT_IN_MEMPCPY_CHK)
	    {
	      /* Evaluate and ignore LEN in case it has side-effects.  */
	      expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
	      return expand_expr (dest, target, mode, EXPAND_NORMAL);
	    }

	  /* __mempcpy_chk returns DEST + LEN.  */
	  expr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
	  return expand_expr (expr, target, mode, EXPAND_NORMAL);
	}

      /* __memmove_chk special case.  */
      if (fcode == BUILT_IN_MEMMOVE_CHK)
	{
	  unsigned int src_align
	    = get_pointer_alignment (src, BIGGEST_ALIGNMENT);

	  if (src_align == 0)
	    return NULL_RTX;

	  /* If src is categorized for a readonly section we can use
	     normal __memcpy_chk.  */
	  if (readonly_data_expr (src))
	    {
	      tree fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
	      if (!fn)
		return NULL_RTX;
	      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
					  dest, src, len, size);
	      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
	      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
	      return expand_expr (fn, target, mode, EXPAND_NORMAL);
	    }
	}
      return NULL_RTX;
    }
}
11883
11884 /* Emit warning if a buffer overflow is detected at compile time. */
11885
static void
maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
{
  int is_strlen = 0;
  tree len, size;
  location_t loc = tree_nonartificial_location (exp);

  /* Pick out the length-like argument and the object-size argument of
     each checked builtin; for the str[pc]py/strcat family LEN is the
     source string whose strlen determines the bytes written.  */
  switch (fcode)
    {
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
    /* For __strcat_chk the warning will be emitted only if overflowing
       by at least strlen (dest) + 1 bytes.  */
    case BUILT_IN_STRCAT_CHK:
      len = CALL_EXPR_ARG (exp, 1);
      size = CALL_EXPR_ARG (exp, 2);
      is_strlen = 1;
      break;
    case BUILT_IN_STRNCAT_CHK:
    case BUILT_IN_STRNCPY_CHK:
      len = CALL_EXPR_ARG (exp, 2);
      size = CALL_EXPR_ARG (exp, 3);
      break;
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      len = CALL_EXPR_ARG (exp, 1);
      size = CALL_EXPR_ARG (exp, 3);
      break;
    default:
      gcc_unreachable ();
    }

  if (!len || !size)
    return;

  /* A non-constant size, or (size_t) -1 (object size unknown), can
     never be proven to overflow.  */
  if (! host_integerp (size, 1) || integer_all_onesp (size))
    return;

  if (is_strlen)
    {
      /* Overflow is certain only when the constant strlen of the
	 source is >= SIZE.  */
      len = c_strlen (len, 1);
      if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
	return;
    }
  else if (fcode == BUILT_IN_STRNCAT_CHK)
    {
      tree src = CALL_EXPR_ARG (exp, 1);
      if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
	return;
      src = c_strlen (src, 1);
      if (! src || ! host_integerp (src, 1))
	{
	  /* LEN >= SIZE but the source length is unknown: overflow is
	     possible, not certain -- use the weaker wording.  */
	  warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
		      exp, get_callee_fndecl (exp));
	  return;
	}
      else if (tree_int_cst_lt (src, size))
	return;
    }
  else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
    return;

  warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
	      exp, get_callee_fndecl (exp));
}
11951
11952 /* Emit warning if a buffer overflow is detected at compile time
11953 in __sprintf_chk/__vsprintf_chk calls. */
11954
static void
maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
{
  tree size, len, fmt;
  const char *fmt_str;
  int nargs = call_expr_nargs (exp);

  /* Verify the required arguments in the original call.  */

  if (nargs < 4)
    return;
  size = CALL_EXPR_ARG (exp, 2);
  fmt = CALL_EXPR_ARG (exp, 3);

  /* A non-constant size, or (size_t) -1 (object size unknown), can
     never be proven to overflow.  */
  if (! host_integerp (size, 1) || integer_all_onesp (size))
    return;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return;

  if (!init_target_chars ())
    return;

  /* If the format doesn't contain % args or %%, we know its size.  */
  if (strchr (fmt_str, target_percent) == 0)
    len = build_int_cstu (size_type_node, strlen (fmt_str));
  /* If the format is "%s" and first ... argument is a string literal,
     we know it too.  */
  else if (fcode == BUILT_IN_SPRINTF_CHK
	   && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree arg;

      if (nargs < 5)
	return;
      arg = CALL_EXPR_ARG (exp, 4);
      if (! POINTER_TYPE_P (TREE_TYPE (arg)))
	return;

      len = c_strlen (arg, 1);
      if (!len || ! host_integerp (len, 1))
	return;
    }
  else
    return;

  /* Overflow is certain when LEN >= SIZE; the terminating NUL needs
     LEN + 1 bytes.  */
  if (! tree_int_cst_lt (len, size))
    warning_at (tree_nonartificial_location (exp),
		0, "%Kcall to %D will always overflow destination buffer",
		exp, get_callee_fndecl (exp));
}
12008
12009 /* Emit warning if a free is called with address of a variable. */
12010
12011 static void
12012 maybe_emit_free_warning (tree exp)
12013 {
12014 tree arg = CALL_EXPR_ARG (exp, 0);
12015
12016 STRIP_NOPS (arg);
12017 if (TREE_CODE (arg) != ADDR_EXPR)
12018 return;
12019
12020 arg = get_base_address (TREE_OPERAND (arg, 0));
12021 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
12022 return;
12023
12024 if (SSA_VAR_P (arg))
12025 warning_at (tree_nonartificial_location (exp),
12026 0, "%Kattempt to free a non-heap object %qD", exp, arg);
12027 else
12028 warning_at (tree_nonartificial_location (exp),
12029 0, "%Kattempt to free a non-heap object", exp);
12030 }
12031
12032 /* Fold a call to __builtin_object_size with arguments PTR and OST,
12033 if possible. */
12034
tree
fold_builtin_object_size (tree ptr, tree ost)
{
  unsigned HOST_WIDE_INT bytes;
  int object_size_type;

  if (!validate_arg (ptr, POINTER_TYPE)
      || !validate_arg (ost, INTEGER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (ost);

  /* The object-size type must be a constant in [0, 3].  */
  if (TREE_CODE (ost) != INTEGER_CST
      || tree_int_cst_sgn (ost) < 0
      || compare_tree_int (ost, 3) > 0)
    return NULL_TREE;

  object_size_type = tree_low_cst (ost, 0);

  /* __builtin_object_size doesn't evaluate side-effects in its arguments;
     if there are any side-effects, it returns (size_t) -1 for types 0 and 1
     and (size_t) 0 for types 2 and 3.  */
  if (TREE_SIDE_EFFECTS (ptr))
    return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);

  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      /* A direct address is computable now; fold only if the value
	 fits in size_t.  */
      bytes = compute_builtin_object_size (ptr, object_size_type);
      if (double_int_fits_to_tree_p (size_type_node,
				     uhwi_to_double_int (bytes)))
	return build_int_cstu (size_type_node, bytes);
    }
  else if (TREE_CODE (ptr) == SSA_NAME)
    {
      /* If object size is not known yet, delay folding until
	 later.  Maybe subsequent passes will help determining
	 it.  */
      bytes = compute_builtin_object_size (ptr, object_size_type);
      if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
	  && double_int_fits_to_tree_p (size_type_node,
					uhwi_to_double_int (bytes)))
	return build_int_cstu (size_type_node, bytes);
    }

  return NULL_TREE;
}
12081
12082 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12083 DEST, SRC, LEN, and SIZE are the arguments to the call.
12084 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
12085 code of the builtin. If MAXLEN is not NULL, it is maximum length
12086 passed as third argument. */
12087
tree
fold_builtin_memory_chk (location_t loc, tree fndecl,
			 tree dest, tree src, tree len, tree size,
			 tree maxlen, bool ignore,
			 enum built_in_function fcode)
{
  tree fn;

  /* SRC is really the fill byte (an int) for __memset_chk.  */
  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (src,
			(fcode == BUILT_IN_MEMSET_CHK
			 ? INTEGER_TYPE : POINTER_TYPE))
      || !validate_arg (len, INTEGER_TYPE)
      || !validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;

  /* If SRC and DEST are the same (and not volatile), return DEST
     (resp. DEST+LEN for __mempcpy_chk).  */
  if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
    {
      if (fcode != BUILT_IN_MEMPCPY_CHK)
	return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
				     dest, len);
      else
	{
	  tree temp = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest),
				       dest, len);
	  return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), temp);
	}
    }

  if (! host_integerp (size, 1))
    return NULL_TREE;

  /* SIZE == (size_t) -1 means the object size is unknown and the check
     can never fail; otherwise we must prove LEN (or MAXLEN) <= SIZE.  */
  if (! integer_all_onesp (size))
    {
      if (! host_integerp (len, 1))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
	    {
	      if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
		{
		  /* (void) __mempcpy_chk () can be optimized into
		     (void) __memcpy_chk ().  */
		  fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
		  if (!fn)
		    return NULL_TREE;

		  return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
		}
	      return NULL_TREE;
	    }
	}
      else
	maxlen = len;

      /* A possible overflow: keep the checking variant.  */
      if (tree_int_cst_lt (size, maxlen))
	return NULL_TREE;
    }

  fn = NULL_TREE;
  /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
     mem{cpy,pcpy,move,set} is available.  */
  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
      fn = built_in_decls[BUILT_IN_MEMCPY];
      break;
    case BUILT_IN_MEMPCPY_CHK:
      fn = built_in_decls[BUILT_IN_MEMPCPY];
      break;
    case BUILT_IN_MEMMOVE_CHK:
      fn = built_in_decls[BUILT_IN_MEMMOVE];
      break;
    case BUILT_IN_MEMSET_CHK:
      fn = built_in_decls[BUILT_IN_MEMSET];
      break;
    default:
      break;
    }

  if (!fn)
    return NULL_TREE;

  return build_call_expr_loc (loc, fn, 3, dest, src, len);
}
12177
12178 /* Fold a call to the __st[rp]cpy_chk builtin.
12179 DEST, SRC, and SIZE are the arguments to the call.
12180 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
12181 code of the builtin. If MAXLEN is not NULL, it is maximum length of
12182 strings passed as second argument. */
12183
tree
fold_builtin_stxcpy_chk (location_t loc, tree fndecl, tree dest,
			 tree src, tree size,
			 tree maxlen, bool ignore,
			 enum built_in_function fcode)
{
  tree len, fn;

  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (src, POINTER_TYPE)
      || !validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
    return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);

  if (! host_integerp (size, 1))
    return NULL_TREE;

  /* SIZE == (size_t) -1 means the object size is unknown, so the
     runtime check can never fail.  */
  if (! integer_all_onesp (size))
    {
      len = c_strlen (src, 1);
      if (! len || ! host_integerp (len, 1))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
	    {
	      if (fcode == BUILT_IN_STPCPY_CHK)
		{
		  if (! ignore)
		    return NULL_TREE;

		  /* If return value of __stpcpy_chk is ignored,
		     optimize into __strcpy_chk.  */
		  fn = built_in_decls[BUILT_IN_STRCPY_CHK];
		  if (!fn)
		    return NULL_TREE;

		  return build_call_expr_loc (loc, fn, 3, dest, src, size);
		}

	      if (! len || TREE_SIDE_EFFECTS (len))
		return NULL_TREE;

	      /* If c_strlen returned something, but not a constant,
		 transform __strcpy_chk into __memcpy_chk.  */
	      fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
	      if (!fn)
		return NULL_TREE;

	      /* Copy LEN + 1 bytes to include the terminating NUL.  */
	      len = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
	      return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
				       build_call_expr_loc (loc, fn, 4,
							    dest, src, len, size));
	    }
	}
      else
	maxlen = len;

      /* A possible overflow: keep the checking variant.  */
      if (! tree_int_cst_lt (maxlen, size))
	return NULL_TREE;
    }

  /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available.  */
  fn = built_in_decls[fcode == BUILT_IN_STPCPY_CHK
		      ? BUILT_IN_STPCPY : BUILT_IN_STRCPY];
  if (!fn)
    return NULL_TREE;

  return build_call_expr_loc (loc, fn, 2, dest, src);
}
12258
12259 /* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
12260 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12261 length passed as third argument. */
12262
12263 tree
12264 fold_builtin_strncpy_chk (location_t loc, tree dest, tree src,
12265 tree len, tree size, tree maxlen)
12266 {
12267 tree fn;
12268
12269 if (!validate_arg (dest, POINTER_TYPE)
12270 || !validate_arg (src, POINTER_TYPE)
12271 || !validate_arg (len, INTEGER_TYPE)
12272 || !validate_arg (size, INTEGER_TYPE))
12273 return NULL_TREE;
12274
12275 if (! host_integerp (size, 1))
12276 return NULL_TREE;
12277
12278 if (! integer_all_onesp (size))
12279 {
12280 if (! host_integerp (len, 1))
12281 {
12282 /* If LEN is not constant, try MAXLEN too.
12283 For MAXLEN only allow optimizing into non-_ocs function
12284 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12285 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12286 return NULL_TREE;
12287 }
12288 else
12289 maxlen = len;
12290
12291 if (tree_int_cst_lt (size, maxlen))
12292 return NULL_TREE;
12293 }
12294
12295 /* If __builtin_strncpy_chk is used, assume strncpy is available. */
12296 fn = built_in_decls[BUILT_IN_STRNCPY];
12297 if (!fn)
12298 return NULL_TREE;
12299
12300 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12301 }
12302
12303 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12304 are the arguments to the call. */
12305
12306 static tree
12307 fold_builtin_strcat_chk (location_t loc, tree fndecl, tree dest,
12308 tree src, tree size)
12309 {
12310 tree fn;
12311 const char *p;
12312
12313 if (!validate_arg (dest, POINTER_TYPE)
12314 || !validate_arg (src, POINTER_TYPE)
12315 || !validate_arg (size, INTEGER_TYPE))
12316 return NULL_TREE;
12317
12318 p = c_getstr (src);
12319 /* If the SRC parameter is "", return DEST. */
12320 if (p && *p == '\0')
12321 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12322
12323 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
12324 return NULL_TREE;
12325
12326 /* If __builtin_strcat_chk is used, assume strcat is available. */
12327 fn = built_in_decls[BUILT_IN_STRCAT];
12328 if (!fn)
12329 return NULL_TREE;
12330
12331 return build_call_expr_loc (loc, fn, 2, dest, src);
12332 }
12333
12334 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12335 LEN, and SIZE. */
12336
12337 static tree
12338 fold_builtin_strncat_chk (location_t loc, tree fndecl,
12339 tree dest, tree src, tree len, tree size)
12340 {
12341 tree fn;
12342 const char *p;
12343
12344 if (!validate_arg (dest, POINTER_TYPE)
12345 || !validate_arg (src, POINTER_TYPE)
12346 || !validate_arg (size, INTEGER_TYPE)
12347 || !validate_arg (size, INTEGER_TYPE))
12348 return NULL_TREE;
12349
12350 p = c_getstr (src);
12351 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
12352 if (p && *p == '\0')
12353 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12354 else if (integer_zerop (len))
12355 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12356
12357 if (! host_integerp (size, 1))
12358 return NULL_TREE;
12359
12360 if (! integer_all_onesp (size))
12361 {
12362 tree src_len = c_strlen (src, 1);
12363 if (src_len
12364 && host_integerp (src_len, 1)
12365 && host_integerp (len, 1)
12366 && ! tree_int_cst_lt (len, src_len))
12367 {
12368 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
12369 fn = built_in_decls[BUILT_IN_STRCAT_CHK];
12370 if (!fn)
12371 return NULL_TREE;
12372
12373 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12374 }
12375 return NULL_TREE;
12376 }
12377
12378 /* If __builtin_strncat_chk is used, assume strncat is available. */
12379 fn = built_in_decls[BUILT_IN_STRNCAT];
12380 if (!fn)
12381 return NULL_TREE;
12382
12383 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12384 }
12385
12386 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
12387 a normal call should be emitted rather than expanding the function
12388 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
12389
static tree
fold_builtin_sprintf_chk (location_t loc, tree exp,
			  enum built_in_function fcode)
{
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;
  int nargs = call_expr_nargs (exp);

  /* Verify the required arguments in the original call.  */
  if (nargs < 4)
    return NULL_TREE;
  dest = CALL_EXPR_ARG (exp, 0);
  if (!validate_arg (dest, POINTER_TYPE))
    return NULL_TREE;
  flag = CALL_EXPR_ARG (exp, 1);
  if (!validate_arg (flag, INTEGER_TYPE))
    return NULL_TREE;
  size = CALL_EXPR_ARG (exp, 2);
  if (!validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;
  fmt = CALL_EXPR_ARG (exp, 3);
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  if (! host_integerp (size, 1))
    return NULL_TREE;

  /* LEN is the number of characters the call is known to produce,
     or NULL_TREE when that cannot be determined.  */
  len = NULL_TREE;

  if (!init_target_chars ())
    return NULL_TREE;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str != NULL)
    {
      /* If the format doesn't contain % args or %%, we know the size.  */
      if (strchr (fmt_str, target_percent) == 0)
	{
	  if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
	    len = build_int_cstu (size_type_node, strlen (fmt_str));
	}
      /* If the format is "%s" and first ... argument is a string literal,
	 we know the size too.  */
      else if (fcode == BUILT_IN_SPRINTF_CHK
	       && strcmp (fmt_str, target_percent_s) == 0)
	{
	  tree arg;

	  if (nargs == 5)
	    {
	      arg = CALL_EXPR_ARG (exp, 4);
	      if (validate_arg (arg, POINTER_TYPE))
		{
		  len = c_strlen (arg, 1);
		  if (! len || ! host_integerp (len, 1))
		    len = NULL_TREE;
		}
	    }
	}
    }

  /* Unless the object size is unknown ((size_t) -1), require proof
     that the output fits (LEN < SIZE, leaving room for the NUL).  */
  if (! integer_all_onesp (size))
    {
      if (! len || ! tree_int_cst_lt (len, size))
	return NULL_TREE;
    }

  /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      if (fmt_str == NULL)
	return NULL_TREE;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return NULL_TREE;
    }

  /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available.  */
  fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
		      ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
  if (!fn)
    return NULL_TREE;

  /* Rebuild the call without the FLAG and SIZE arguments.  */
  return rewrite_call_expr (loc, exp, 4, fn, 2, dest, fmt);
}
12477
12478 /* Fold a call EXP to {,v}snprintf. Return NULL_TREE if
12479 a normal call should be emitted rather than expanding the function
12480 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
12481 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
12482 passed as second argument. */
12483
tree
fold_builtin_snprintf_chk (location_t loc, tree exp, tree maxlen,
			   enum built_in_function fcode)
{
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;

  /* Verify the required arguments in the original call.  */
  if (call_expr_nargs (exp) < 5)
    return NULL_TREE;
  dest = CALL_EXPR_ARG (exp, 0);
  if (!validate_arg (dest, POINTER_TYPE))
    return NULL_TREE;
  len = CALL_EXPR_ARG (exp, 1);
  if (!validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;
  flag = CALL_EXPR_ARG (exp, 2);
  if (!validate_arg (flag, INTEGER_TYPE))
    return NULL_TREE;
  size = CALL_EXPR_ARG (exp, 3);
  if (!validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;
  fmt = CALL_EXPR_ARG (exp, 4);
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  if (! host_integerp (size, 1))
    return NULL_TREE;

  /* SIZE == (size_t) -1 means the object size is unknown; otherwise
     prove LEN (or MAXLEN) <= SIZE before dropping the check.  */
  if (! integer_all_onesp (size))
    {
      if (! host_integerp (len, 1))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
	    return NULL_TREE;
	}
      else
	maxlen = len;

      /* A possible overflow: keep the checking variant.  */
      if (tree_int_cst_lt (size, maxlen))
	return NULL_TREE;
    }

  if (!init_target_chars ())
    return NULL_TREE;

  /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      fmt_str = c_getstr (fmt);
      if (fmt_str == NULL)
	return NULL_TREE;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return NULL_TREE;
    }

  /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
     available.  */
  fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
		      ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
  if (!fn)
    return NULL_TREE;

  /* Rebuild the call without the FLAG and SIZE arguments.  */
  return rewrite_call_expr (loc, exp, 5, fn, 3, dest, len, fmt);
}
12554
12555 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
12556 FMT and ARG are the arguments to the call; we don't fold cases with
12557 more than 2 arguments, and ARG may be null if this is a 1-argument case.
12558
12559 Return NULL_TREE if no simplification was possible, otherwise return the
12560 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12561 code of the function to be simplified. */
12562
static tree
fold_builtin_printf (location_t loc, tree fndecl, tree fmt,
		     tree arg, bool ignore,
		     enum built_in_function fcode)
{
  tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (! ignore)
    return NULL_TREE;

  /* Verify the required arguments in the original call.  */
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return NULL_TREE;

  if (fcode == BUILT_IN_PRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_putchar = built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED];
      fn_puts = built_in_decls[BUILT_IN_PUTS_UNLOCKED];
    }
  else
    {
      fn_putchar = implicit_built_in_decls[BUILT_IN_PUTCHAR];
      fn_puts = implicit_built_in_decls[BUILT_IN_PUTS];
    }

  if (!init_target_chars ())
    return NULL_TREE;

  /* Handle the "%s" case and the no-'%' case together; both reduce to
     printing a string STR known at compile time.  */
  if (strcmp (fmt_str, target_percent_s) == 0
      || strchr (fmt_str, target_percent) == NULL)
    {
      const char *str;

      if (strcmp (fmt_str, target_percent_s) == 0)
	{
	  if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
	    return NULL_TREE;

	  if (!arg || !validate_arg (arg, POINTER_TYPE))
	    return NULL_TREE;

	  str = c_getstr (arg);
	  if (str == NULL)
	    return NULL_TREE;
	}
      else
	{
	  /* The format specifier doesn't contain any '%' characters.  */
	  if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
	      && arg)
	    return NULL_TREE;
	  str = fmt_str;
	}

      /* If the string was "", printf does nothing.  */
      if (str[0] == '\0')
	return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);

      /* If the string has length of 1, call putchar.  */
      if (str[1] == '\0')
	{
	  /* Given printf("c"), (where c is any one character,)
	     convert "c"[0] to an int and pass that to the replacement
	     function.  */
	  newarg = build_int_cst (NULL_TREE, str[0]);
	  if (fn_putchar)
	    call = build_call_expr_loc (loc, fn_putchar, 1, newarg);
	}
      else
	{
	  /* If the string was "string\n", call puts("string").  */
	  size_t len = strlen (str);
	  if ((unsigned char)str[len - 1] == target_newline)
	    {
	      /* Create a NUL-terminated string that's one char shorter
		 than the original, stripping off the trailing '\n'.  */
	      char *newstr = XALLOCAVEC (char, len);
	      memcpy (newstr, str, len - 1);
	      newstr[len - 1] = 0;

	      newarg = build_string_literal (len, newstr);
	      if (fn_puts)
		call = build_call_expr_loc (loc, fn_puts, 1, newarg);
	    }
	  else
	    /* We'd like to arrange to call fputs(string,stdout) here,
	       but we need stdout and don't have a way to get it yet.  */
	    return NULL_TREE;
	}
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
    return NULL_TREE;

  /* If the format specifier was "%s\n", call __builtin_puts(arg).  */
  else if (strcmp (fmt_str, target_percent_s_newline) == 0)
    {
      if (!arg || !validate_arg (arg, POINTER_TYPE))
	return NULL_TREE;
      if (fn_puts)
	call = build_call_expr_loc (loc, fn_puts, 1, arg);
    }

  /* If the format specifier was "%c", call __builtin_putchar(arg).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg || !validate_arg (arg, INTEGER_TYPE))
	return NULL_TREE;
      if (fn_putchar)
	call = build_call_expr_loc (loc, fn_putchar, 1, arg);
    }

  if (!call)
    return NULL_TREE;

  /* Convert to printf's int return type, even though the value is
     known to be unused.  */
  return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
}
12690
12691 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
12692 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
12693 more than 3 arguments, and ARG may be null in the 2-argument case.
12694
12695 Return NULL_TREE if no simplification was possible, otherwise return the
12696 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12697 code of the function to be simplified. */
12698
12699 static tree
12700 fold_builtin_fprintf (location_t loc, tree fndecl, tree fp,
12701 tree fmt, tree arg, bool ignore,
12702 enum built_in_function fcode)
12703 {
12704 tree fn_fputc, fn_fputs, call = NULL_TREE;
12705 const char *fmt_str = NULL;
12706
12707 /* If the return value is used, don't do the transformation. */
12708 if (! ignore)
12709 return NULL_TREE;
12710
12711 /* Verify the required arguments in the original call. */
12712 if (!validate_arg (fp, POINTER_TYPE))
12713 return NULL_TREE;
12714 if (!validate_arg (fmt, POINTER_TYPE))
12715 return NULL_TREE;
12716
12717 /* Check whether the format is a literal string constant. */
12718 fmt_str = c_getstr (fmt);
12719 if (fmt_str == NULL)
12720 return NULL_TREE;
12721
12722 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
12723 {
12724 /* If we're using an unlocked function, assume the other
12725 unlocked functions exist explicitly. */
12726 fn_fputc = built_in_decls[BUILT_IN_FPUTC_UNLOCKED];
12727 fn_fputs = built_in_decls[BUILT_IN_FPUTS_UNLOCKED];
12728 }
12729 else
12730 {
12731 fn_fputc = implicit_built_in_decls[BUILT_IN_FPUTC];
12732 fn_fputs = implicit_built_in_decls[BUILT_IN_FPUTS];
12733 }
12734
12735 if (!init_target_chars ())
12736 return NULL_TREE;
12737
12738 /* If the format doesn't contain % args or %%, use strcpy. */
12739 if (strchr (fmt_str, target_percent) == NULL)
12740 {
12741 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
12742 && arg)
12743 return NULL_TREE;
12744
12745 /* If the format specifier was "", fprintf does nothing. */
12746 if (fmt_str[0] == '\0')
12747 {
12748 /* If FP has side-effects, just wait until gimplification is
12749 done. */
12750 if (TREE_SIDE_EFFECTS (fp))
12751 return NULL_TREE;
12752
12753 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12754 }
12755
12756 /* When "string" doesn't contain %, replace all cases of
12757 fprintf (fp, string) with fputs (string, fp). The fputs
12758 builtin will take care of special cases like length == 1. */
12759 if (fn_fputs)
12760 call = build_call_expr_loc (loc, fn_fputs, 2, fmt, fp);
12761 }
12762
12763 /* The other optimizations can be done only on the non-va_list variants. */
12764 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
12765 return NULL_TREE;
12766
12767 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
12768 else if (strcmp (fmt_str, target_percent_s) == 0)
12769 {
12770 if (!arg || !validate_arg (arg, POINTER_TYPE))
12771 return NULL_TREE;
12772 if (fn_fputs)
12773 call = build_call_expr_loc (loc, fn_fputs, 2, arg, fp);
12774 }
12775
12776 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
12777 else if (strcmp (fmt_str, target_percent_c) == 0)
12778 {
12779 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12780 return NULL_TREE;
12781 if (fn_fputc)
12782 call = build_call_expr_loc (loc, fn_fputc, 2, arg, fp);
12783 }
12784
12785 if (!call)
12786 return NULL_TREE;
12787 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
12788 }
12789
12790 /* Initialize format string characters in the target charset. */
12791
12792 static bool
12793 init_target_chars (void)
12794 {
12795 static bool init;
12796 if (!init)
12797 {
12798 target_newline = lang_hooks.to_target_charset ('\n');
12799 target_percent = lang_hooks.to_target_charset ('%');
12800 target_c = lang_hooks.to_target_charset ('c');
12801 target_s = lang_hooks.to_target_charset ('s');
12802 if (target_newline == 0 || target_percent == 0 || target_c == 0
12803 || target_s == 0)
12804 return false;
12805
12806 target_percent_c[0] = target_percent;
12807 target_percent_c[1] = target_c;
12808 target_percent_c[2] = '\0';
12809
12810 target_percent_s[0] = target_percent;
12811 target_percent_s[1] = target_s;
12812 target_percent_s[2] = '\0';
12813
12814 target_percent_s_newline[0] = target_percent;
12815 target_percent_s_newline[1] = target_s;
12816 target_percent_s_newline[2] = target_newline;
12817 target_percent_s_newline[3] = '\0';
12818
12819 init = true;
12820 }
12821 return true;
12822 }
12823
/* Helper function for do_mpfr_arg*().  Ensure M is a normal number
   and no overflow/underflow occurred.  INEXACT is true if M was not
   exactly calculated.  TYPE is the tree type for the result.  This
   function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail.  */

static tree
do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
      && (!flag_rounding_math || !inexact))
    {
      REAL_VALUE_TYPE rr;

      /* Convert the MPFR value into GCC's internal representation.  */
      real_from_mpfr (&rr, m, type, GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
	 check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
	 but the mpfr_t is not, then we underflowed in the
	 conversion.  */
      if (real_isfinite (&rr)
	  && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
        {
	  REAL_VALUE_TYPE rmode;

	  /* Narrow to the mode of TYPE and verify nothing was lost.  */
	  real_convert (&rmode, TYPE_MODE (type), &rr);
	  /* Proceed iff the specified mode can hold the value.  */
	  if (real_identical (&rmode, &rr))
	    return build_real (type, rmode);
	}
    }
  return NULL_TREE;
}
12860
/* Helper function for do_mpc_arg*().  Ensure M is a normal complex
   number and no overflow/underflow occurred.  INEXACT is true if M
   was not exactly calculated.  TYPE is the tree type for the result.
   This function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail; if
   FORCE_CONVERT is true, then bypass the checks.  */

static tree
do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (force_convert
      || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
	  && !mpfr_overflow_p () && !mpfr_underflow_p ()
	  && (!flag_rounding_math || !inexact)))
    {
      REAL_VALUE_TYPE re, im;

      /* TYPE is a complex type; its element type drives the conversion
	 of each part.  */
      real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
      real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
	 check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
	 but the mpfr_t is not, then we underflowed in the
	 conversion.  */
      if (force_convert
	  || (real_isfinite (&re) && real_isfinite (&im)
	      && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
	      && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
        {
	  REAL_VALUE_TYPE re_mode, im_mode;

	  /* Narrow both parts to the element mode of TYPE.  */
	  real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
	  real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
	  /* Proceed iff the specified mode can hold the value.  */
	  if (force_convert
	      || (real_identical (&re_mode, &re)
		  && real_identical (&im_mode, &im)))
	    return build_complex (type, build_real (TREE_TYPE (type), re_mode),
				  build_real (TREE_TYPE (type), im_mode));
	}
    }
  return NULL_TREE;
}
12907
/* If argument ARG is a REAL_CST, call the one-argument mpfr function
   FUNC on it and return the resulting value as a tree with type TYPE.
   If MIN and/or MAX are not NULL, then the supplied ARG must be
   within those bounds.  If INCLUSIVE is true, then MIN/MAX are
   acceptable values, otherwise they are not.  The mpfr precision is
   set to the precision of TYPE.  We assume that function FUNC returns
   zero if the result could be calculated exactly within the requested
   precision.  */

static tree
do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
	      const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
	      bool inclusive)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);

      /* Only fold finite arguments that lie inside the requested
	 (possibly open) interval.  */
      if (real_isfinite (ra)
	  && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
	  && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
        {
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  int inexact;
	  mpfr_t m;

	  /* Work at exactly TYPE's precision so a zero INEXACT return
	     from FUNC means the result is exact.  */
	  mpfr_init2 (m, prec);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  /* Reset MPFR's global exception flags immediately before the
	     call so do_mpfr_ckconv can detect overflow/underflow raised
	     by FUNC.  */
	  mpfr_clear_flags ();
	  inexact = func (m, m, rnd);
	  result = do_mpfr_ckconv (m, type, inexact);
	  mpfr_clear (m);
	}
    }

  return result;
}
12954
/* If arguments ARG1 and ARG2 are REAL_CSTs, call the two-argument mpfr
   function FUNC on them and return the resulting value as a tree with
   type TYPE.  The mpfr precision is set to the precision of TYPE.  We
   assume that function FUNC returns zero if the result could be
   calculated exactly within the requested precision.  */

static tree
do_mpfr_arg2 (tree arg1, tree arg2, tree type,
	      int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
    {
      const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
      const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);

      if (real_isfinite (ra1) && real_isfinite (ra2))
        {
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  int inexact;
	  mpfr_t m1, m2;

	  /* Work at exactly TYPE's precision so a zero INEXACT return
	     from FUNC means the result is exact.  */
	  mpfr_inits2 (prec, m1, m2, NULL);
	  mpfr_from_real (m1, ra1, GMP_RNDN);
	  mpfr_from_real (m2, ra2, GMP_RNDN);
	  /* Reset MPFR's global exception flags so do_mpfr_ckconv can
	     detect overflow/underflow raised by FUNC.  */
	  mpfr_clear_flags ();
	  inexact = func (m1, m1, m2, rnd);
	  result = do_mpfr_ckconv (m1, type, inexact);
	  mpfr_clears (m1, m2, NULL);
	}
    }

  return result;
}
12999
/* If arguments ARG1, ARG2 and ARG3 are REAL_CSTs, call the
   three-argument mpfr function FUNC on them and return the resulting
   value as a tree with type TYPE.  The mpfr precision is set to the
   precision of TYPE.  We assume that function FUNC returns zero if
   the result could be calculated exactly within the requested
   precision.  */

static tree
do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
	      int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);
  STRIP_NOPS (arg3);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
      && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
    {
      const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
      const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
      const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);

      if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
        {
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  int inexact;
	  mpfr_t m1, m2, m3;

	  /* Work at exactly TYPE's precision so a zero INEXACT return
	     from FUNC means the result is exact.  */
	  mpfr_inits2 (prec, m1, m2, m3, NULL);
	  mpfr_from_real (m1, ra1, GMP_RNDN);
	  mpfr_from_real (m2, ra2, GMP_RNDN);
	  mpfr_from_real (m3, ra3, GMP_RNDN);
	  /* Reset MPFR's global exception flags so do_mpfr_ckconv can
	     detect overflow/underflow raised by FUNC.  */
	  mpfr_clear_flags ();
	  inexact = func (m1, m1, m2, m3, rnd);
	  result = do_mpfr_ckconv (m1, type, inexact);
	  mpfr_clears (m1, m2, m3, NULL);
	}
    }

  return result;
}
13048
/* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
   the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
   If ARG_SINP and ARG_COSP are NULL then the result is returned
   as a complex value.
   The type is taken from the type of ARG and is used for setting the
   precision of the calculation and results.  */

static tree
do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
{
  tree const type = TREE_TYPE (arg);
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST
      && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);

      if (real_isfinite (ra))
        {
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  tree result_s, result_c;
	  int inexact;
	  mpfr_t m, ms, mc;

	  /* MS and MC receive sin and cos respectively; the single
	     INEXACT result covers both.  */
	  mpfr_inits2 (prec, m, ms, mc, NULL);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  /* Reset MPFR's exception flags so do_mpfr_ckconv can detect
	     overflow/underflow raised by mpfr_sin_cos.  */
	  mpfr_clear_flags ();
	  inexact = mpfr_sin_cos (ms, mc, m, rnd);
	  result_s = do_mpfr_ckconv (ms, type, inexact);
	  result_c = do_mpfr_ckconv (mc, type, inexact);
	  mpfr_clears (m, ms, mc, NULL);
	  /* Both conversions must succeed, or nothing is folded.  */
	  if (result_s && result_c)
	    {
	      /* If we are to return in a complex value do so.  */
	      if (!arg_sinp && !arg_cosp)
		return build_complex (build_complex_type (type),
				      result_c, result_s);

	      /* Dereference the sin/cos pointer arguments.  */
	      arg_sinp = build_fold_indirect_ref (arg_sinp);
	      arg_cosp = build_fold_indirect_ref (arg_cosp);
	      /* Proceed if valid pointer type were passed in.  */
	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
		  && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
	        {
		  /* Set the values.  */
		  result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
					  result_s);
		  TREE_SIDE_EFFECTS (result_s) = 1;
		  result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
					  result_c);
		  TREE_SIDE_EFFECTS (result_c) = 1;
		  /* Combine the assignments into a compound expr.  */
		  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						    result_s, result_c));
		}
	    }
	}
    }
  return result;
}
13118
/* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
   two-argument mpfr order N Bessel function FUNC on them and return
   the resulting value as a tree with type TYPE.  The mpfr precision
   is set to the precision of TYPE.  We assume that function FUNC
   returns zero if the result could be calculated exactly within the
   requested precision.  If MIN is not NULL, the supplied ARG2 must
   additionally satisfy the (possibly INCLUSIVE) lower bound.  */
static tree
do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
		  int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
		  const REAL_VALUE_TYPE *min, bool inclusive)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && host_integerp (arg1, 0)
      && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
    {
      const HOST_WIDE_INT n = tree_low_cst(arg1, 0);
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);

      /* FUNC takes the order as a host 'long'; bail out if the
	 HOST_WIDE_INT order would not survive the narrowing.  */
      if (n == (long)n
	  && real_isfinite (ra)
	  && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
        {
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  int inexact;
	  mpfr_t m;

	  mpfr_init2 (m, prec);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  /* Reset MPFR's exception flags so do_mpfr_ckconv can detect
	     overflow/underflow raised by FUNC.  */
	  mpfr_clear_flags ();
	  inexact = func (m, n, m, rnd);
	  result = do_mpfr_ckconv (m, type, inexact);
	  mpfr_clear (m);
	}
    }

  return result;
}
13165
/* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
   the pointer *(ARG_QUO) and return the result.  The type is taken
   from the type of ARG0 and is used for setting the precision of the
   calculation and results.  */

static tree
do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
{
  tree const type = TREE_TYPE (arg0);
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
    {
      const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
      const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);

      if (real_isfinite (ra0) && real_isfinite (ra1))
        {
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  tree result_rem;
	  long integer_quo;
	  mpfr_t m0, m1;

	  mpfr_inits2 (prec, m0, m1, NULL);
	  mpfr_from_real (m0, ra0, GMP_RNDN);
	  mpfr_from_real (m1, ra1, GMP_RNDN);
	  /* Reset MPFR's exception flags so do_mpfr_ckconv can detect
	     overflow/underflow raised by mpfr_remquo.  */
	  mpfr_clear_flags ();
	  mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
	  /* Remquo is independent of the rounding mode, so pass
	     inexact=0 to do_mpfr_ckconv().  */
	  result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
	  mpfr_clears (m0, m1, NULL);
	  if (result_rem)
	    {
	      /* MPFR calculates quo in the host's long so it may
		 return more bits in quo than the target int can hold
		 if sizeof(host long) > sizeof(target int).  This can
		 happen even for native compilers in LP64 mode.  In
		 these cases, modulo the quo value with the largest
		 number that the target int can hold while leaving one
		 bit for the sign.  */
	      if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
		integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));

	      /* Dereference the quo pointer argument.  */
	      arg_quo = build_fold_indirect_ref (arg_quo);
	      /* Proceed iff a valid pointer type was passed in.  */
	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
	        {
		  /* Set the value.  */
		  tree result_quo = fold_build2 (MODIFY_EXPR,
						 TREE_TYPE (arg_quo), arg_quo,
						 build_int_cst (NULL, integer_quo));
		  TREE_SIDE_EFFECTS (result_quo) = 1;
		  /* Combine the quo assignment with the rem.  */
		  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						    result_quo, result_rem));
		}
	    }
	}
    }
  return result;
}
13238
/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
   resulting value as a tree with type TYPE.  The mpfr precision is
   set to the precision of TYPE.  We assume that this mpfr function
   returns zero if the result could be calculated exactly within the
   requested precision.  In addition, the integer pointer represented
   by ARG_SG will be dereferenced and set to the appropriate signgam
   (-1,1) value.  */

static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
	 negative integer (lgamma poles).  */
      if (real_isfinite (ra)
	  && ra->cl != rvc_zero
	  && !(real_isneg(ra) && real_isinteger(ra, TYPE_MODE (type))))
        {
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  int inexact, sg;
	  mpfr_t m;
	  tree result_lg;

	  mpfr_init2 (m, prec);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  /* Reset MPFR's exception flags so do_mpfr_ckconv can detect
	     overflow/underflow raised by mpfr_lgamma.  */
	  mpfr_clear_flags ();
	  inexact = mpfr_lgamma (m, &sg, m, rnd);
	  result_lg = do_mpfr_ckconv (m, type, inexact);
	  mpfr_clear (m);
	  if (result_lg)
	    {
	      tree result_sg;

	      /* Dereference the arg_sg pointer argument.  */
	      arg_sg = build_fold_indirect_ref (arg_sg);
	      /* Assign the signgam value into *arg_sg.  */
	      result_sg = fold_build2 (MODIFY_EXPR,
				       TREE_TYPE (arg_sg), arg_sg,
				       build_int_cst (NULL, sg));
	      TREE_SIDE_EFFECTS (result_sg) = 1;
	      /* Combine the signgam assignment with the lgamma result.  */
	      result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						result_sg, result_lg));
	    }
	}
    }

  return result;
}
13303
/* If argument ARG is a COMPLEX_CST, call the one-argument mpc
   function FUNC on it and return the resulting value as a tree with
   type TYPE.  The mpfr precision is set to the precision of TYPE.  We
   assume that function FUNC returns zero if the result could be
   calculated exactly within the requested precision.  */

static tree
do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
      const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));

      if (real_isfinite (re) && real_isfinite (im))
        {
	  /* Use the element type of the complex TYPE for precision and
	     rounding choices.  */
	  const struct real_format *const fmt =
	    REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
	  int inexact;
	  mpc_t m;

	  mpc_init2 (m, prec);
	  mpfr_from_real (mpc_realref(m), re, rnd);
	  mpfr_from_real (mpc_imagref(m), im, rnd);
	  /* Reset MPFR's exception flags so do_mpc_ckconv can detect
	     overflow/underflow raised by FUNC.  */
	  mpfr_clear_flags ();
	  inexact = func (m, m, crnd);
	  result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
	  mpc_clear (m);
	}
    }

  return result;
}
13348
/* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
   mpc function FUNC on it and return the resulting value as a tree
   with type TYPE.  The mpfr precision is set to the precision of
   TYPE.  We assume that function FUNC returns zero if the result
   could be calculated exactly within the requested precision.  If
   DO_NONFINITE is true, then fold expressions containing Inf or NaN
   in the arguments and/or results.  */

tree
do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
	     int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
      && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
      const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
      const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
      const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));

      /* With DO_NONFINITE, Inf/NaN parts are accepted; do_mpc_ckconv
	 is then told to bypass its finiteness checks too.  */
      if (do_nonfinite
	  || (real_isfinite (re0) && real_isfinite (im0)
	      && real_isfinite (re1) && real_isfinite (im1)))
        {
	  /* Use the element type of the complex TYPE for precision and
	     rounding choices.  */
	  const struct real_format *const fmt =
	    REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
	  int inexact;
	  mpc_t m0, m1;

	  mpc_init2 (m0, prec);
	  mpc_init2 (m1, prec);
	  mpfr_from_real (mpc_realref(m0), re0, rnd);
	  mpfr_from_real (mpc_imagref(m0), im0, rnd);
	  mpfr_from_real (mpc_realref(m1), re1, rnd);
	  mpfr_from_real (mpc_imagref(m1), im1, rnd);
	  /* Reset MPFR's exception flags so do_mpc_ckconv can detect
	     overflow/underflow raised by FUNC.  */
	  mpfr_clear_flags ();
	  inexact = func (m0, m0, m1, crnd);
	  result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
	  mpc_clear (m0);
	  mpc_clear (m1);
	}
    }

  return result;
}
13407
13408 /* FIXME tuples.
13409 The functions below provide an alternate interface for folding
13410 builtin function calls presented as GIMPLE_CALL statements rather
13411 than as CALL_EXPRs. The folded result is still expressed as a
13412 tree. There is too much code duplication in the handling of
13413 varargs functions, and a more intrusive re-factoring would permit
13414 better sharing of code between the tree and statement-based
13415 versions of these functions. */
13416
13417 /* Construct a new CALL_EXPR using the tail of the argument list of STMT
13418 along with N new arguments specified as the "..." parameters. SKIP
13419 is the number of arguments in STMT to be omitted. This function is used
13420 to do varargs-to-varargs transformations. */
13421
13422 static tree
13423 gimple_rewrite_call_expr (gimple stmt, int skip, tree fndecl, int n, ...)
13424 {
13425 int oldnargs = gimple_call_num_args (stmt);
13426 int nargs = oldnargs - skip + n;
13427 tree fntype = TREE_TYPE (fndecl);
13428 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
13429 tree *buffer;
13430 int i, j;
13431 va_list ap;
13432 location_t loc = gimple_location (stmt);
13433
13434 buffer = XALLOCAVEC (tree, nargs);
13435 va_start (ap, n);
13436 for (i = 0; i < n; i++)
13437 buffer[i] = va_arg (ap, tree);
13438 va_end (ap);
13439 for (j = skip; j < oldnargs; j++, i++)
13440 buffer[i] = gimple_call_arg (stmt, j);
13441
13442 return fold (build_call_array_loc (loc, TREE_TYPE (fntype), fn, nargs, buffer));
13443 }
13444
13445 /* Fold a call STMT to __{,v}sprintf_chk. Return NULL_TREE if
13446 a normal call should be emitted rather than expanding the function
13447 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
13448
13449 static tree
13450 gimple_fold_builtin_sprintf_chk (gimple stmt, enum built_in_function fcode)
13451 {
13452 tree dest, size, len, fn, fmt, flag;
13453 const char *fmt_str;
13454 int nargs = gimple_call_num_args (stmt);
13455
13456 /* Verify the required arguments in the original call. */
13457 if (nargs < 4)
13458 return NULL_TREE;
13459 dest = gimple_call_arg (stmt, 0);
13460 if (!validate_arg (dest, POINTER_TYPE))
13461 return NULL_TREE;
13462 flag = gimple_call_arg (stmt, 1);
13463 if (!validate_arg (flag, INTEGER_TYPE))
13464 return NULL_TREE;
13465 size = gimple_call_arg (stmt, 2);
13466 if (!validate_arg (size, INTEGER_TYPE))
13467 return NULL_TREE;
13468 fmt = gimple_call_arg (stmt, 3);
13469 if (!validate_arg (fmt, POINTER_TYPE))
13470 return NULL_TREE;
13471
13472 if (! host_integerp (size, 1))
13473 return NULL_TREE;
13474
13475 len = NULL_TREE;
13476
13477 if (!init_target_chars ())
13478 return NULL_TREE;
13479
13480 /* Check whether the format is a literal string constant. */
13481 fmt_str = c_getstr (fmt);
13482 if (fmt_str != NULL)
13483 {
13484 /* If the format doesn't contain % args or %%, we know the size. */
13485 if (strchr (fmt_str, target_percent) == 0)
13486 {
13487 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
13488 len = build_int_cstu (size_type_node, strlen (fmt_str));
13489 }
13490 /* If the format is "%s" and first ... argument is a string literal,
13491 we know the size too. */
13492 else if (fcode == BUILT_IN_SPRINTF_CHK
13493 && strcmp (fmt_str, target_percent_s) == 0)
13494 {
13495 tree arg;
13496
13497 if (nargs == 5)
13498 {
13499 arg = gimple_call_arg (stmt, 4);
13500 if (validate_arg (arg, POINTER_TYPE))
13501 {
13502 len = c_strlen (arg, 1);
13503 if (! len || ! host_integerp (len, 1))
13504 len = NULL_TREE;
13505 }
13506 }
13507 }
13508 }
13509
13510 if (! integer_all_onesp (size))
13511 {
13512 if (! len || ! tree_int_cst_lt (len, size))
13513 return NULL_TREE;
13514 }
13515
13516 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
13517 or if format doesn't contain % chars or is "%s". */
13518 if (! integer_zerop (flag))
13519 {
13520 if (fmt_str == NULL)
13521 return NULL_TREE;
13522 if (strchr (fmt_str, target_percent) != NULL
13523 && strcmp (fmt_str, target_percent_s))
13524 return NULL_TREE;
13525 }
13526
13527 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
13528 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
13529 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
13530 if (!fn)
13531 return NULL_TREE;
13532
13533 return gimple_rewrite_call_expr (stmt, 4, fn, 2, dest, fmt);
13534 }
13535
/* Fold a call STMT to {,v}snprintf.  Return NULL_TREE if
   a normal call should be emitted rather than expanding the function
   inline.  FCODE is either BUILT_IN_SNPRINTF_CHK or
   BUILT_IN_VSNPRINTF_CHK.  If MAXLEN is not NULL, it is maximum length
   passed as second argument.  */

tree
gimple_fold_builtin_snprintf_chk (gimple stmt, tree maxlen,
                                  enum built_in_function fcode)
{
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;

  /* Verify the required arguments in the original call:
     (dest, len, flag, objsize, fmt, ...).  */
  if (gimple_call_num_args (stmt) < 5)
    return NULL_TREE;
  dest = gimple_call_arg (stmt, 0);
  if (!validate_arg (dest, POINTER_TYPE))
    return NULL_TREE;
  len = gimple_call_arg (stmt, 1);
  if (!validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;
  flag = gimple_call_arg (stmt, 2);
  if (!validate_arg (flag, INTEGER_TYPE))
    return NULL_TREE;
  size = gimple_call_arg (stmt, 3);
  if (!validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;
  fmt = gimple_call_arg (stmt, 4);
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  /* The object size must be a known constant.  */
  if (! host_integerp (size, 1))
    return NULL_TREE;

  /* Unless the object size is unknown (all ones), SIZE must be known
     to be at least the buffer length bound.  */
  if (! integer_all_onesp (size))
    {
      if (! host_integerp (len, 1))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
	    return NULL_TREE;
	}
      else
	maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
	return NULL_TREE;
    }

  if (!init_target_chars ())
    return NULL_TREE;

  /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      fmt_str = c_getstr (fmt);
      if (fmt_str == NULL)
	return NULL_TREE;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return NULL_TREE;
    }

  /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
     available.  */
  fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
		      ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
  if (!fn)
    return NULL_TREE;

  /* Drop the flag and object-size arguments: (dest, len, fmt, ...).  */
  return gimple_rewrite_call_expr (stmt, 5, fn, 3, dest, len, fmt);
}
13612
13613 /* Builtins with folding operations that operate on "..." arguments
13614 need special handling; we need to store the arguments in a convenient
13615 data structure before attempting any folding. Fortunately there are
13616 only a few builtins that fall into this category. FNDECL is the
13617 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
13618 result of the function call is ignored. */
13619
13620 static tree
13621 gimple_fold_builtin_varargs (tree fndecl, gimple stmt,
13622 bool ignore ATTRIBUTE_UNUSED)
13623 {
13624 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
13625 tree ret = NULL_TREE;
13626
13627 switch (fcode)
13628 {
13629 case BUILT_IN_SPRINTF_CHK:
13630 case BUILT_IN_VSPRINTF_CHK:
13631 ret = gimple_fold_builtin_sprintf_chk (stmt, fcode);
13632 break;
13633
13634 case BUILT_IN_SNPRINTF_CHK:
13635 case BUILT_IN_VSNPRINTF_CHK:
13636 ret = gimple_fold_builtin_snprintf_chk (stmt, NULL_TREE, fcode);
13637
13638 default:
13639 break;
13640 }
13641 if (ret)
13642 {
13643 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
13644 TREE_NO_WARNING (ret) = 1;
13645 return ret;
13646 }
13647 return NULL_TREE;
13648 }
13649
13650 /* A wrapper function for builtin folding that prevents warnings for
13651 "statement without effect" and the like, caused by removing the
13652 call node earlier than the warning is generated. */
13653
13654 tree
13655 fold_call_stmt (gimple stmt, bool ignore)
13656 {
13657 tree ret = NULL_TREE;
13658 tree fndecl = gimple_call_fndecl (stmt);
13659 location_t loc = gimple_location (stmt);
13660 if (fndecl
13661 && TREE_CODE (fndecl) == FUNCTION_DECL
13662 && DECL_BUILT_IN (fndecl)
13663 && !gimple_call_va_arg_pack_p (stmt))
13664 {
13665 int nargs = gimple_call_num_args (stmt);
13666
13667 if (avoid_folding_inline_builtin (fndecl))
13668 return NULL_TREE;
13669 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
13670 {
13671 return targetm.fold_builtin (fndecl, nargs,
13672 (nargs > 0
13673 ? gimple_call_arg_ptr (stmt, 0)
13674 : &error_mark_node), ignore);
13675 }
13676 else
13677 {
13678 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
13679 {
13680 tree args[MAX_ARGS_TO_FOLD_BUILTIN];
13681 int i;
13682 for (i = 0; i < nargs; i++)
13683 args[i] = gimple_call_arg (stmt, i);
13684 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
13685 }
13686 if (!ret)
13687 ret = gimple_fold_builtin_varargs (fndecl, stmt, ignore);
13688 if (ret)
13689 {
13690 /* Propagate location information from original call to
13691 expansion of builtin. Otherwise things like
13692 maybe_emit_chk_warning, that operate on the expansion
13693 of a builtin, will use the wrong location information. */
13694 if (gimple_has_location (stmt))
13695 {
13696 tree realret = ret;
13697 if (TREE_CODE (ret) == NOP_EXPR)
13698 realret = TREE_OPERAND (ret, 0);
13699 if (CAN_HAVE_LOCATION_P (realret)
13700 && !EXPR_HAS_LOCATION (realret))
13701 SET_EXPR_LOCATION (realret, loc);
13702 return realret;
13703 }
13704 return ret;
13705 }
13706 }
13707 }
13708 return NULL_TREE;
13709 }
13710
/* Look up the function in built_in_decls that corresponds to DECL
   and set ASMSPEC as its user assembler name.  DECL must be a
   function decl that declares a builtin.  */

void
set_builtin_user_assembler_name (tree decl, const char *asmspec)
{
  tree builtin;
  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
	      && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
	      && asmspec != 0);

  builtin = built_in_decls [DECL_FUNCTION_CODE (decl)];
  set_user_assembler_name (builtin, asmspec);
  /* For builtins the expander may emit as library calls, the
     corresponding libfunc must be renamed too so RTL expansion
     references the user-provided symbol.  */
  switch (DECL_FUNCTION_CODE (decl))
    {
    case BUILT_IN_MEMCPY:
      init_block_move_fn (asmspec);
      memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
      break;
    case BUILT_IN_MEMSET:
      init_block_clear_fn (asmspec);
      memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
      break;
    case BUILT_IN_MEMMOVE:
      memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
      break;
    case BUILT_IN_MEMCMP:
      memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
      break;
    case BUILT_IN_ABORT:
      abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
      break;
    case BUILT_IN_FFS:
      /* ffs works on int; only the int-mode optab entry needs the
	 renamed libfunc, and only when int is narrower than a word.  */
      if (INT_TYPE_SIZE < BITS_PER_WORD)
	{
	  set_user_assembler_libfunc ("ffs", asmspec);
	  set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
						       MODE_INT, 0), "ffs");
	}
      break;
    default:
      break;
    }
}
13756
13757 /* Return true if DECL is a builtin that expands to a constant or similarly
13758 simple code. */
13759 bool
13760 is_simple_builtin (tree decl)
13761 {
13762 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
13763 switch (DECL_FUNCTION_CODE (decl))
13764 {
13765 /* Builtins that expand to constants. */
13766 case BUILT_IN_CONSTANT_P:
13767 case BUILT_IN_EXPECT:
13768 case BUILT_IN_OBJECT_SIZE:
13769 case BUILT_IN_UNREACHABLE:
13770 /* Simple register moves or loads from stack. */
13771 case BUILT_IN_RETURN_ADDRESS:
13772 case BUILT_IN_EXTRACT_RETURN_ADDR:
13773 case BUILT_IN_FROB_RETURN_ADDR:
13774 case BUILT_IN_RETURN:
13775 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
13776 case BUILT_IN_FRAME_ADDRESS:
13777 case BUILT_IN_VA_END:
13778 case BUILT_IN_STACK_SAVE:
13779 case BUILT_IN_STACK_RESTORE:
13780 /* Exception state returns or moves registers around. */
13781 case BUILT_IN_EH_FILTER:
13782 case BUILT_IN_EH_POINTER:
13783 case BUILT_IN_EH_COPY_VALUES:
13784 return true;
13785
13786 default:
13787 return false;
13788 }
13789
13790 return false;
13791 }
13792
13793 /* Return true if DECL is a builtin that is not expensive, i.e., they are
13794 most probably expanded inline into reasonably simple code. This is a
13795 superset of is_simple_builtin. */
13796 bool
13797 is_inexpensive_builtin (tree decl)
13798 {
13799 if (!decl)
13800 return false;
13801 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
13802 return true;
13803 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
13804 switch (DECL_FUNCTION_CODE (decl))
13805 {
13806 case BUILT_IN_ABS:
13807 case BUILT_IN_ALLOCA:
13808 case BUILT_IN_BSWAP32:
13809 case BUILT_IN_BSWAP64:
13810 case BUILT_IN_CLZ:
13811 case BUILT_IN_CLZIMAX:
13812 case BUILT_IN_CLZL:
13813 case BUILT_IN_CLZLL:
13814 case BUILT_IN_CTZ:
13815 case BUILT_IN_CTZIMAX:
13816 case BUILT_IN_CTZL:
13817 case BUILT_IN_CTZLL:
13818 case BUILT_IN_FFS:
13819 case BUILT_IN_FFSIMAX:
13820 case BUILT_IN_FFSL:
13821 case BUILT_IN_FFSLL:
13822 case BUILT_IN_IMAXABS:
13823 case BUILT_IN_FINITE:
13824 case BUILT_IN_FINITEF:
13825 case BUILT_IN_FINITEL:
13826 case BUILT_IN_FINITED32:
13827 case BUILT_IN_FINITED64:
13828 case BUILT_IN_FINITED128:
13829 case BUILT_IN_FPCLASSIFY:
13830 case BUILT_IN_ISFINITE:
13831 case BUILT_IN_ISINF_SIGN:
13832 case BUILT_IN_ISINF:
13833 case BUILT_IN_ISINFF:
13834 case BUILT_IN_ISINFL:
13835 case BUILT_IN_ISINFD32:
13836 case BUILT_IN_ISINFD64:
13837 case BUILT_IN_ISINFD128:
13838 case BUILT_IN_ISNAN:
13839 case BUILT_IN_ISNANF:
13840 case BUILT_IN_ISNANL:
13841 case BUILT_IN_ISNAND32:
13842 case BUILT_IN_ISNAND64:
13843 case BUILT_IN_ISNAND128:
13844 case BUILT_IN_ISNORMAL:
13845 case BUILT_IN_ISGREATER:
13846 case BUILT_IN_ISGREATEREQUAL:
13847 case BUILT_IN_ISLESS:
13848 case BUILT_IN_ISLESSEQUAL:
13849 case BUILT_IN_ISLESSGREATER:
13850 case BUILT_IN_ISUNORDERED:
13851 case BUILT_IN_VA_ARG_PACK:
13852 case BUILT_IN_VA_ARG_PACK_LEN:
13853 case BUILT_IN_VA_COPY:
13854 case BUILT_IN_TRAP:
13855 case BUILT_IN_SAVEREGS:
13856 case BUILT_IN_POPCOUNTL:
13857 case BUILT_IN_POPCOUNTLL:
13858 case BUILT_IN_POPCOUNTIMAX:
13859 case BUILT_IN_POPCOUNT:
13860 case BUILT_IN_PARITYL:
13861 case BUILT_IN_PARITYLL:
13862 case BUILT_IN_PARITYIMAX:
13863 case BUILT_IN_PARITY:
13864 case BUILT_IN_LABS:
13865 case BUILT_IN_LLABS:
13866 case BUILT_IN_PREFETCH:
13867 return true;
13868
13869 default:
13870 return is_simple_builtin (decl);
13871 }
13872
13873 return false;
13874 }
13875